Skip to content

Commit

Permalink
add open_virtual_dataarray
Browse files Browse the repository at this point in the history
  • Loading branch information
TomNicholas committed Nov 14, 2024
1 parent ab69033 commit ec9748c
Show file tree
Hide file tree
Showing 3 changed files with 87 additions and 7 deletions.
2 changes: 1 addition & 1 deletion virtualizarr/__init__.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
from virtualizarr.manifests import ChunkManifest, ManifestArray # type: ignore # noqa
from virtualizarr.accessor import VirtualiZarrDatasetAccessor # type: ignore # noqa
from virtualizarr.backend import open_virtual_dataset # noqa: F401
from virtualizarr.backend import open_virtual_dataset, open_virtual_dataarray # noqa: F401

from importlib.metadata import version as _version

Expand Down
52 changes: 51 additions & 1 deletion virtualizarr/backend.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@
Optional,
)

from xarray import Dataset, Index
from xarray import DataArray, Dataset, Index

from virtualizarr.manifests import ManifestArray
from virtualizarr.readers import (
Expand Down Expand Up @@ -198,3 +198,53 @@ def open_virtual_dataset(
)

return vds


def open_virtual_dataarray(
    filepath: str,
    *,
    filetype: FileType | None = None,
    group: str | None = None,
    drop_variables: Iterable[str] | None = None,
    loadable_variables: Iterable[str] | None = None,
    decode_times: bool | None = None,
    indexes: Mapping[str, Index] | None = None,
    virtual_array_class=ManifestArray,
    reader_options: Optional[dict] = None,
) -> DataArray:
    """Open a file as a single virtual xarray DataArray.

    Determines the appropriate backend for ``filepath`` (from ``filetype``
    if given, otherwise by sniffing the file) and delegates to that
    backend's ``open_virtual_dataarray`` method.

    Raises
    ------
    NotImplementedError
        If ``virtual_array_class`` is not ``ManifestArray``, or no backend
        is registered for the detected file type.
    """
    # Fail early on contradictory drop/load variable requests.
    drop_variables, loadable_variables = check_for_collisions(
        drop_variables, loadable_variables
    )

    # Only ManifestArray-backed virtual arrays are supported so far.
    if virtual_array_class is not ManifestArray:
        raise NotImplementedError()

    reader_options = {} if reader_options is None else reader_options

    # A user-supplied filetype wins (coerced to the FileType enum);
    # otherwise sniff the format from the file contents.
    if filetype is None:
        filetype = automatically_determine_filetype(
            filepath=filepath, reader_options=reader_options
        )
    else:
        filetype = FileType(filetype)

    backend_cls = VIRTUAL_BACKENDS.get(filetype.name.lower())
    if backend_cls is None:
        raise NotImplementedError(f"Unsupported file type: {filetype.name}")

    return backend_cls.open_virtual_dataarray(
        filepath,
        group=group,
        drop_variables=drop_variables,
        loadable_variables=loadable_variables,
        decode_times=decode_times,
        indexes=indexes,
        reader_options=reader_options,
    )
40 changes: 35 additions & 5 deletions virtualizarr/readers/common.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@

from xarray import (
Coordinates,
DataArray,
Dataset,
DataTree,
Index,
Expand Down Expand Up @@ -93,11 +94,11 @@ def open_loadable_vars_and_indexes(


def construct_virtual_dataset(
virtual_vars,
loadable_vars,
indexes,
coord_names,
attrs,
virtual_vars: Mapping[str, Variable],
loadable_vars: Mapping[str, Variable],
indexes: Mapping[str, Index],
coord_names: Iterable[str],
attrs: dict[str, str],
) -> Dataset:
"""Construct a virtual Datset from consistuent parts."""

Expand All @@ -117,6 +118,23 @@ def construct_virtual_dataset(
return vds


def construct_virtual_dataarray(
    virtual_var: Variable,
    loadable_vars: Mapping[str, Variable],
    indexes: Mapping[str, Index],
    coord_names: Iterable[str],
    attrs: dict[str, str],
) -> DataArray:
    """Construct a virtual DataArray from its constituent parts.

    NOTE(review): ``loadable_vars`` and ``indexes`` are accepted but
    currently unused — presumably to keep the signature parallel with
    ``construct_virtual_dataset``; confirm before relying on them.
    """
    # indexes={} should be passed here once supported by a later xarray version.
    return DataArray(
        data=virtual_var,
        coords=coord_names,
        attrs=attrs,
    )


def separate_coords(
vars: Mapping[str, Variable],
indexes: MutableMapping[str, Index],
Expand Down Expand Up @@ -167,6 +185,18 @@ def separate_coords(


class VirtualBackend(ABC):
    @staticmethod
    def open_virtual_dataarray(
        filepath: str,
        group: str | None = None,
        drop_variables: Iterable[str] | None = None,
        loadable_variables: Iterable[str] | None = None,
        decode_times: bool | None = None,
        indexes: Mapping[str, Index] | None = None,
        reader_options: Optional[dict] = None,
    ) -> DataArray:
        """Open ``filepath`` as a single virtual DataArray.

        Abstract hook for concrete backends to override; this base
        implementation always raises ``NotImplementedError``.
        """
        raise NotImplementedError()

@staticmethod
def open_virtual_dataset(
filepath: str,
Expand Down

0 comments on commit ec9748c

Please sign in to comment.