diff --git a/docs/example_surface_v070.yml b/docs/example_surface_v070.yml
index f0987934d..7498d4c6c 100644
--- a/docs/example_surface_v070.yml
+++ b/docs/example_surface_v070.yml
@@ -16,7 +16,7 @@ tracklog: # changed from events
 class: surface
 file: # peesv: Re-entered the file block, as the file is a specific concept that we currently put logic on
-  absolute: c:/absolute/share/results/maps/volantis_gp_base--depth.gri
+  absolute: c:/absolute/share/results/maps/volantis_gp_base--depth.gri # JRIV added
   relative: share/results/maps/volantis_gp_base--depth.gri
   checksum_md5: kjhsdfvsdlfk23knerknvk23 #
@@ -31,9 +31,9 @@ fmu:
     - detailed description
     - optional

-  # I WOULD LIKE TO DROP THIS, SINCE IT HAS TO BE INJECTED AFTERWARDS
-  # workflow: # not sure, but a reference to the workflow / job that made this. Making it expandable.
-  #   reference: rms/structural_model
+  # I WOULD LIKE TO DROP THIS, SINCE IT HAS TO BE INJECTED AFTERWARDS,
+  workflow:
+    reference: post eclipse script # generated by the aggregation script... during aggregation??

 element: # currently not used on Drogon, but extensively used on e.g. JS
diff --git a/pytest.ini b/pytest.ini
new file mode 100644
index 000000000..c3d952ffe
--- /dev/null
+++ b/pytest.ini
@@ -0,0 +1,10 @@
+#pytest tests/test_grid3d/test_grid.py::test_roffbin_import_v2_emerald -o log_cli=true -s
+
+[pytest]
+minversion = 6.0
+addopts = --verbose
+log_cli = False
+log_cli_format = %(levelname)8s (%(relativeCreated)6.0fms) %(filename)44s [%(funcName)40s()] %(lineno)4d >> %(message)s
+log_cli_level = INFO
+testpaths =
+    tests
diff --git a/src/fmu/dataio/_export_item.py b/src/fmu/dataio/_export_item.py
index c4e74893f..f5d5809b2 100644
--- a/src/fmu/dataio/_export_item.py
+++ b/src/fmu/dataio/_export_item.py
@@ -1,6 +1,8 @@
 """Private module for Surface IO in DataIO class."""
+import warnings
 import logging
 import json
+from datetime import datetime

 import numpy as np
 from collections import OrderedDict
@@ -21,6 +23,7 @@ def __init__(self, dataio, obj, verbosity="warning"):
         self.dataio = dataio
         self.obj = obj
         self.verbosity = verbosity
+        self.subtype = None

         if self.verbosity is None:
             self.verbosity = self.dataio._verbosity
@@ -36,21 +39,301 @@ def save_to_file(self):
         written to disk here.
         """
         if isinstance(self.obj, xtgeo.RegularSurface):
-            self._surface_to_file()
+            self.subtype = "RegularSurface"
+
+        self._data_process()
+        self._data_process_object()
+        self._fmu_inject_workflow()  # this will vary if surface, table, grid, ...
+        self._item_to_file()
+
+    def _data_process(self):
+        """Process some potentially common subfields in the data block.
+
+        These subfields are:
+        - name
+        - top/base (from relation)
+        - content
+        - time
+        - properties
+        - grid_model
+        - is_observation
+        - is_prediction
+        - description
+        """
+        self._data_process_name()
+        self._data_process_relation()
+        self._data_process_content()
+        self._data_process_timedata()
+        self._data_process_various()
+
+    def _data_process_name(self):
+        """Process the name subfield."""
+        # first detect if a name is given, or infer the name from the object if
+        # possible; then determine if the name is stratigraphic and assign a
+        # "true" valid name
+        logger.info("Evaluate data:name attribute")
+        usename = "unknown"
+        meta = self.dataio._meta_data
+
+        if self.dataio._name is None:
+            try:
+                usename = self.obj._name
+            except AttributeError:
+                warnings.warn("Cannot set name", UserWarning)
+        else:
+            usename = self.dataio._name
+
+        # next check if usename has a "truename" and/or aliases from the config
+        strat = self.dataio._meta_strat  # shortform
+
+        if strat is None:
+            meta["name"] = usename
+        elif strat is not None and usename not in strat:
+            meta["name"] = usename
+        else:
+            meta["name"] = strat[usename].get("name", usename)
+            meta["stratigraphic"] = strat[usename].get("stratigraphic", False)
+            meta["alias"] = strat[usename].get("alias", None)
+            meta["stratigraphic_alias"] = strat[usename].get(
+                "stratigraphic_alias", None
+            )
+        logger.info(
+            "Evaluate data:name attribute done, true name is <%s>", meta["name"]
+        )
+
+    def _data_process_relation(self):
+        """Process the relation input which gives offset and top/base settings.
+
+        For example::
+
+            relation:
+                offset: 3.5
+
+                top:
+                    ref: TopVolantis
+                    offset: 2.0
+                base:
+                    ref: BaseVolantis
+                    offset: 8.3
+
+        The stratigraphic input in fmuconfig may look like this::
+
+            TopVolantis:               <-- RMS modelling name -> ref
+                stratigraphic: true
+                name: VOLANTIS GP. Top <-- SMDA / official name -> name
+
+        So the dilemma is that in the input it is natural for the end user to
+        use the RMS modelling name, but the SMDA name may also be applied.
+        And what if a name is not found? Assume OK or complain? Should one
+        validate at all?
+
+        """
+        logger.info("Evaluate relation (offset, top, base), if any")
+        meta = self.dataio._meta_data
+        if self.dataio._relation is None:
+            logger.info("No relation found, which may be ok")
+            return  # relation data are missing
+
+        rel = self.dataio._relation  # shall be a dictionary
+
+        offset = rel.get("offset", None)
+        if offset is not None:
+            logger.info("Offset is found")
+            meta["offset"] = offset
+
+        # process top and base (in that case, both must be present)
+        top = rel.get("top", None)
+        base = rel.get("base", None)
+        if top is None or base is None:
+            logger.info("Relation top and/or base is missing, skip further")
+            return
+
+        topname = rel["top"].get("ref", None)
+        basename = rel["base"].get("ref", None)
+
+        if topname is None or basename is None:
+            warnings.warn(
+                "Relation top and/or base is present but ref is missing, skip further",
+                UserWarning,
+            )
+            return
+
+        # finally, validate if top/base name is stratigraphic and set metadata
+        group = {"top": topname, "base": basename}
+        strat = self.dataio._meta_strat
+        for item, somename in group.items():
+            usename = somename
+            offset = 0.0
+            stratigraphic = False
+            if somename in strat:
+                logger.info("Found <%s> in stratigraphy", somename)
+                usename = strat[somename].get("name", somename)
+                stratigraphic = strat[somename].get("stratigraphic", False)
+                offset = rel[item].get("offset", 0.0)
+            else:
+                logger.error("Did not find <%s> in stratigraphy input", somename)
+                raise ValueError(f"Cannot find {somename} in stratigraphy input")
+            meta[item] = OrderedDict()
+            meta[item]["name"] = usename
+            meta[item]["stratigraphic"] = stratigraphic
+            meta[item]["offset"] = offset
+
+    def _data_process_content(self):
+        """Process the content block (within the data block), which can be complex."""
+        logger.info("Evaluate content")
+        content = self.dataio._content
+        meta = self.dataio._meta_data
+        usecontent = "unset"
+        useextra = None
+        if content is None:
+            usecontent = "undefined"
+
+        elif isinstance(content, str):
+            usecontent = content
+
+        else:
+            usecontent = (list(content.keys()))[0]
+            useextra = content[usecontent]

-    def _surface_to_file(self):
-        """Save a RegularSurface instance."""
+        if usecontent not in ALLOWED_CONTENTS:
+            raise ValueError(f"Sorry, content <{usecontent}> is not in list!")
+
+        meta["content"] = usecontent
+        if useextra:
+            meta[usecontent] = useextra
+
+    def _data_process_timedata(self):
+        """Process the time subfield."""
+        # first detect if timedata is given, then process it
+        logger.info("Evaluate data:time attribute")
+        meta = self.dataio._meta_data
+        timedata = self.dataio._timedata
+        if timedata is None:
+            return
+
+        for xtime in timedata:
+            tdate = str(xtime[0])
+            tlabel = None
+            if len(xtime) > 1:
+                tlabel = xtime[1]
+            tdate = tdate.replace("-", "")  # 2021-04-23 --> 20210423
+            tdate = datetime.strptime(tdate, "%Y%m%d")
+            tdate = tdate.strftime("%Y-%m-%dT%H:%M:%S")
+            if "time" not in meta:
+                meta["time"] = list()
+            usetime = OrderedDict()
+            usetime["value"] = tdate
+            if tlabel:
+                usetime["label"] = tlabel
+            meta["time"].append(usetime)
+
+    def _data_process_various(self):
+        """Process "all the rest" of the generic items.
+
+        i.e.::
+
+            unit,
+            vertical_domain
+            depth_reference
+            properties (as tmp)
+            grid_model
+            is_prediction
+            is_observation
+        """
+        logger.info("Process various general items in data block")
+        meta = self.dataio._meta_data
+        logger.debug("Vertical domain input: %s", self.dataio._vertical_domain)
+        meta["unit"] = self.dataio._unit
+        (meta["vertical_domain"], meta["depth_reference"]) = list(
+            self.dataio._vertical_domain.items()
+        )[0]
+        meta["is_prediction"] = self.dataio._is_prediction
+        meta["is_observation"] = self.dataio._is_observation
+
+        # tmp solution for properties
+        meta["properties"] = list()
+        props = OrderedDict()
+        props["name"] = "SomeName"
+        props["attribute"] = "SomeAttribute"
+        props["is_discrete"] = False
+        props["calculation"] = None
+        meta["properties"].append(props)
+
+        # tmp:
+        meta["grid_model"] = OrderedDict()
+        meta["grid_model"]["name"] = "SomeGrid"
+
+        # tmp:
+        meta["description"] = list()
+        meta["description"].append("This is description line 1")
+        meta["description"].append("This is description line 2")
+
+    def _data_process_object(self):
+        """Process data fields which are object dependent.
+
+        I.e.::
+
+            layout
+            spec
+            bbox
+
+        Note that the 'format' field will be added in _item_to_file.
+        """
+
+        if self.subtype == "RegularSurface":
+            self._data_process_object_regularsurface()
+
+    def _data_process_object_regularsurface(self):
+        """Process/collect the data items for RegularSurface."""
+        logger.info("Process data metadata for RegularSurface")
+
+        dataio = self.dataio
+        regsurf = self.obj
+
+        meta = dataio._meta_data  # shortform
+
+        meta["layout"] = "regular"
+
+        # define spec record
+        specs = regsurf.metadata.required
+        newspecs = OrderedDict()
+        for spec, val in specs.items():
+            if isinstance(val, (np.float32, np.float64)):
+                val = float(val)
+            newspecs[spec] = val
+        meta["spec"] = newspecs
+        meta["spec"]["undef"] = 1.0e30  # irap binary undef
+
+        meta["bbox"] = OrderedDict()
+        meta["bbox"]["xmin"] = float(regsurf.xmin)
+        meta["bbox"]["xmax"] = float(regsurf.xmax)
+        meta["bbox"]["ymin"] = float(regsurf.ymin)
+        meta["bbox"]["ymax"] = float(regsurf.ymax)
+        meta["bbox"]["zmin"] = float(regsurf.values.min())
+        meta["bbox"]["zmax"] = float(regsurf.values.max())
+        logger.info("Process data metadata for RegularSurface... done!!")
done!!") + + def _fmu_inject_workflow(self): + """Inject workflow into fmu metadata block.""" + self.dataio._meta_fmu["workflow"] = self.dataio._workflow + + def _item_to_file(self): + logger.info("Export item to file...") + if self.subtype == "RegularSurface": + self._item_to_file_regularsurface() + + def _item_to_file_regularsurface(self): + """Write RegularSurface to file""" + logger.info(f"Export {self.subtype} to file...") dataio = self.dataio # shorter obj = self.obj - if isinstance(dataio._description, str): - attr = dataio._description.lower().replace(" ", "_") + if isinstance(dataio._tagname, str): + attr = dataio._tagname.lower().replace(" ", "_") else: attr = None fname, fpath = _utils.construct_filename( obj.name, - descr=attr, + tagname=attr, loc="surface", outroot=dataio.export_root, verbosity=dataio._verbosity, @@ -70,18 +353,24 @@ def _surface_to_file(self): if "irap" in dataio.surface_fformat: obj.to_file(outfile, fformat="irap_binary") md5sum = _utils.md5sum(outfile) + self.dataio._meta_data["format"] = "irap_binary" # populate the file block which needs to done here dataio._meta_file["md5sum"] = md5sum dataio._meta_file["relative_path"] = str(relpath) dataio._meta_file["absolute_path"] = str(abspath) - allmeta = self._process_all_metadata("RegularSurface") - _utils.export_metadata_file(metafile, allmeta, verbosity=self.verbosity) + allmeta = self._item_to_file_collect_all_metadata() + _utils.export_metadata_file( + metafile, allmeta, verbosity=self.verbosity, savefmt=dataio.meta_format + ) else: + self.dataio._meta_data["format"] = "hdf" obj.to_hdf(outfile) - def _process_all_metadata(self, subtype): + def _item_to_file_collect_all_metadata(self): """Process all metadata for actual instance.""" + logger.info("Collect all metadata") + dataio = self.dataio allmeta = OrderedDict() @@ -94,97 +383,7 @@ def _process_all_metadata(self, subtype): allmeta["masterdata"] = dataio._meta_masterdata allmeta["tracklog"] = dataio._meta_tracklog allmeta["fmu"] = dataio._meta_fmu + allmeta["data"] = dataio._meta_data + print("YYY+n", json.dumps(allmeta, indent=2, default=str)) - data_meta = None - if subtype == "RegularSurface": - data_meta = self._process_data_regularsurface_metadata() - allmeta["data"] = data_meta - - # process_display_metadata(dataio, regsurf) - # allmeta["display"] = dataio._meta_display - - logger.debug( - "Metadata after data:\n%s", json.dumps(allmeta, indent=2, default=str) - ) return allmeta - - def _process_data_regularsurface_metadata(self): - """Process the actual 'data' block in metadata for RegularSurface.""" - logger.info("Process data metadata for RegularSurface") - - dataio = self.dataio - regsurf = self.obj - - meta = dataio._meta_data # shortform - strat = dataio._meta_strat # shortform - - meta["layout"] = "regular" - - # true name (will backup to model name if not present) - if strat is None: - meta["name"] = regsurf.name - elif strat is not None and regsurf.name not in strat: - meta["name"] = regsurf.name - else: - meta["name"] = strat[regsurf.name].get("name", regsurf.name) - meta["stratigraphic"] = strat[regsurf.name].get("stratigraphic", False) - meta["alias"] = strat[regsurf.name].get("alias", None) - meta["stratigraphic_alias"] = strat[regsurf.name].get( - "stratigraphic_alias", None - ) - - content, extra = self.process_data_content() - meta["content"] = content - if extra is not None: - meta[content] = extra - - # meta["properties"] = dataio._details.get("properties", None) - meta["unit"] = dataio._unit - meta["vertical_domain"] = 
-        meta["is_prediction"] = dataio._is_prediction
-        meta["is_observation"] = dataio._is_observation
-        if dataio._timedata is not None:
-            meta["time1"] = dataio._timedata.get("time1", None)
-            meta["time2"] = dataio._timedata.get("time2", None)
-
-        # define spec record
-        specs = regsurf.metadata.required
-        newspecs = OrderedDict()
-        for spec, val in specs.items():
-            if isinstance(val, np.float):
-                val = float(val)
-            newspecs[spec] = val
-        meta["spec"] = newspecs
-
-        meta["spec"]["undef"] = 1.0e30  # irap binary undef
-
-        meta["bbox"] = OrderedDict()
-        meta["bbox"]["xmin"] = float(regsurf.xmin)
-        meta["bbox"]["xmax"] = float(regsurf.xmax)
-        meta["bbox"]["ymin"] = float(regsurf.ymin)
-        meta["bbox"]["ymax"] = float(regsurf.ymax)
-        meta["bbox"]["zmin"] = float(regsurf.values.min())
-        meta["bbox"]["zmax"] = float(regsurf.values.max())
-        logger.info("Process data metadata for RegularSurface... done!!")
-        return meta
-
-    def process_data_content(self):
-        """Process the content block (within data block) which can complex."""
-        content = self.dataio._content
-
-        usecontent = "unset"
-        useextra = None
-        if content is None:
-            usecontent = "undefined"
-
-        elif isinstance(content, str):
-            usecontent = content
-
-        else:
-            usecontent = (list(content.keys()))[0]
-            useextra = content[usecontent]
-
-        if usecontent not in ALLOWED_CONTENTS:
-            raise ValueError(f"Sorry, content <{usecontent}> is not in list!")
-
-        return usecontent, useextra
diff --git a/src/fmu/dataio/_utils.py b/src/fmu/dataio/_utils.py
index 8e8f4711c..589ec1b35 100644
--- a/src/fmu/dataio/_utils.py
+++ b/src/fmu/dataio/_utils.py
@@ -2,6 +2,7 @@
 import logging
 from pathlib import Path
 import hashlib
+import json

 from . import _oyaml as oyaml

@@ -10,7 +11,7 @@
 def construct_filename(
     name,
-    descr=None,
+    tagname=None,
     t1=None,
     t2=None,
     fmu=1,
@@ -23,9 +24,9 @@ def construct_filename(
     fmu style 1:

         surface:
-            namehorizon--description
-            namehorizon--description--t1
-            namehorizon--description--t2_t1
+            namehorizon--tagname
+            namehorizon--tagname--t1
+            namehorizon--tagname--t2_t1

         e.g.
             topvolantis--ds_gf_extracted

@@ -35,9 +36,9 @@ def construct_filename(
         grid:
            gridname--

         gridproperty
-            gridname--propdescription
-            gridname--description--t1
-            gridname--description--t2_t1
+            gridname--proptagname
+            gridname--tagname--t1
+            gridname--tagname--t2_t1

         e.g.
             geogrid_valysar--phit

@@ -55,8 +56,8 @@ def construct_filename(
     if fmu == 1:
         stem = name.lower()

-        if descr:
-            stem += "--" + descr.lower()
+        if tagname:
+            stem += "--" + tagname.lower()

         if t1 and not t2:
             stem += "--" + str(t1).lower()
@@ -100,13 +101,20 @@ def verify_path(createfolder, filedest, filename, ext, verbosity="WARNING"):
     return path, metapath, relpath, abspath


-def export_metadata_file(yfile, metadata, verbosity="WARNING") -> None:
+def export_metadata_file(yfile, metadata, savefmt="yaml", verbosity="WARNING") -> None:
     """Export generically, and ordered, to the complementary metadata file."""
     logger.setLevel(level=verbosity)
     if metadata:
-        yamlblock = oyaml.safe_dump(metadata)
-        with open(yfile, "w") as stream:
-            stream.write(yamlblock)
+        if savefmt == "yaml":
+            yamlblock = oyaml.safe_dump(metadata)
+            with open(yfile, "w") as stream:
+                stream.write(yamlblock)
+        else:
+            jfile = str(yfile).replace(".yml", ".json")
+            jsonblock = json.dumps(metadata, default=str, indent=2)
+            with open(jfile, "w") as stream:
+                stream.write(jsonblock)
+
     else:
         raise RuntimeError(
             "Export of metadata was requested, but no metadata are present."
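For orientation between the hunks: the fmu style 1 naming scheme that construct_filename documents above can be sketched in a few lines. This is an illustration only, not the library function; the t2_t1 branch is assumed from the docstring, since that part of construct_filename falls outside the visible hunks::

    # Illustrative sketch of the fmu style 1 file-stem logic (not fmu.dataio code)
    def make_stem(name, tagname=None, t1=None, t2=None):
        stem = name.lower()
        if tagname:
            stem += "--" + tagname.lower()
        if t1 and not t2:
            stem += "--" + str(t1).lower()
        elif t1 and t2:
            # assumed from the docstring: namehorizon--tagname--t2_t1
            stem += "--" + str(t2).lower() + "_" + str(t1).lower()
        return stem

    assert make_stem("TopVolantis", tagname="ds_gf_extracted") == "topvolantis--ds_gf_extracted"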
diff --git a/src/fmu/dataio/dataio.py b/src/fmu/dataio/dataio.py
index 12dbfaa43..593a6db8c 100644
--- a/src/fmu/dataio/dataio.py
+++ b/src/fmu/dataio/dataio.py
@@ -1,19 +1,19 @@
 """Module for DataIO class.

 The metadata spec is presented in
-https://github.com/equinor/fmu-metadata/blob/dev/definitions/0.7.0/
+https://github.com/equinor/fmu-metadata/blob/dev/definitions/0.7.*/

-The processing is based on handling first level keys which are:
+The processing is based on handling first level keys which are

--- scalar --
-$schema     |
-$version    | "dollars", source fmuconfig
-$source     |
+-- scalar SPECIALS (previously marked with a $ prefix) --
+schema      | hard set in code
+version     | "dollars", source fmuconfig
+source      |

 class       - determined by datatype, inferred

 -- nested --
-file        - file paths and checksums
+file        - file paths and checksums (changed); still a discussion of where this belongs
 tracklog    - data events, source = ?
 data        - about the data (see class). inferred from data + fmuconfig
 display     - Deduced mostly from fmuconfig
@@ -43,7 +43,23 @@ class       - determined by datatype, inferred
 logger = logging.getLogger(__name__)
 logger.setLevel(logging.CRITICAL)

-DOLLARS = {"$schema": "unset", "$version": "0.0.0", "$source": "undefined"}
+DOLLARS = OrderedDict(
+    [
+        (
+            "schema",
+            "https://main-fmu-schemas-dev.radix.equinor.com/schemas/0.7.0/"
+            "fmu_results.json",
+        ),
+        (
+            "version",
+            "0.7.1",
+        ),
+        (
+            "source",
+            "fmu",
+        ),
+    ]
+)


 class ExportData:
@@ -53,34 +69,49 @@ class ExportData:

     grid_fformat = "hdf"
     export_root = "../../share/results"
     createfolder = True
+    meta_format = "yaml"

     def __init__(
         self,
+        name: Optional[str] = None,
+        relation: Optional[dict] = None,
         config: Optional[dict] = None,
         content: Optional[Union[str, dict]] = None,
         unit: Optional[str] = None,
-        description: Optional[str] = None,
+        tagname: Optional[str] = None,
         vertical_domain: Optional[dict] = {"depth": "msl"},
-        timedata: Optional[dict] = None,
+        timedata: Optional[list] = None,
         is_prediction: Optional[bool] = True,
         is_observation: Optional[bool] = False,
+        workflow: Optional[str] = None,
         verbosity: Optional[str] = "CRITICAL",
     ) -> None:
         """Instantiate ExportData object.

         Args:
+            name: The name of the object. If not set, the name is inferred from
+                the xtgeo object if possible. The name is then checked against the
+                stratigraphy list, and replaced with the official stratigraphic
+                name if found. For example, if "TopValysar" is the model name and
+                the actual name is "Valysar Top Fm.", the latter will be used.
+            relation: The relation of the object with respect to itself and/or
+                other stratigraphic units. The default is None, but for e.g. seismic
+                attributes this can be important. The input is a dictionary with
+                the following fields: to-be...
             config: A configuration dictionary. In the standard case this is read
                 from FMU global variables (via fmuconfig). The dictionary must contain
                 some predefined main level keys.
             content: Is a string or a dictionary with one key. Example is "depth" or
                 {"fluid_contact": {"xxx": "yyy", "zzz": "uuu"}}
             unit: Is the unit of the exported item(s), e.g. "m" or "fraction".
-            description: This is a short description which be be a part of file name
+            tagname: This is a short tag description which will be part of the file name.
             vertical_domain: This is dictionary with a key and a reference e.g.
                 {"depth": "msl"} which is default
-            timedata: If given, display timedata...
+            timedata: If given, a list of lists with dates, e.g.
+                [[20200101, "firsttime"], [20180101, "secondtime"]] or just
+                [[20210101]]
             is_prediction: True (default) if model prediction data
             is_observation: Default is False.
+            workflow: Short tag description of the workflow (as a description)
             verbosity: Is logging/message level for this module. Input as
                 in standard python logging; e.g. "WARNING", "INFO".
@@ -89,14 +120,17 @@ def __init__(
         """
         logger.info("Create instance of ExportData")

+        self._name = name
+        self._relation = relation
         self._config = config
         self._content = content
-        self._unit = (unit,)
-        self._description = description
+        self._unit = unit
+        self._tagname = tagname
         self._timedata = timedata
         self._vertical_domain = vertical_domain
         self._is_prediction = is_prediction
         self._is_observation = is_observation
+        self._workflow = workflow
         self._verbosity = verbosity

         logger.setLevel(level=self._verbosity)
@@ -105,7 +139,7 @@ def __init__(
         # define chunks of metadata for primary first order categories
         # (except class which is set directly later)
         self._meta_strat = None
-        self._meta_dollars = OrderedDict()  # $version etc
+        self._meta_dollars = DOLLARS  # schema, version, source
         self._meta_file = OrderedDict()  # file (to be populated in export job)
         self._meta_tracklog = []  # tracklog:
         self._meta_data = OrderedDict()  # data:
@@ -118,8 +152,7 @@ def __init__(
         self._get_meta_strat()

         # Get the metadata for some of the general stuff, fully or partly
-        # Note that data are found later (e.g. in _surface_io)
-        self._get_meta_dollars()
+        # Note that data are found later (e.g. in _export_item)
         self._get_meta_masterdata()
         self._get_meta_access()
         self._get_meta_tracklog()
@@ -132,29 +165,6 @@ def __init__(
     # Private metadata methods which retrieve metadata that are not closely linked to
     # the actual instance to be exported.

-    def _get_meta_dollars(self) -> None:
-        """Get metadata from the few $ from the fmuconfig file.
-
-        $schema
-        $version
-        $source
-        """
-
-        if self._config is None:
-            logger.warning("Config is missing, just use defaults")
-            for dollar, data in DOLLARS.items():
-                self._meta_dollars[dollar] = data
-            return
-
-        for dollar in DOLLARS.keys():
-            if dollar not in self._config.keys():
-                raise ValueError(f"No {dollar} present in config.")
-
-            self._meta_dollars[dollar] = self._config[dollar]
-
-        logger.info("Metadata for $ variables are set!")
-        return
-
     def _get_meta_masterdata(self) -> None:
         """Get metadata from masterdata section in config.

@@ -398,7 +408,9 @@ def _store_ensemble_metadata(self):
         meta["masterdata"] = self._meta_masterdata
         meta["fmu"] = OrderedDict()
         meta["fmu"]["ensemble"] = self._meta_fmu["ensemble"].copy()
-        _utils.export_metadata_file(metafile, meta, verbosity=self._verbosity)
+        _utils.export_metadata_file(
+            metafile, meta, verbosity=self._verbosity, savefmt=self.meta_format
+        )

     else:
         # read the current metadatafile and compare ensemble id to issue a warning
diff --git a/tests/data/drogon/global_config2/global_variables.yml b/tests/data/drogon/global_config2/global_variables.yml
index 67fcdbe91..03021008d 100644
--- a/tests/data/drogon/global_config2/global_variables.yml
+++ b/tests/data/drogon/global_config2/global_variables.yml
@@ -1,20 +1,18 @@
 # Autogenerated from global configuration.
 # DO NOT EDIT THIS FILE MANUALLY!
-# Machine st-linrgs200.st.statoil.no by user jriv, at 2021-04-20 18:53:52.770006, using fmu.config ver. 1.0.5
-$schema: https://main-fmu-schemas-dev.radix.equinor.com/schemas/0.7.0/fmu_results.json
-$version: 0.7.0
-$source: fmu
+# Machine xxx by user jriv, at 2021-04-20 18:53:52.770006, using fmu.config ver. 1.0.5
+
 masterdata:
   smda:
     country:
-    - identifier: Norway
-      uuid: ad214d85-8a1d-19da-e053-c918a4889309
+      - identifier: Norway
+        uuid: ad214d85-8a1d-19da-e053-c918a4889309
     discovery:
-    - short_identifier: DROGON
-      uuid: ad214d85-8a1d-19da-e053-c918a4889309
+      - short_identifier: DROGON
+        uuid: ad214d85-8a1d-19da-e053-c918a4889309
     field:
-    - identifier: DROGON
-      uuid: 00000000-0000-0000-0000-000000000000
+      - identifier: DROGON
+        uuid: 00000000-0000-0000-0000-000000000000
     coordinate_system:
       identifier: ST_WGS84_UTM37N_P32637
       uuid: ad214d85-dac7-19da-e053-c918a4889309
@@ -31,8 +29,8 @@ model:
   name: ff
   revision: AUTO
   description:
-  - Drogon course data set for Equinor FMU
-  - Planned for open sourcing
+    - Drogon course data set for Equinor FMU
+    - Planned for open sourcing
 stratigraphy:
   MSL:
     stratigraphic: false
@@ -44,11 +42,11 @@ stratigraphy:
     stratigraphic: true
     name: VOLANTIS GP. Top
     alias:
-    - TopVOLANTIS
-    - TOP_VOLANTIS
+      - TopVOLANTIS
+      - TOP_VOLANTIS
     stratigraphic_alias:
-    - TopValysar
-    - Valysar Fm. Top
+      - TopValysar
+      - Valysar Fm. Top
   TopTherys:
     stratigraphic: true
     name: Therys Fm. Top
@@ -84,43 +82,43 @@ fmu:
   name: ff
   revision: 21.0.0.dev
   desciption:
-  - Drogon model for courses etc
+    - Drogon model for courses etc
   realization: null
   ensemble: null
 global:
   SEISMIC_DATES:
-  - 2018-01-01
-  - 2018-07-01
-  - 2019-07-01
-  - 2020-07-01
-  SEISMIC_DIFFDATES:
-  - - 2018-07-01
-    - 2018-01-01
-  - - 2019-07-01
-    - 2018-01-01
-  - - 2020-07-01
-    - 2018-01-01
-  - - 2019-07-01
-    - 2018-07-01
-  - - 2020-07-01
-    - 2019-07-01
+    - 2018-01-01
+    - 2018-07-01
+    - 2019-07-01
+    - 2020-07-01
+  SEISMIC_DIFFDATES:
+    - - 2018-07-01
+      - 2018-01-01
+    - - 2019-07-01
+      - 2018-01-01
+    - - 2020-07-01
+      - 2018-01-01
+    - - 2019-07-01
+      - 2018-07-01
+    - - 2020-07-01
+      - 2019-07-01
   ECLIPSE_INIT_DATE: 2018-01-01
   ECLIPSE_HIST_DATES:
-  - 2018-01-01
-  - 2018-07-01
-  - 2019-07-01
-  - 2020-07-01
-  ECLIPSE_HIST_DIFFDATES:
-  - - 2018-07-01
-    - 2018-01-01
-  - - 2019-07-01
-    - 2018-01-01
-  - - 2020-07-01
-    - 2018-01-01
-  - - 2019-07-01
-    - 2018-07-01
-  - - 2020-07-01
-    - 2019-07-01
+    - 2018-01-01
+    - 2018-07-01
+    - 2019-07-01
+    - 2020-07-01
+  ECLIPSE_HIST_DIFFDATES:
+    - - 2018-07-01
+      - 2018-01-01
+    - - 2019-07-01
+      - 2018-01-01
+    - - 2020-07-01
+      - 2018-01-01
+    - - 2019-07-01
+      - 2018-07-01
+    - - 2020-07-01
+      - 2019-07-01
   DCONV_ALTERNATIVE: 1
   HUM_MODEL_MODE: 1
   FACIESMODEL_ALTERNATIVE: 0
@@ -1167,12 +1165,12 @@ global:
   rms:
     horizons:
       TOP_RES:
-      - TopVolantis
-      - TopTherys
-      - TopVolon
-      - BaseVolantis
+        - TopVolantis
+        - TopTherys
+        - TopVolon
+        - BaseVolantis
     zones:
       ZONE_RES:
-      - Valysar
-      - Therys
-      - Volon
+        - Valysar
+        - Therys
+        - Volon
diff --git a/tests/test_export_item.py b/tests/test_export_item.py
new file mode 100644
index 000000000..370c4d997
--- /dev/null
+++ b/tests/test_export_item.py
@@ -0,0 +1,181 @@
+"""Test the individual functions in module _export_item."""
+from collections import OrderedDict
+import xtgeo
+import json
+import yaml
+import pytest
+
+import fmu.dataio
+import fmu.dataio._export_item as ei
+
+CFG = OrderedDict()
+CFG["template"] = {"name": "Test", "revision": "AUTO"}
+CFG["masterdata"] = {
+    "smda": {
+        "country": [
+            {"identifier": "Norway", "uuid": "ad214d85-8a1d-19da-e053-c918a4889309"}
+        ],
+        "discovery": [{"short_identifier": "abdcef", "uuid": "ghijk"}],
+    }
+}
+
+CFG2 = {}
+with open("tests/data/drogon/global_config2/global_variables.yml", "r") as stream:
+    CFG2 = yaml.safe_load(stream)
+
+
+def test_data_process_name():
+    """Test the _data_process_name function."""
+    # test case 1
+    dataio = fmu.dataio.ExportData(
+        name="Valysar",
+        config=CFG2,
+        content="depth",
+        tagname="WhatEver",
+    )
+    obj = xtgeo.RegularSurface(name="SomeName")
+    exportitem = ei._ExportItem(dataio, obj, verbosity="INFO")
+    exportitem._data_process_name()
+    assert dataio._meta_data["name"] == "Valysar Fm."
+
+    # test case 2, name is given via object
+    dataio = fmu.dataio.ExportData(
+        config=CFG2,
+        content="depth",
+        tagname="WhatEver",
+    )
+    obj = xtgeo.RegularSurface(name="Valysar")
+    exportitem = ei._ExportItem(dataio, obj, verbosity="INFO")
+    exportitem._data_process_name()
+    assert dataio._meta_data["name"] == "Valysar Fm."
+
+    # test case 3, name is given via object but not present in stratigraphy
+    dataio = fmu.dataio.ExportData(
+        config=CFG2,
+        content="depth",
+        tagname="WhatEver",
+    )
+    obj = xtgeo.RegularSurface(name="Something else")
+    exportitem = ei._ExportItem(dataio, obj, verbosity="INFO")
+    exportitem._data_process_name()
+    assert dataio._meta_data["name"] == "Something else"
+    assert "stratigraphic" not in dataio._meta_data
+
+
+def test_data_process_relation():
+    """Test the _data_process_relation function."""
+    # 1: name is given by RMS name:
+    rel1 = {
+        "offset": 4.0,
+        "top": {"ref": "TopVolantis", "offset": 2.0},
+        "base": {"ref": "TopVolon", "offset": 0.0},
+    }
+    # 2: name is given as a mix of SMDA name and RMS name:
+    rel2 = {
+        "offset": 4.0,
+        "top": {"ref": "TopVolantis", "offset": 2.0},
+        "base": {"ref": "Volon FM. Top", "offset": 0.0},
+    }
+    # 3: ref is missing for top
+    rel3 = {
+        "offset": 4.0,
+        "top": {"offset": 2.0},
+        "base": {"ref": "Volon FM. Top", "offset": 0.0},
+    }
+
+    # test rel1
+    dataio = fmu.dataio.ExportData(
+        name="Valysar",
+        relation=rel1,
+        config=CFG2,
+        content="depth",
+        tagname="WhatEver",
+    )
+    obj = xtgeo.RegularSurface()
+
+    exportitem = ei._ExportItem(dataio, obj, verbosity="INFO")
+
+    exportitem._data_process_relation()
+    assert dataio._meta_data["offset"] == 4.0
+    assert dataio._meta_data["top"]["name"] == "VOLANTIS GP. Top"
+    assert dataio._meta_data["base"]["stratigraphic"] is True
+
+    # test rel2
+    dataio = fmu.dataio.ExportData(
+        name="Valysar",
+        relation=rel2,
+        config=CFG2,
+        content="depth",
+        tagname="WhatEver",
+    )
+    obj = xtgeo.RegularSurface()
+
+    exportitem = ei._ExportItem(dataio, obj, verbosity="INFO")
+
+    with pytest.raises(ValueError) as verr:
+        exportitem._data_process_relation()
+    assert "Cannot find Volon FM. Top" in str(verr)
Top" in str(verr) + + # test rel3 + dataio = fmu.dataio.ExportData( + name="Valysar", + relation=rel3, + config=CFG2, + content="depth", + tagname="WhatEver", + ) + obj = xtgeo.RegularSurface() + + exportitem = ei._ExportItem(dataio, obj, verbosity="INFO") + + # with pytest.warns(UserWarning) as uwarn: + # exportitem._data_process_relation() + # assert "Relation top and/base is present but" in str(uwarn) + + +def test_data_process_timedata(): + """Test the _data_process_timedata function.""" + # test case 1 + dataio = fmu.dataio.ExportData( + name="Valysar", + config=CFG2, + content="depth", + timedata=[["20210101", "first"], [20210902, "second"]], + tagname="WhatEver", + ) + obj = xtgeo.RegularSurface(name="SomeName") + exportitem = ei._ExportItem(dataio, obj, verbosity="INFO") + exportitem._data_process_timedata() + print(json.dumps(dataio._meta_data["time"], indent=2, default=str)) + assert dataio._meta_data["time"][0]["value"] == "2021-01-01T00:00:00" + assert dataio._meta_data["time"][0]["label"] == "first" + + +def test_data_process_content(): + """Test the _data_process_content function.""" + # test case 1 + dataio = fmu.dataio.ExportData( + name="Valysar", + config=CFG2, + content="depth", + timedata=[["20210101", "first"], [20210902, "second"]], + tagname="WhatEver", + ) + obj = xtgeo.RegularSurface(name="SomeName") + exportitem = ei._ExportItem(dataio, obj, verbosity="INFO") + exportitem._data_process_content() + assert dataio._meta_data["content"] == "depth" + + # test case 2 + dataio = fmu.dataio.ExportData( + name="Valysar", + config=CFG2, + content={"seismic": {"attribute": "attribute_timeshifted_somehow"}}, + timedata=[["20210101", "first"], [20210902, "second"]], + tagname="WhatEver", + ) + obj = xtgeo.RegularSurface(name="SomeName") + exportitem = ei._ExportItem(dataio, obj, verbosity="INFO") + exportitem._data_process_content() + assert dataio._meta_data["content"] == "seismic" + assert dataio._meta_data["seismic"]["attribute"] == "attribute_timeshifted_somehow" diff --git a/tests/test_fmu_dataio.py b/tests/test_fmu_dataio.py index ee1a64e5c..570c55b74 100644 --- a/tests/test_fmu_dataio.py +++ b/tests/test_fmu_dataio.py @@ -5,9 +5,6 @@ import fmu.dataio CFG = OrderedDict() -CFG["$schema"] = "some_scheme" -CFG["$version"] = "0.7.0" -CFG["$source"] = "fmu" CFG["model"] = {"name": "Test", "revision": "21.0.0"} CFG["masterdata"] = { "smda": { @@ -36,13 +33,11 @@ def test_instantate_class_no_keys(): def test_get_meta_dollars(): - """The private routine that provides $.""" + """The private routine that provides special (earlier with $ in front).""" case = fmu.dataio.ExportData() case._config = CFG - case._get_meta_dollars() - assert case._meta_dollars["$schema"] == CFG["$schema"] - assert case._meta_dollars["$version"] == CFG["$version"] - assert case._meta_dollars["$source"] == CFG["$source"] + assert "schema" in case._meta_dollars["schema"] + assert "fmu" in case._meta_dollars["source"] def test_get_meta_masterdata(): diff --git a/tests/test_fmu_dataio_surface.py b/tests/test_fmu_dataio_surface.py index 91d0a5a98..2ff61e997 100644 --- a/tests/test_fmu_dataio_surface.py +++ b/tests/test_fmu_dataio_surface.py @@ -57,7 +57,7 @@ def test_surface_io_larger_case(tmp_path): timedata=None, is_prediction=True, is_observation=False, - description="what Descr", + tagname="what Descr", verbosity="WARNING", ) exp._pwd = tmp_path diff --git a/tests/test_fmu_dataio_utils.py b/tests/test_fmu_dataio_utils.py index 44b0ac5b3..7385483ad 100644 --- a/tests/test_fmu_dataio_utils.py +++ 
@@ -5,7 +5,7 @@


 @pytest.mark.parametrize(
-    "name, descr, t1, t2, loc, expectedstem, expectedpath",
+    "name, tagname, t1, t2, loc, expectedstem, expectedpath",
     [
         (
             "some",
@@ -55,11 +55,11 @@
     ],
 )
 def test_utils_construct_file(
-    tmp_path, name, descr, t1, t2, loc, expectedstem, expectedpath
+    tmp_path, name, tagname, t1, t2, loc, expectedstem, expectedpath
 ):
     """Testing construct file."""
     stem, dest = _utils.construct_filename(
-        name, descr=descr, loc=loc, t1=t1, t2=t2, outroot=tmp_path
+        name, tagname=tagname, loc=loc, t1=t1, t2=t2, outroot=tmp_path
     )

     assert stem == expectedstem
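Taken together, the pieces above can be exercised roughly as follows. This is a hypothetical usage sketch based only on what the diff itself exposes: the config comes from the Drogon global_variables.yml used by the tests, and the final export call is left commented out because the public entry point that drives _ExportItem.save_to_file is outside these hunks::

    import yaml
    import xtgeo
    import fmu.dataio

    # parse the FMU global config (path taken from the tests above)
    with open("tests/data/drogon/global_config2/global_variables.yml") as stream:
        cfg = yaml.safe_load(stream)

    # the surface name will be checked against cfg["stratigraphy"] and replaced
    # with the official SMDA name ("VOLANTIS GP. Top") if found
    surf = xtgeo.RegularSurface(name="TopVolantis")

    exp = fmu.dataio.ExportData(
        name="TopVolantis",
        config=cfg,
        content="depth",
        unit="m",
        tagname="ds_extract",  # becomes "--ds_extract" in the file stem
        timedata=[[20180101, "base"], [20200101, "monitor"]],
        is_prediction=True,
        workflow="rms structural model",  # injected into the fmu block
    )
    # exp.<export method>(surf)  # assumed; the export entry point is not shown in this diff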