diff --git a/.pylintrc b/.pylintrc index 6f02476..bc3c0ed 100644 --- a/.pylintrc +++ b/.pylintrc @@ -59,7 +59,8 @@ disable=raw-checker-failed, broad-except, unsubscriptable-object, unsupported-membership-test, - consider-using-f-string + consider-using-f-string, + unsupported-binary-operation # Enable the message, report, category or checker with the given id(s). You can diff --git a/AUTHORS.md b/AUTHORS.md index b525133..b38e793 100644 --- a/AUTHORS.md +++ b/AUTHORS.md @@ -22,6 +22,7 @@ This file keeps track of authors contributions. * Natalia Jimenez * Quentin Fardet * Jerome Lebreton +* Tommy Calendini Update here with new contributors. diff --git a/CHANGELOG.md b/CHANGELOG.md index 9fd407a..730d172 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,45 @@ # Changelog +## 0.4.0 (October 2024) + +### Added +- Add unit tests for optical flows. [#114] +- Subpix taken into account in the dichotomy. [#148] +- Adding requirement numbers to the test docstring. [#143] +- Add requirement on html-report test. [#167] +- Variable initial disparity added to the configuration file. [#76] +- Adding cardinal sine module in interpolation filter. [#146] +- Mask added to the configuration file. [#157] +- Variable disparity taken into account in matching cost step. [#152] +- Add constant.py and criteria.py files for masks. [#159] +- Add right-disparity-outside criterion. [#162] +- Variable disparity taken into account in dichotomy. [#154] +- Add accuracy tests for dichotomy. [#126] +- Variable disparity taken into account in optical flow. [#158] +- Add first resource tests. [#174] +- Setting up disparity grids at inputs. [#165] +- Add left_nodata and right_nodata criteria. [#160] +- Add left_invalid and right_invalid criteria. [#161] +- Add criteria dataarray. [#163] +- Add profiling. [#175] + +### Fixed +- Fix the use of a step with the optical flow method in the refinement step. [#119] +- Fix ROI coordinates when the first point is within the margin. [#142] +- Fix sphinx errors. [#168] +- Remove np.inf on cost volume. [#170] + +### Changed +- State machine callback changed from after to before. [#144] +- Update pylint version. [#153] +- Documentation updated with new parameters for variable initial disparity. [#150] +- Update numpy version. [#145] +- Removal of disparity grids in the state machine. [#171] +- Removal of the disparity condition with the interpolation step. [#169] +- Update dichotomy documentation. [#166] +- Pixel size output updated as a function of step size. 
[#164] + + ## 0.3.0 (June 2024) ### Added diff --git a/data_samples/images/maricopa.zip b/data_samples/images/maricopa.zip index ffabbbf..1611d3b 100644 Binary files a/data_samples/images/maricopa.zip and b/data_samples/images/maricopa.zip differ diff --git a/data_samples/json_conf_files/a_basic_pipeline.json b/data_samples/json_conf_files/a_basic_pipeline.json index ef5b582..95f8b28 100644 --- a/data_samples/json_conf_files/a_basic_pipeline.json +++ b/data_samples/json_conf_files/a_basic_pipeline.json @@ -8,8 +8,8 @@ "img": "./maricopa/right.tif", "nodata": -9999 }, - "col_disparity": [-2, 2], - "row_disparity": [-2, 2] + "col_disparity": {"init": 0, "range": 2}, + "row_disparity": {"init": 0, "range": 2} }, "pipeline": { "matching_cost": { diff --git a/data_samples/json_conf_files/a_dichotomy_pipeline.json b/data_samples/json_conf_files/a_dichotomy_pipeline.json index ca2bdd3..f527964 100644 --- a/data_samples/json_conf_files/a_dichotomy_pipeline.json +++ b/data_samples/json_conf_files/a_dichotomy_pipeline.json @@ -8,8 +8,8 @@ "img": "./maricopa/right.tif", "nodata": -9999 }, - "col_disparity": [-2, 2], - "row_disparity": [-2, 2] + "col_disparity": {"init": 0, "range": 2}, + "row_disparity": {"init": 0, "range": 2} }, "pipeline": { "matching_cost": { @@ -24,7 +24,7 @@ "refinement": { "refinement_method": "dichotomy", "iterations": 2, - "filter": "bicubic" + "filter": { "method": "bicubic"} } } - } \ No newline at end of file + } diff --git a/data_samples/json_conf_files/a_variable_disparity_pipeline.json b/data_samples/json_conf_files/a_variable_disparity_pipeline.json new file mode 100644 index 0000000..0d828ab --- /dev/null +++ b/data_samples/json_conf_files/a_variable_disparity_pipeline.json @@ -0,0 +1,30 @@ +{ + "input": { + "left": { + "img": "./maricopa/left.tif", + "nodata": -9999 + }, + "right": { + "img": "./maricopa/right.tif", + "nodata": -9999 + }, + "col_disparity": {"init": "./maricopa/init_col_disparity_grid.tif", "range": 5}, + "row_disparity": {"init": "./maricopa/init_row_disparity_grid.tif", "range": 5} + }, + "pipeline": { + "matching_cost": { + "matching_cost_method": "zncc", + "window_size": 5 + }, + "disparity": { + "disparity_method": "wta", + "invalid_disparity": "NaN" + }, + "refinement": { + "refinement_method": "dichotomy", + "iterations": 2, + "filter": {"method": "bicubic"} + } + } + } + \ No newline at end of file diff --git a/docs/source/Images/estimation_schema.png b/docs/source/Images/estimation_schema.png index 2c50417..af0cbc6 100644 Binary files a/docs/source/Images/estimation_schema.png and b/docs/source/Images/estimation_schema.png differ diff --git a/docs/source/Images/range_schema.png b/docs/source/Images/range_schema.png index 6d9cdcc..024b1e2 100644 Binary files a/docs/source/Images/range_schema.png and b/docs/source/Images/range_schema.png differ diff --git a/docs/source/conf.py b/docs/source/conf.py index 6636687..73b08c5 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -59,6 +59,7 @@ "sphinx_rtd_theme", "sphinx.ext.imgmath", "autoapi.extension", + "sphinx_tabs.tabs", ] autoapi_dirs = ["../../pandora2d"] diff --git a/docs/source/exploring_the_field/initial_disparity.rst b/docs/source/exploring_the_field/initial_disparity.rst index 1e011d0..6e0d59e 100644 --- a/docs/source/exploring_the_field/initial_disparity.rst +++ b/docs/source/exploring_the_field/initial_disparity.rst @@ -9,8 +9,7 @@ There are two available methods to do this. 
Setting an interval ------------------- -In the configuration file, the user is required to enter disparity range, as a list with two elements each, indicating -the minimum and maximum values for both row and columns disparity. +In the configuration file, the user is required to enter the disparity range for rows and columns as a dictionary with two keys, "init" and "range". .. code:: json :name: Setting disparity ranges example @@ -18,12 +17,27 @@ the minimum and maximum values for both row and columns disparity. { "input": { - "col_disparity": [-2, 2], - "row_disparity": [-2, 2] + "col_disparity": {"init": 0, "range": 2}, + "row_disparity": {"init": 0, "range": 2} } } - +.. note:: + The "init" key can be either: + - an integer if the initial disparity is common to every point in the image. + - a path to a disparity grid if each point has its own initial disparity value. + + (see :ref:`inputs`) + +The minimum and maximum disparities are then equal to (example for columns): + +.. code:: python + :name: Calculating the min and the max of the disparity + + min_col_disparity = col_disparity["init"] - col_disparity["range"] + max_col_disparity = col_disparity["init"] + col_disparity["range"] + + .. figure:: ../Images/range_schema.png diff --git a/docs/source/exploring_the_field/interpolation_filters.rst b/docs/source/exploring_the_field/interpolation_filters.rst index 3e190aa..71fdaa9 100644 --- a/docs/source/exploring_the_field/interpolation_filters.rst +++ b/docs/source/exploring_the_field/interpolation_filters.rst @@ -38,3 +38,20 @@ For both points to interpolate, the `fractional_shift` is `0.5`. Thus, the coeff :math:`x` values for which kernel is not null are available for :math:`a` ranging from :math:`-1` to :math:`2`. Thus, for an image in 2D, the filter is applied on an array of shape :math:`4 \times 4` where coefficients are applied on columns then lines. + +Sinc +---- + +This filter uses a cardinal sine of the form: + +.. math:: + \frac{\sin(x\ \pi)}{x\ \pi} + +Where :math:`x` is the fractional shift. + +Computed coefficients are windowed by a Gaussian of the form: + +.. math:: + \exp\left(\frac{-2\ \pi\ x^2}{\sigma^2}\right) + +Where :math:`\sigma` corresponds to the size of the filter (half-width of the window). diff --git a/docs/source/userguide.rst b/docs/source/userguide.rst index 500a79a..7b4cd87 100644 --- a/docs/source/userguide.rst +++ b/docs/source/userguide.rst @@ -14,6 +14,7 @@ Userguide userguide/output.rst userguide/as_an_api.rst userguide/faq.rst + userguide/expert_mode.rst diff --git a/docs/source/userguide/as_an_api.rst b/docs/source/userguide/as_an_api.rst index 9e3f1f9..651fe2b 100644 --- a/docs/source/userguide/as_an_api.rst +++ b/docs/source/userguide/as_an_api.rst @@ -34,8 +34,8 @@ Pandora2D provides a full python API which can be used to compute disparity maps 'img': img_right_path, 'nodata': np.nan, }, - "col_disparity": [-2, 2], - "row_disparity": [-2, 2], + "col_disparity": {"init": 0, "range": 2}, + "row_disparity": {"init": 0, "range": 2}, }, # define pipeline configuration 'pipeline':{ @@ -200,4 +200,4 @@ In this case, matching cost cannot be computed for this pixel and the value will Then bit 1 will be set : *The point is invalid: the disparity interval to explore is absent in the right image* and the point disparity will be set to *invalid_disparity*. Moreover, everytime Pandora2D shifts the right image it introduces a new line set at *nodata_right* value. The matching -cost cannot be computed for this line to. 
\ No newline at end of file +cost cannot be computed for this line either. diff --git a/docs/source/userguide/expert_mode.rst b/docs/source/userguide/expert_mode.rst new file mode 100644 index 0000000..7b0a5cd --- /dev/null +++ b/docs/source/userguide/expert_mode.rst @@ -0,0 +1,75 @@ +.. _Expert_mode: + +Expert mode +=========== + +Summary +******* + +The profiling expert mode is intended for users who want to measure the performance of Pandora2D on their personal computer. +In the output folder, they obtain a set of charts showing averages and other metrics for each step across the executions. + +How to profile more functions? +******************************* + + +This option requires the user to be familiar with the pandora2d code. + +First, when they activate the `expert_mode` key in the configuration, they have access by default to performance +information related to each stage of the state machine. +All data is stored in a `pandas.DataFrame` in the code and locally in a CSV file, then presented as graphs in a PDF file. + +If the user wants to analyze the performance of another function, they can add the decorator +`@mem_time_profile_profile(name="Function name")` above that function. +If they want to obtain more metrics, they need to add them to the "metrics_list" in the `profiling.py` file. + +The graphs are handled by the `generate_figure` function. + +.. note:: + Profiling certain functions can significantly increase execution times. + + + +Parameters and configuration: +############################## + +The expert mode profiling section is composed of the following keys: + + OptionalKey("folder_name"): str + +.. list-table:: Expert mode section + :header-rows: 1 + + * - Name + - Description + - Type + - Default value + - Required + * - *folder_name* + - Name of the folder where profiling outputs are written + - str + - + - Yes + +**Example** + +.. code:: json + :name: Input example + + { + "input": + { + // inputs content + } + , + "pipeline" : + { + // pipeline content + }, + "expert_mode": + { + "profiling": + { + "folder_name": "expert_mode_outputs" + } + } + } \ No newline at end of file diff --git a/docs/source/userguide/faq.rst b/docs/source/userguide/faq.rst index 7219176..da1c0e5 100644 --- a/docs/source/userguide/faq.rst +++ b/docs/source/userguide/faq.rst @@ -22,8 +22,8 @@ It is possible to add a step parameter in the configuration file. This parameter "img": img_right_path, "nodata": "NaN", }, - "col_disparity": [-3, 3], - "row_disparity": [-3, 3], + "col_disparity": {"init": 0, "range": 3}, + "row_disparity": {"init": 0, "range": 3}, }, "pipeline": { "matching_cost": { @@ -60,8 +60,8 @@ It is possible to work on only one section of the image with an ROI. 
For this, t "img": img_right_path, "nodata": "NaN", }, - "col_disparity": [-3, 3], - "row_disparity": [-3, 3], + "col_disparity": {"init": 0, "range": 3}, + "row_disparity": {"init": 0, "range": 3}, }, "ROI": { "col": {"first": 10, "last": 100}, diff --git a/docs/source/userguide/input.rst b/docs/source/userguide/input.rst index cfe6594..a47f578 100644 --- a/docs/source/userguide/input.rst +++ b/docs/source/userguide/input.rst @@ -30,45 +30,87 @@ Input section is composed of the following keys: - - Yes * - *col_disparity* - - Minimal and Maximal disparities for columns - - [int, int] + - The disparities for columns (see description below) + - dict - - If the estimation step is not present * - *row_disparity* - - Minimal and Maximal disparities for rows - - [int, int] + - The disparities for rows (see description below) + - dict - - If the estimation step is not present -.. warning:: - If interpolation is used as refinement method, row_disparity and col_disparity ranges must have a size greater than or equal to 5. +Image (left and right) and disparity (col_disparity and row_disparity) properties are composed of the following keys: + +.. tabs:: + + .. tab:: Image properties + + .. list-table:: + :header-rows: 1 + + * - Name + - Description + - Type + - Default value + - Required + * - *img* + - Path to the image + - string + - + - Yes + * - *nodata* + - Nodata value of the image + - int, "NaN" or "inf" + - -9999 + - No + * - *mask* + - Path to the mask + - string + - none + - No + + .. tab:: Disparity properties + + .. list-table:: + :header-rows: 1 + + * - Name + - Description + - Type + - Default value + - Required + * - *init* + - Initial point or path to initial grid + - int or str + - + - Yes + * - *range* + - The search radius (see :ref:`initial_disparity`) + - int >= 0 + - + - Yes + +.. note:: + The initial disparity can be either: + - constant for each point in the image, in which case *init* dictionary key is an integer + - variable, in which case *init* is a string which returns the path to a grid containing + an integer initial value for each point in the image. -Left and Right properties are composed of the following keys: +.. warning:: + With sad/ssd matching_cost_method in the pipeline (see :ref:`Sequencing`) , `nodata` only accepts `int` type. -.. list-table:: Left and Right properties - :header-rows: 1 +.. note:: + Only one-band masks are accepted by pandora2d. Mask must comply with the following convention : + - Value equal to 0 for valid pixel + - Value not equal to 0 for invalid pixel - * - Name - - Description - - Type - - Default value - - Required - * - *img* - - Path to the image - - string - - - - Yes - * - *nodata* - - Nodata value of the image - - int, "NaN" or "inf" - - -9999 - - No -.. warning:: - With sad/ssd matching_cost_method in the pipeline (see :ref:`Sequencing`) , `nodata` only accepts `int` type. +Examples +******** -**Example** +**Input with constant initial disparity** .. code:: json :name: Input example @@ -78,14 +120,15 @@ Left and Right properties are composed of the following keys: { "left": { "img": "./data/left.tif", - "nodata": -9999 + "nodata": -9999, + "mask": "./data/mask_left.tif" }, "right": { "img": "/data/right.tif", "nodata": -9999 }, - "col_disparity": [-3, 3], - "row_disparity": [-3, 3] + "col_disparity": {"init": 0, "range": 3}, + "row_disparity": {"init": 0, "range": 3} } , "pipeline" : @@ -94,3 +137,29 @@ Left and Right properties are composed of the following keys: } } +**Input with variable initial disparity** + +.. 
code:: json + :name: Input example with disparity grid + + { + "input": + { + "left": { + "img": "./data/left.tif", + "nodata": -9999, + "mask": "./data/mask_left.tif" + }, + "right": { + "img": "/data/right.tif", + "nodata": -9999 + }, + "col_disparity": {"init": "./data/col_disparity_grid.tif", "range": 3}, + "row_disparity": {"init": "./data/row_disparity_grid.tif", "range": 3} + } + , + "pipeline" : + { + // pipeline content + } + } diff --git a/docs/source/userguide/overview.rst b/docs/source/userguide/overview.rst index a1ccb82..c1dfc67 100644 --- a/docs/source/userguide/overview.rst +++ b/docs/source/userguide/overview.rst @@ -103,8 +103,8 @@ Example "img": "./data/right.tif", "nodata": -9999 }, - "col_disparity": [-2, 2], - "row_disparity": [-2, 2] + "col_disparity": {"init": 0, "range": 2}, + "row_disparity": {"init": 0, "range": 2} }, "pipeline": { "matching_cost": { diff --git a/docs/source/userguide/sequencing.rst b/docs/source/userguide/sequencing.rst index 79141fc..0cc593a 100644 --- a/docs/source/userguide/sequencing.rst +++ b/docs/source/userguide/sequencing.rst @@ -48,8 +48,8 @@ interpolation method. "right": { "img": "img_left.png" }, - "col_disparity": [-2, 2], - "row_disparity": [-2, 2] + "col_disparity": {"init": 0, "range": 2}, + "row_disparity": {"init": 0, "range": 2} }, "pipeline": { @@ -68,4 +68,4 @@ interpolation method. "refinement_method": "optical_flow" } } - } \ No newline at end of file + } diff --git a/docs/source/userguide/step_by_step/matching_cost.rst b/docs/source/userguide/step_by_step/matching_cost.rst index 1e0c49c..0a27803 100644 --- a/docs/source/userguide/step_by_step/matching_cost.rst +++ b/docs/source/userguide/step_by_step/matching_cost.rst @@ -62,6 +62,12 @@ Configuration and parameters - 1 - [1,2,4] - No + * - spline_order + - Spline order used for interpolation when subpix > 1 + - int + - 1 + - > 0 and < 6 + - No .. note:: diff --git a/docs/source/userguide/step_by_step/refinement.rst b/docs/source/userguide/step_by_step/refinement.rst index 5c7bf0c..df4c609 100644 --- a/docs/source/userguide/step_by_step/refinement.rst +++ b/docs/source/userguide/step_by_step/refinement.rst @@ -106,63 +106,216 @@ Available filters are described in :ref:`interpolation_filters`. Configuration and parameters ---------------------------- -.. list-table:: Configuration and parameters - :header-rows: 1 - - * - Name - - Description - - Type - - Default value - - Available value - - Required - * - *refinement_method* - - Refinement method - - string - - - - | "interpolation", - | "dichotomy", - | "optical_flow" - - Yes - * - *iterations* - - Number of iterations (not available for interpolation) - - integer - - 4 for **optical_flow** method - - | **Dichotomy** - | 1 to 9 - | *if above, will be bound to 9* - | **Optical flow** - | >0 - - | **Dichotomy** - | Yes - | **Optical flow** - | No - * - *filter* - - Name of the filter to use - - str - - - - | "sinc", - | "bicubic", - | **Only available if "dichotomy" method** - - Yes - -**Example** - -.. code:: json - :name: Refinement example - - { - "input" : - { - // input content - }, - "pipeline" : - { - // ... - "refinement": +.. tabs:: + + .. tab:: Interpolation + + Parameters : + + .. list-table:: + :header-rows: 1 + + * - Name + - Description + - Type + - Available value + - Required + * - *refinement_method* + - Refinement method + - string + - "interpolation" + - Yes + + Configuration example with interpolation : + + .. code:: json + + { + "input" : + { + // input content + }, + "pipeline" : + { + // ... 
+ "refinement": + { + "refinement_method": "interpolation" + }, + // ... + } + } + + .. tab:: Optical-flow + + Parameters : + + .. list-table:: + :header-rows: 1 + + * - Name + - Description + - Type + - Default value + - Available value + - Required + * - *refinement_method* + - Refinement method + - string + - + - "optical_flow" + - Yes + * - *iterations* + - Number of iterations + - integer + - 4 + - >0 + - No + + Configuration example with optical_flow : + + .. code:: json + { - "refinement_method": "optical_flow" - }, - // ... - } - } + "input" : + { + // input content + }, + "pipeline" : + { + // ... + "refinement": + { + "refinement_method": "optical_flow", + "iterations" : 7 + }, + // ... + } + } + + .. tab:: Dichotomy + + .. tabs:: + + .. tab:: Bicubic + + Parameters : + + .. list-table:: + :header-rows: 1 + + * - Name + - Description + - Type + - Default value + - Available value + - Required + * - *refinement_method* + - Refinement method + - string + - + - "dichotomy" + - Yes + * - *iterations* + - Number of iterations + - integer + - + - | 1 to 9 + | *if above, will be bound to 9* + - Yes + * - *filter* + - | Configuration of the filter + | used for interpolation + - | dict with key: + | - "method" + - + - {"method": "bicubic"} + - Yes + + Configuration example with dichotomy : + + .. code:: json + + { + "input" : + { + // input content + }, + "pipeline" : + { + // ... + "refinement": + { + "refinement_method": "dichotomy", + "filter": {"method": "bicubic"}, + "iterations" : 7 + }, + // ... + } + } + + .. tab:: Cardinal sine + + Parameters : + + .. list-table:: + :header-rows: 1 + + * - Name + - Description + - Type + - Default value + - Available value + - Required + * - *refinement_method* + - Refinement method + - string + - + - "dichotomy" + - Yes + * - *iterations* + - Number of iterations + - integer + - + - | 1 to 9 + | *if above, will be bound to 9* + - Yes + * - *filter* + - | Configuration of the filter + | used for interpolation + - | dict with keys: + | - "method" + | - "size" + - + - | { + | "method": "sinc", + | "size" : 6 to 21, + | } + - Yes + + Configuration example with dichotomy : + + .. code:: json + + { + "input" : + { + // input content + }, + "pipeline" : + { + // ... + "refinement": + { + "refinement_method": "dichotomy", + "filter": { + "method": "sinc", + "size": 9 + }, + "iterations" : 7 + }, + // ... 
+ } + } + diff --git a/notebooks/introduction_and_basic_usage.ipynb b/notebooks/introduction_and_basic_usage.ipynb index 08f168a..e44772b 100644 --- a/notebooks/introduction_and_basic_usage.ipynb +++ b/notebooks/introduction_and_basic_usage.ipynb @@ -237,8 +237,8 @@ "input_config = {\n", " \"left\": {\"img\": img_left_path, \"nodata\": np.nan},\n", " \"right\": {\"img\": img_right_path, \"nodata\": np.nan},\n", - " \"col_disparity\": [-2, 2],\n", - " \"row_disparity\": [-2, 2],\n", + " \"col_disparity\": {\"init\": 0, \"range\": 2},\n", + " \"row_disparity\": {\"init\": 0, \"range\": 2},\n", "}" ] }, @@ -334,8 +334,8 @@ " \"right\": {\n", " \"img\": \"data/right.tif\",\n", " },\n", - " \"col_disparity\": [-2, 2],\n", - " \"row_disparity\": [-2, 2],\n", + " \"col_disparity\": {\"init\": 0, \"range\": 2},\n", + " \"row_disparity\": {\"init\": 0, \"range\": 2},\n", " },\n", " \"pipeline\":{\n", " \"matching_cost\" : {\n", @@ -547,8 +547,8 @@ " \"right\": {\n", " \"img\": \"data/right.tif\",\n", " },\n", - " \"col_disparity\": [-2, 2],\n", - " \"row_disparity\": [-2, 2],\n", + " \"col_disparity\": {\"init\": 0, \"range\": 2},\n", + " \"row_disparity\": {\"init\": 0, \"range\": 2},\n", " },\n", " \"pipeline\":{\n", " \"matching_cost\" : {\n", @@ -789,7 +789,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.10.12" + "version": "3.12.3" } }, "nbformat": 4, diff --git a/notebooks/usage_dichotomy.ipynb b/notebooks/usage_dichotomy.ipynb index 1825e6f..5aa8fe1 100644 --- a/notebooks/usage_dichotomy.ipynb +++ b/notebooks/usage_dichotomy.ipynb @@ -180,8 +180,8 @@ " \"img\": img_right_path,\n", " \"nodata\": np.nan,\n", " },\n", - " \"col_disparity\": [-3, 3],\n", - " \"row_disparity\": [-3, 3],\n", + " \"col_disparity\": {\"init\": 0, \"range\": 3},\n", + " \"row_disparity\": {\"init\": 0, \"range\": 3},\n", "}" ] }, @@ -294,7 +294,7 @@ "user_cfg_with_refinement[\"pipeline\"][\"refinement\"] = {\n", " \"refinement_method\": \"dichotomy\",\n", " \"iterations\": 1,\n", - " \"filter\": \"bicubic\",\n", + " \"filter\": {\"method\": \"bicubic\"},\n", "}" ] }, @@ -309,7 +309,7 @@ "user_cfg_with_two_refinements[\"pipeline\"][\"refinement\"] = {\n", " \"refinement_method\": \"dichotomy\",\n", " \"iterations\": 2,\n", - " \"filter\": \"bicubic\",\n", + " \"filter\": {\"method\": \"bicubic\"},\n", "}" ] }, @@ -432,12 +432,16 @@ "from matplotlib import colors\n", "\n", "plt.rcParams[\"figure.figsize\"] = (20, 8)\n", + "min_max_disp_col = [input_config[\"col_disparity\"][\"init\"] - input_config[\"col_disparity\"][\"range\"], \n", + " input_config[\"col_disparity\"][\"init\"] + input_config[\"col_disparity\"][\"range\"]]\n", "\n", + "min_max_disp_row = [input_config[\"row_disparity\"][\"init\"] - input_config[\"row_disparity\"][\"range\"], \n", + " input_config[\"row_disparity\"][\"init\"] + input_config[\"row_disparity\"][\"range\"]]\n", "\n", "def plot_result(\n", " dataset,\n", - " dmin=min(input_config[\"row_disparity\"][0], input_config[\"col_disparity\"][0]),\n", - " dmax=max(input_config[\"row_disparity\"][1], input_config[\"col_disparity\"][1]),\n", + " dmin=min(min_max_disp_row[0], min_max_disp_col[0]),\n", + " dmax=max(min_max_disp_row[1], min_max_disp_col[1]),\n", " palette=plt.cm.bwr.with_extremes(over=\"y\", under=\"g\", bad=\"k\"),\n", " title=\"Without refinement.\",\n", " colorbarlabel=\"Disparity\",\n", @@ -566,7 +570,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.10.12" + "version": "3.12.3" } 
}, "nbformat": 4, diff --git a/notebooks/usage_step_roi_config.ipynb b/notebooks/usage_step_roi_config.ipynb index d941d61..6fb2d39 100644 --- a/notebooks/usage_step_roi_config.ipynb +++ b/notebooks/usage_step_roi_config.ipynb @@ -171,8 +171,8 @@ "input_config = {\n", " \"left\": {\"img\": img_left_path, \"nodata\": np.nan},\n", " \"right\": {\"img\": img_right_path, \"nodata\": np.nan},\n", - " \"col_disparity\": [-2, 2],\n", - " \"row_disparity\": [-2, 2],\n", + " \"col_disparity\": {\"init\": 0, \"range\": 2},\n", + " \"row_disparity\": {\"init\": 0, \"range\": 2},\n", "}" ] }, @@ -261,8 +261,8 @@ " \"img\": img_right_path,\n", " \"nodata\": \"NaN\",\n", " },\n", - " \"col_disparity\": [-3, 3],\n", - " \"row_disparity\": [-3, 3],\n", + " \"col_disparity\": {\"init\": 0, \"range\": 3},\n", + " \"row_disparity\": {\"init\": 0, \"range\": 3},\n", " },\n", " \"pipeline\":{\n", " \"matching_cost\" : {\n", @@ -448,8 +448,8 @@ " \"img\": img_right_path,\n", " \"nodata\": \"NaN\",\n", " },\n", - " \"col_disparity\": [-3, 3],\n", - " \"row_disparity\": [-3, 3],\n", + " \"col_disparity\": {\"init\": 0, \"range\": 3},\n", + " \"row_disparity\": {\"init\": 0, \"range\": 3},\n", " },\n", " \"pipeline\":{\n", " \"matching_cost\" : {\n", @@ -636,8 +636,8 @@ " \"img\": img_right_path,\n", " \"nodata\": \"NaN\",\n", " },\n", - " \"col_disparity\": [-3, 3],\n", - " \"row_disparity\": [-3, 3],\n", + " \"col_disparity\": {\"init\": 0, \"range\": 3},\n", + " \"row_disparity\": {\"init\": 0, \"range\": 3},\n", " },\n", " \"ROI\":{\n", " \"col\": {\"first\": 10, \"last\": 100},\n", @@ -820,7 +820,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.8.16" + "version": "3.12.3" } }, "nbformat": 4, diff --git a/pandora2d/__init__.py b/pandora2d/__init__.py index 4ee2b2a..b6bfa86 100644 --- a/pandora2d/__init__.py +++ b/pandora2d/__init__.py @@ -36,6 +36,8 @@ from pandora2d.state_machine import Pandora2DMachine from pandora2d import reporting from pandora2d.reporting import NumpyPrimitiveEncoder +from pandora2d.profiling import generate_summary +from pandora2d import profiling def run( @@ -81,6 +83,8 @@ def main(cfg_path: str, path_output: str, verbose: bool) -> None: :param cfg_path: path to the json configuration file :type cfg_path: string + :param path_output: output directory + :type path_output: str :param verbose: verbose mode :type verbose: bool :return: None @@ -95,6 +99,7 @@ def main(cfg_path: str, path_output: str, verbose: bool) -> None: pandora2d_machine = Pandora2DMachine() cfg = check_conf(user_cfg, pandora2d_machine) + profiling.expert_mode_config.enable = "expert_mode" in cfg setup_logging(verbose) @@ -129,3 +134,7 @@ def main(cfg_path: str, path_output: str, verbose: bool) -> None: completed_cfg["margins"] = pandora2d_machine.margins.to_dict() # save config save_config(path_output, completed_cfg) + + # Profiling results + if "expert_mode" in completed_cfg: + generate_summary(path_output, completed_cfg["expert_mode"]["profiling"]) diff --git a/pandora2d/check_configuration.py b/pandora2d/check_configuration.py index 662f56b..97b343a 100644 --- a/pandora2d/check_configuration.py +++ b/pandora2d/check_configuration.py @@ -25,16 +25,15 @@ from __future__ import annotations -from typing import Dict, List - +from typing import Dict import numpy as np import xarray as xr -from json_checker import And, Checker, Or +from json_checker import And, Checker, Or, MissKeyCheckerError +from rasterio.io import DatasetReader -from pandora.img_tools import 
get_metadata +from pandora.img_tools import get_metadata, rasterio_open from pandora.check_configuration import ( check_dataset, - check_disparities_from_input, check_images, concat_conf, get_config_input, @@ -42,6 +41,7 @@ update_conf, ) +from pandora.check_configuration import rasterio_can_open from pandora2d.state_machine import Pandora2DMachine @@ -101,35 +101,119 @@ def check_input_section(user_cfg: Dict[str, dict], estimation_config: dict = Non check_images(cfg["input"]) if estimation_config is None: - check_disparities_from_input(cfg["input"]["col_disparity"], None) - check_disparities_from_input(cfg["input"]["row_disparity"], None) left_image_metadata = get_metadata(cfg["input"]["left"]["img"]) - check_disparity_ranges_are_inside_image( - left_image_metadata, cfg["input"]["row_disparity"], cfg["input"]["col_disparity"] - ) + check_disparity(left_image_metadata, cfg["input"]) return cfg +def check_disparity(image_metadata: xr.Dataset, input_cfg: Dict) -> None: + """ + All checks on disparity + + :param image_metadata: only metadata on the left image + :type image_metadata: xr.Dataset + :param input_cfg: input configuration + :type input_cfg: Dict + + """ + + # Check that disparities are dictionaries or grids + if not (isinstance(input_cfg["row_disparity"], dict) and isinstance(input_cfg["col_disparity"], dict)): + raise AttributeError("The disparities in rows and columns must be given as 2 dictionaries.") + + if isinstance(input_cfg["row_disparity"]["init"], str) and isinstance(input_cfg["col_disparity"]["init"], str): + + # Read disparity grids + disparity_row_reader = rasterio_open(input_cfg["row_disparity"]["init"]) + disparity_col_reader = rasterio_open(input_cfg["col_disparity"]["init"]) + + # Check disparity grids size and number of bands + check_disparity_grids(image_metadata, disparity_row_reader) + check_disparity_grids(image_metadata, disparity_col_reader) + + # Get correct disparity dictionaries from init disparity grids to give as input of + # the check_disparity_ranges_are_inside_image method + row_disp_dict = get_dictionary_from_init_grid(disparity_row_reader, input_cfg["row_disparity"]["range"]) + col_disp_dict = get_dictionary_from_init_grid(disparity_col_reader, input_cfg["col_disparity"]["range"]) + + elif isinstance(input_cfg["row_disparity"]["init"], int) and isinstance(input_cfg["col_disparity"]["init"], int): + row_disp_dict = input_cfg["row_disparity"] + col_disp_dict = input_cfg["col_disparity"] + + else: + raise ValueError("Initial columns and row disparity values must be two strings or two integers") + + # Check that disparity ranges are not totally out of the image + check_disparity_ranges_are_inside_image(image_metadata, row_disp_dict, col_disp_dict) + + +def check_disparity_grids(image_metadata: xr.Dataset, disparity_reader: DatasetReader) -> None: + """ + Check that disparity grids contains two bands and are + the same size as the input image + + :param image_metadata: + :type image_metadata: xr.Dataset + :param disparity_reader: disparity grids + :type disparity_reader: rasterio.io.DatasetReader + """ + + # Check that disparity grids are 1-channel grids + if disparity_reader.count != 1: + raise AttributeError("Initial disparity grid must be a 1-channel grid") + + # Check that disparity grids are the same size as the input image + if (disparity_reader.height, disparity_reader.width) != ( + image_metadata.sizes["row"], + image_metadata.sizes["col"], + ): + raise AttributeError("Initial disparity grids and image must have the same size") + + +def 
get_dictionary_from_init_grid(disparity_reader: DatasetReader, disp_range: int) -> Dict: + """ + Get correct dictionaries to give as input of check_disparity_ranges_are_inside_image method + from initial disparity grids. + + :param disparity_reader: initial disparity grid + :type disparity_reader: rasterio.io.DatasetReader + :param disp_range: range of exploration + :type disp_range: int + :return: a disparity dictionary to give to check_disparity_ranges_are_inside_image() method + :rtype: Dict + """ + + init_disp_grid = disparity_reader.read(1) + + # Get dictionary with integer init value corresponding to the maximum absolute value of init_disp_grid + disp_dict = { + "init": np.max(np.abs(init_disp_grid)), + "range": disp_range, + } + + return disp_dict + + def check_disparity_ranges_are_inside_image( - image_metadata: xr.Dataset, row_disparity_range: List, col_disparity_range: List + image_metadata: xr.Dataset, row_disparity: Dict, col_disparity: Dict ) -> None: """ Raise an error if disparity ranges are out off image. :param image_metadata: :type image_metadata: xr.Dataset - :param row_disparity_range: - :type row_disparity_range: List - :param col_disparity_range: - :type col_disparity_range: List + :param row_disparity: + :type row_disparity: Dict + :param col_disparity: + :type col_disparity: Dict :return: None :rtype: None :raises: ValueError """ - if np.abs(row_disparity_range).min() > image_metadata.sizes["row"]: + if np.abs(row_disparity["init"]) - row_disparity["range"] > image_metadata.sizes["row"]: raise ValueError("Row disparity range out of image") - if np.abs(col_disparity_range).min() > image_metadata.sizes["col"]: + if np.abs(col_disparity["init"]) - col_disparity["range"] > image_metadata.sizes["col"]: raise ValueError("Column disparity range out of image") @@ -195,14 +279,38 @@ def check_pipeline_section(user_cfg: Dict[str, dict], pandora2d_machine: Pandora return pipeline_cfg +def check_expert_mode_section(user_cfg: Dict[str, dict]) -> Dict[str, dict]: + """ + Complete and check if the dictionary is correct + + :param user_cfg: user configuration + :type user_cfg: dict + :return: cfg: global configuration + :rtype: cfg: dict + """ + + if "profiling" not in user_cfg: + raise MissKeyCheckerError("Please be sure to set the profiling dictionary") + + # check profiling schema + profiling_mode_cfg = user_cfg["profiling"] + checker = Checker(expert_mode_profiling) + checker.validate(profiling_mode_cfg) + + profiling_mode_cfg = {"expert_mode": user_cfg} + + return profiling_mode_cfg + + def check_conf(user_cfg: Dict, pandora2d_machine: Pandora2DMachine) -> dict: """ Complete and check if the dictionary is correct :param user_cfg: user configuration :type user_cfg: dict - :param pandora2d_machine: instance of PandoraMachine - :type pandora2d_machine: PandoraMachine + :param pandora2d_machine: instance of Pandora2DMachine + :type pandora2d_machine: Pandora2DMachine + :return: cfg: global configuration :rtype: cfg: dict """ @@ -221,15 +329,11 @@ def check_conf(user_cfg: Dict, pandora2d_machine: Pandora2DMachine) -> dict: if "matching_cost" in cfg_pipeline["pipeline"]: check_right_nodata_condition(cfg_input, cfg_pipeline) - # The refinement step with interpolation method is not allowed with disparity ranges of size lower than 5 - if ( - "refinement" in cfg_pipeline["pipeline"] - and cfg_pipeline["pipeline"]["refinement"]["refinement_method"] == "interpolation" - ): - check_disparity_range_size(cfg_input["input"]["col_disparity"], "Column") - 
check_disparity_range_size(cfg_input["input"]["row_disparity"], "Row") + cfg_expert_mode = user_cfg.get("expert_mode", {}) + if cfg_expert_mode != {}: + cfg_expert_mode = check_expert_mode_section(cfg_expert_mode) - cfg = concat_conf([cfg_input, cfg_roi, cfg_pipeline]) + cfg = concat_conf([cfg_input, cfg_roi, cfg_pipeline, cfg_expert_mode]) return cfg @@ -251,26 +355,6 @@ def check_right_nodata_condition(cfg_input: Dict, cfg_pipeline: Dict) -> None: ) -def check_disparity_range_size(disparity: list[int] | str, title: str) -> None: - """ - Check that disparity ranges with a size < 5 are not allowed for interpolation refinement method. - - :param disparity: disparity range - :type disparity: list[int] | str - :param cfg_pipeline: pipeline section of configuration - :type cfg_pipeline: Dict - """ - - if isinstance(disparity, list): - if (abs(disparity[1] - disparity[0]) + 1) < 5: - raise ValueError( - title + " disparity range with a size < 5 are not allowed with interpolation refinement method" - ) - - if isinstance(disparity, str): - raise TypeError("Grid disparities are not yet handled by Pandora2D") - - def check_roi_coherence(roi_cfg: dict) -> None: """ Check that the first ROI coords are lower than the last. @@ -306,29 +390,37 @@ def get_roi_config(user_cfg: Dict[str, dict]) -> Dict[str, dict]: "left": { "img": And(str, rasterio_can_open_mandatory), "nodata": Or(int, lambda input: np.isnan(input), lambda input: np.isinf(input)), + "mask": And(Or(str, lambda input: input is None), rasterio_can_open), }, "right": { "img": And(str, rasterio_can_open_mandatory), "nodata": Or(int, lambda input: np.isnan(input), lambda input: np.isinf(input)), + "mask": And(Or(str, lambda input: input is None), rasterio_can_open), }, - "col_disparity": [int, int], - "row_disparity": [int, int], + "col_disparity": {"init": Or(int, rasterio_can_open), "range": And(int, lambda x: x >= 0)}, + "row_disparity": {"init": Or(int, rasterio_can_open), "range": And(int, lambda x: x >= 0)}, } default_short_configuration_input = { "input": { "left": { "nodata": -9999, + "mask": None, }, "right": { "nodata": -9999, + "mask": None, }, } } -default_configuration_disp = {"input": {"col_disparity": [-9999, -9995], "row_disparity": [-9999, -9995]}} +default_configuration_disp = { + "input": {"col_disparity": {"init": -9997, "range": 2}, "row_disparity": {"init": -9997, "range": 2}} +} roi_configuration_schema = { "row": {"first": And(int, lambda x: x >= 0), "last": And(int, lambda x: x >= 0)}, "col": {"first": And(int, lambda x: x >= 0), "last": And(int, lambda x: x >= 0)}, } + +expert_mode_profiling = {"folder_name": str} diff --git a/pandora2d/common.py b/pandora2d/common.py index 82aed18..9bb2b1a 100644 --- a/pandora2d/common.py +++ b/pandora2d/common.py @@ -34,14 +34,16 @@ from xarray import Coordinate as Coordinates import os -from typing import Dict +from typing import Dict, Union, Tuple, List import xarray as xr import numpy as np +from numpy.typing import NDArray from rasterio import Affine from pandora.common import mkdir_p, write_data_array from pandora2d.img_tools import remove_roi_margins +from pandora2d.constants import Criteria def save_dataset(dataset: xr.Dataset, cfg: Dict, output: str) -> None: @@ -63,8 +65,8 @@ def save_dataset(dataset: xr.Dataset, cfg: Dict, output: str) -> None: # remove ROI margins to save only user ROI in tif files if "ROI" in cfg: dataset = remove_roi_margins(dataset, cfg) - # Translate georeferencement origin to ROI origin: - dataset.attrs["transform"] *= 
Affine.translation(cfg["ROI"]["col"]["first"], cfg["ROI"]["row"]["first"]) + if dataset.attrs["transform"] is not None: + adjust_georeferencement(dataset, cfg) # create output dir mkdir_p(output) @@ -93,6 +95,52 @@ def save_dataset(dataset: xr.Dataset, cfg: Dict, output: str) -> None: ) +def adjust_georeferencement(dataset: xr.Dataset, cfg: Dict) -> None: + """ + Change origin in case a ROI is present and set pixel size to the matching cost step. + + :param dataset: dataset to configure. + :type dataset: xr.Dataset + :param cfg: configuration + :type cfg: Dict + """ + if "ROI" in cfg: + # Translate georeferencement origin to ROI origin: + dataset.attrs["transform"] *= Affine.translation(cfg["ROI"]["col"]["first"], cfg["ROI"]["row"]["first"]) + row_step, col_step = get_step(cfg) + set_pixel_size(dataset, row_step, col_step) + + +def get_step(cfg: Dict) -> Tuple[int, int]: + """ + Get step from matching cost or retun default value. + :param cfg: configuration + :type cfg: Dict + :return: row_step, col_step + :rtype: Tuple[int, int] + """ + try: + return cfg["pipeline"]["matching_cost"]["step"] + except KeyError: + return 1, 1 + + +def set_pixel_size(dataset: xr.Dataset, row_step: int = 1, col_step: int = 1) -> None: + """ + Set the pixel size according to the step used in calculating the matching cost. + + This ensures that all pixels are well geo-referenced in case a step is applied. + + :param dataset: Data to save + :type dataset: xr.Dataset + :param row_step: step used in row + :type row_step: int + :param col_step: step used in column + :type col_step: int + """ + dataset.attrs["transform"] *= Affine.scale(col_step, row_step) + + def dataset_disp_maps( delta_row: np.ndarray, delta_col: np.ndarray, @@ -145,3 +193,98 @@ def dataset_disp_maps( dataset.attrs = attributes return dataset + + +def set_out_of_row_disparity_range_to_other_value( + data: xr.DataArray, + min_disp_grid: NDArray[np.floating], + max_disp_grid: NDArray[np.floating], + value: Union[int, float, Criteria], + global_disparity_range: Union[None, List[int]] = None, +) -> None: + """ + Put special value in data where the disparity is out of the range defined by disparity grids. + + The operation is done inplace. + + :param data: cost_volumes or criteria_dataarray to modify. + :type data: xr.DataArray 4D + :param min_disp_grid: grid of min disparity. + :type min_disp_grid: NDArray[np.floating] + :param max_disp_grid: grid of max disparity. + :type max_disp_grid: NDArray[np.floating] + :param value: value to set on data. 
+ :type value: Union[int, float, Criteria] + :param global_disparity_range: + :type global_disparity_range: + """ + # WARNING: if one day we switch disp_row with disp_col index should be -2 + ndisp_row = data.shape[-1] + + # We want to put special value on points that are not in the global disparity range (row_disparity_source) + for disp_row in range(ndisp_row): + if global_disparity_range is not None: # Case we are working with cost volume + masking = np.nonzero( + np.logical_or( + (data.coords["disp_row"].data[disp_row] < min_disp_grid) + & (data.coords["disp_row"].data[disp_row] >= global_disparity_range[0]), + (data.coords["disp_row"].data[disp_row] > max_disp_grid) + & (data.coords["disp_row"].data[disp_row] <= global_disparity_range[1]), + ) + ) + else: + masking = np.nonzero( + np.logical_or( + data.coords["disp_row"].data[disp_row] < min_disp_grid, + data.coords["disp_row"].data[disp_row] > max_disp_grid, + ) + ) + data.data[masking[0], masking[1], :, disp_row] = value + + +def set_out_of_col_disparity_range_to_other_value( + data: xr.DataArray, + min_disp_grid: NDArray[np.floating], + max_disp_grid: NDArray[np.floating], + value: Union[int, float, Criteria], + global_disparity_range: Union[None, List[int]] = None, +) -> None: + """ + Put special value in data (cost_volumes or criteria_dataarray) where the disparity is out of the range defined + by disparity grids. + + The operation is done inplace. + + :param data: cost_volumes or criteria_dataarray to modify. + :type data: xr.DataArray 4D + :param min_disp_grid: grid of min disparity. + :type min_disp_grid: NDArray[np.floating] + :param max_disp_grid: grid of max disparity. + :type max_disp_grid: NDArray[np.floating] + :param value: value to set on data. + :type value: Union[int, float, Criteria] + :param global_disparity_range: + :type global_disparity_range: + """ + # WARNING: if one day we switch disp_row with disp_col index should be -1 + ndisp_col = data.shape[-2] + + # We want to put special value on points that are not in the global disparity range (col_disparity_source) + for disp_col in range(ndisp_col): + if global_disparity_range is not None: # Case we are working with cost volume + masking = np.nonzero( + np.logical_or( + (data.coords["disp_col"].data[disp_col] < min_disp_grid) + & (data.coords["disp_col"].data[disp_col] >= global_disparity_range[0]), + (data.coords["disp_col"].data[disp_col] > max_disp_grid) + & (data.coords["disp_col"].data[disp_col] <= global_disparity_range[1]), + ) + ) + else: + masking = np.nonzero( + np.logical_or( + data.coords["disp_col"].data[disp_col] < min_disp_grid, + data.coords["disp_col"].data[disp_col] > max_disp_grid, + ) + ) + data.data[masking[0], masking[1], disp_col, :] = value diff --git a/pandora2d/constants.py b/pandora2d/constants.py new file mode 100644 index 0000000..ce8d4f6 --- /dev/null +++ b/pandora2d/constants.py @@ -0,0 +1,51 @@ +# Copyright (c) 2024 Centre National d'Etudes Spatiales (CNES). +# +# This file is part of PANDORA2D +# +# https://github.com/CNES/Pandora2D +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +""" +This module contains all the parameters related to the criteria dataset, defining each bit. +""" + +from enum import Flag, auto + + +class Criteria(Flag): + """ + Criteria class + """ + + VALID = 0 + + PANDORA2D_MSK_PIXEL_LEFT_BORDER = auto() + """The pixel is invalid : border of left image according to window size.""" + PANDORA2D_MSK_PIXEL_LEFT_NODATA = auto() + """The pixel is invalid : nodata in left mask.""" + PANDORA2D_MSK_PIXEL_RIGHT_NODATA = auto() + """The pixel is invalid : nodata in right mask.""" + PANDORA2D_MSK_PIXEL_RIGHT_DISPARITY_OUTSIDE = auto() + """The pixel is invalid : disparity is out the right image.""" + PANDORA2D_MSK_PIXEL_INVALIDITY_MASK_LEFT = auto() + """The pixel is invalid : invalidated by validity mask of left image.""" + PANDORA2D_MSK_PIXEL_INVALIDITY_MASK_RIGHT = auto() + """The pixel is invalid : invalidated by validity mask of right image.""" + PANDORA2D_MSK_PIXEL_PEAK_ON_EDGE = auto() + """ + The pixel is invalid : The correlation peak is at the edge of disparity range. + The calculations stopped at the pixellic stage. + """ + PANDORA2D_MSK_PIXEL_DISPARITY_UNPROCESSED = auto() + """The disparity is not processed because not included in the disparity range of the current point.""" diff --git a/pandora2d/criteria.py b/pandora2d/criteria.py new file mode 100644 index 0000000..3eae676 --- /dev/null +++ b/pandora2d/criteria.py @@ -0,0 +1,308 @@ +# Copyright (c) 2024 Centre National d'Etudes Spatiales (CNES). +# +# This file is part of PANDORA2D +# +# https://github.com/CNES/Pandora2D +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +""" +This module contains functions associated to the validity mask and criteria dataarray created in the cost volume step. +""" +import itertools +from typing import Union +import xarray as xr +import numpy as np +from numpy.typing import NDArray + +from pandora.criteria import binary_dilation_msk +from pandora2d.constants import Criteria +from pandora2d.common import ( + set_out_of_col_disparity_range_to_other_value, + set_out_of_row_disparity_range_to_other_value, +) + + +def allocate_criteria_dataarray( + cv: xr.Dataset, value: Union[int, float, Criteria] = Criteria.VALID, data_type: Union[np.dtype, None] = None +) -> xr.DataArray: + """ + This method creates the criteria_dataarray with the same dimensions as cost_volumes (cv). + Initially, all points are considered valid and have the value XX. + + :param cv: cost_volumes + :type cv: 4D xarray.Dataset + :param value: value representing the valid criteria, by default Criteria.VALID = 0 + :type value: Union[int, float, Criteria] + :param data_type: the desired data-type for the criteria_dataarray. 
+ :type data_type: Union[np.dtype, None], by default None + :return: criteria_dataarray: 4D DataArray containing the criteria + :rtype: criteria_dataarray: xr.DataArray + """ + return xr.DataArray( + np.full(cv.cost_volumes.shape, value, data_type), + coords={"row": cv.row.data, "col": cv.col.data, "disp_col": cv.disp_col.data, "disp_row": cv.disp_row.data}, + dims=["row", "col", "disp_col", "disp_row"], + ) + + +def get_criteria_dataarray(left_image: xr.Dataset, right_image: xr.Dataset, cv: xr.Dataset) -> xr.DataArray: + """ + This method fill the criteria dataarray with the different criteria obtained thanks to + the methods implemented in this file + """ + + # Allocate criteria dataarray + criteria_dataarray = allocate_criteria_dataarray(cv) + + if "msk" in left_image.data_vars: + + # Raise criteria PANDORA2D_MSK_PIXEL_LEFT_NODATA + # for points having no data in left mask, for each disparity + mask_left_no_data(left_image, cv.attrs["window_size"], criteria_dataarray) + # Raise criteria PANDORA2D_MSK_PIXEL_INVALIDITY_MASK_LEFT + # for points having invalid in left mask, for each disparity + mask_left_invalid(left_image, criteria_dataarray) + + if "msk" in right_image.data_vars: + + # Raise criteria PANDORA2D_MSK_PIXEL_RIGHT_NODATA + # for points having no data in right mask according to disparity value + mask_right_no_data(right_image, cv.attrs["window_size"], criteria_dataarray) + # Raise criteria PANDORA2D_MSK_PIXEL_INVALIDITY_MASK_RIGHT + # for points having invalid in right mask according to disparity value + mask_right_invalid(right_image, criteria_dataarray) + + # Raise criteria PANDORA2D_MSK_PIXEL_RIGHT_DISPARITY_OUTSIDE + # for points for which window is outside right image according to disparity value + mask_disparity_outside_right_image(cv.attrs["offset_row_col"], criteria_dataarray) + + # Raise criteria PANDORA2D_MSK_PIXEL_LEFT_BORDER + # on the border according to offset value, for each disparity + mask_border(cv.attrs["offset_row_col"], criteria_dataarray) + + # Get columns disparity grid + d_min_col_grid = left_image["col_disparity"].sel(band_disp="min").data.copy() + d_max_col_grid = left_image["col_disparity"].sel(band_disp="max").data.copy() + + # Get rows disparity grid + d_min_row_grid = left_image["row_disparity"].sel(band_disp="min").data.copy() + d_max_row_grid = left_image["row_disparity"].sel(band_disp="max").data.copy() + + # Put PANDORA2D_MSK_PIXEL_DISPARITY_UNPROCESSED + # on points for which corresponding disparity is not processed + set_unprocessed_disp(criteria_dataarray, d_min_col_grid, d_max_col_grid, d_min_row_grid, d_max_row_grid) + + return criteria_dataarray + + +def set_unprocessed_disp( + criteria_dataarray: xr.DataArray, + min_grid_col: NDArray[np.floating], + max_grid_col: NDArray[np.floating], + min_grid_row: NDArray[np.floating], + max_grid_row: NDArray[np.floating], +): + """ + This method sets PANDORA2D_MSK_PIXEL_DISPARITY_UNPROCESSED to points for disparities that will not be processed, + based on the disparity grids provided. 
+ + :param criteria_dataarray: 4D DataArray containing the criteria + :type criteria_dataarray: xr.DataArray 4D + :param min_grid_col: grid of min disparity col + :type min_grid_col: NDArray[np.floating] + :param max_grid_col: grid of max disparity col + :type max_grid_col: NDArray[np.floating] + :param min_grid_row: grid of min disparity row + :type min_grid_row: NDArray[np.floating] + :param max_grid_row: grid of max disparity row + :type max_grid_row: NDArray[np.floating] + """ + # Check col disparity + set_out_of_col_disparity_range_to_other_value( + criteria_dataarray, min_grid_col, max_grid_col, Criteria.PANDORA2D_MSK_PIXEL_DISPARITY_UNPROCESSED + ) + # Check row disparity + set_out_of_row_disparity_range_to_other_value( + criteria_dataarray, min_grid_row, max_grid_row, Criteria.PANDORA2D_MSK_PIXEL_DISPARITY_UNPROCESSED + ) + + +def mask_border(offset: int, criteria_dataarray: xr.DataArray) -> None: + """ + This method raises PANDORA2D_MSK_PIXEL_LEFT_BORDER criteria on the edges of the criteria_dataarray + for each of the disparities. + + PANDORA2D_MSK_PIXEL_LEFT_BORDER criteria is non-cumulative, so this method will be called last. + + :param offset: offset + :type offset: int + :param criteria_dataarray: 4D xarray.DataArray with all criteria + :type criteria_dataarray: 4D xarray.DataArray + """ + + if offset > 0: + + # Raise criteria 0 on border of criteria_disp_col according to offset value + criteria_dataarray.data[:offset, :, :, :] = Criteria.PANDORA2D_MSK_PIXEL_LEFT_BORDER + criteria_dataarray.data[-offset:, :, :, :] = Criteria.PANDORA2D_MSK_PIXEL_LEFT_BORDER + criteria_dataarray.data[:, :offset, :, :] = Criteria.PANDORA2D_MSK_PIXEL_LEFT_BORDER + criteria_dataarray.data[:, -offset:, :, :] = Criteria.PANDORA2D_MSK_PIXEL_LEFT_BORDER + + +def mask_disparity_outside_right_image(offset: int, criteria_dataarray: xr.DataArray) -> None: + """ + This method raises PANDORA2D_MSK_PIXEL_RIGHT_DISPARITY_OUTSIDE criteria for points with disparity dimension outside + the right image + + :param offset: offset + :type offset: int + :param criteria_dataarray: 4D xarray.DataArray with all criteria + :type criteria_dataarray: 4D xarray.DataArray + """ + col_coords = criteria_dataarray.col.values + row_coords = criteria_dataarray.row.values + + # Condition where the window is outside the image + condition = ( + (criteria_dataarray.row + criteria_dataarray.disp_row < row_coords[0] + offset) + | (criteria_dataarray.row + criteria_dataarray.disp_row > row_coords[-1] - offset) + | (criteria_dataarray.col + criteria_dataarray.disp_col < col_coords[0] + offset) + | (criteria_dataarray.col + criteria_dataarray.disp_col > col_coords[-1] - offset) + ) + + # Swapaxes to have same shape as cost_volumes and criteria_dataarray + condition_swap = condition.data.swapaxes(1, 3).swapaxes(1, 2) + + # Update criteria dataarray + criteria_dataarray.data[condition_swap] = ( + criteria_dataarray.data[condition_swap] | Criteria.PANDORA2D_MSK_PIXEL_RIGHT_DISPARITY_OUTSIDE + ) + + +def mask_left_no_data(left_image: xr.Dataset, window_size: int, criteria_dataaray: xr.DataArray) -> None: + """ + Set Criteria.PANDORA2D_MSK_PIXEL_LEFT_NODATA on pixels where a no_data is present in the window around its + position in the mask. + + :param left_image: left image with `msk` data var. 
+ :type left_image: xr.Dataset + :param window_size: window size + :type window_size: int + :param criteria_dataaray: criteria dataarray to update + :type criteria_dataaray: xr.DataArray + """ + dilated_mask = binary_dilation_msk(left_image, window_size) + criteria_dataaray.data[dilated_mask, ...] |= Criteria.PANDORA2D_MSK_PIXEL_LEFT_NODATA + + +def mask_right_no_data(img_right: xr.Dataset, window_size: int, criteria_dataarray: xr.DataArray) -> None: + """ + Set Criteria.PANDORA2D_MSK_PIXEL_RIGHT_NODATA on pixels where a no_data is present in the window around its + position in the mask shift by its disparity. + + :param img_right: right image with `msk` data var. + :type img_right: xr.Dataset + :param window_size: window size + :type window_size: int + :param criteria_dataarray: + :type criteria_dataarray: + """ + right_criteria_mask = np.full_like(img_right["msk"], Criteria.VALID, dtype=Criteria) + right_binary_mask = binary_dilation_msk(img_right, window_size) + right_criteria_mask[right_binary_mask] |= Criteria.PANDORA2D_MSK_PIXEL_RIGHT_NODATA + + apply_right_criteria_mask(criteria_dataarray, right_criteria_mask) + + +def mask_left_invalid(left_image: xr.Dataset, criteria_dataarray: xr.DataArray) -> None: + """ + This method raises PANDORA2D_MSK_PIXEL_INVALIDITY_MASK_LEFT criteria for points having + an invalid point in the left image mask. + A point is considered invalid if its value in the msk of the left image + is different from the values of the valid_pixels and no_data_mask attributes. + + :param left_image: left image with `msk` data var. + :type left_image: xr.Dataset + :param criteria_dataaray: criteria dataarray to update + :type criteria_dataaray: xr.DataArray + """ + invalid_left_mask = get_invalid_mask(left_image) + + criteria_dataarray.data[invalid_left_mask, ...] |= Criteria.PANDORA2D_MSK_PIXEL_INVALIDITY_MASK_LEFT + + +def mask_right_invalid(right_image: xr.Dataset, criteria_dataarray: xr.DataArray) -> None: + """ + This method raises PANDORA2D_MSK_PIXEL_INVALIDITY_MASK_RIGHT criteria for points having + an invalid point in the right image mask shift by its disparity. + A point is considered invalid if when we shift it by its disparity, the obtained value + is different from the values of the valid_pixels and no_data_mask attributes. + + :param right_image: right image with `msk` data var. + :type right_image: xr.Dataset + :param criteria_dataaray: criteria dataarray to update + :type criteria_dataaray: xr.DataArray + """ + right_criteria_mask = np.full_like(right_image["msk"], Criteria.VALID, dtype=Criteria) + + invalid_right_mask = get_invalid_mask(right_image) + + right_criteria_mask[invalid_right_mask] |= Criteria.PANDORA2D_MSK_PIXEL_INVALIDITY_MASK_RIGHT + + apply_right_criteria_mask(criteria_dataarray, right_criteria_mask) + + +def get_invalid_mask(image: xr.Dataset) -> NDArray: + """ + Get mask for points of the image that are neither valid + or no data. + + :param image: image with `msk` data var. + :type image: xr.Dataset + :return: invalid_mask: mask containing invalid points + :rtype: invalid_mask: NDArray + """ + + invalid_mask = (image.msk.data != image.attrs["no_data_mask"]) & (image.msk.data != image.attrs["valid_pixels"]) + return invalid_mask + + +def apply_right_criteria_mask(criteria_dataarray: xr.DataArray, right_criteria_mask: NDArray): + """ + This method apply right_criteria_mask array on criteria_dataarray according + to row and column disparities. 
+ + :param criteria_dataaray: criteria dataarray to update + :type criteria_dataaray: xr.DataArray + :param right_criteria_mask: mask to apply to criteria dataarray + :type right_criteria_mask: np.NDArray + """ + for row_disp, col_disp in itertools.product( + criteria_dataarray.coords["disp_row"], criteria_dataarray.coords["disp_col"] + ): + row_disp, col_disp = row_disp.data, col_disp.data + # We arrange tests to avoid the slice [:0], which doesn’t work, while [0:] is fine. + msk_row_slice = np.s_[:row_disp] if row_disp < 0 else np.s_[row_disp:] + msk_col_slice = np.s_[:col_disp] if col_disp < 0 else np.s_[col_disp:] + + criteria_row_slice = np.s_[-row_disp:] if row_disp <= 0 else np.s_[:-row_disp] + criteria_col_slice = np.s_[-col_disp:] if col_disp <= 0 else np.s_[:-col_disp] + criteria_dataarray.loc[ + { + "row": criteria_dataarray.coords["row"][criteria_row_slice], + "col": criteria_dataarray.coords["col"][criteria_col_slice], + "disp_col": col_disp, + "disp_row": row_disp, + } + ] |= right_criteria_mask[msk_row_slice, msk_col_slice] diff --git a/pandora2d/disparity/disparity.py b/pandora2d/disparity/disparity.py index 9a24030..bcd8c3e 100644 --- a/pandora2d/disparity/disparity.py +++ b/pandora2d/disparity/disparity.py @@ -25,12 +25,12 @@ from typing import Dict, Tuple, Callable from json_checker import Or, And, Checker -from pandora.margins.descriptors import NullMargins -from pandora.margins import Margins - import numpy as np import xarray as xr +from pandora.margins.descriptors import NullMargins +from pandora.margins import Margins + class Disparity: """ @@ -269,6 +269,7 @@ def compute_disp_maps(self, cost_volumes: xr.Dataset) -> Tuple[np.ndarray, np.nd score_map = self.get_score(maps_min_row, np.min) invalid_mc = np.all(indices_nan, axis=(2, 3)) + cost_volumes_user["cost_volumes"].data[indices_nan] = np.nan disp_map_col = disp_map_col.astype("float32") disp_map_row = disp_map_row.astype("float32") diff --git a/pandora2d/estimation/estimation.py b/pandora2d/estimation/estimation.py index d7a933d..30c7c32 100644 --- a/pandora2d/estimation/estimation.py +++ b/pandora2d/estimation/estimation.py @@ -104,7 +104,7 @@ def desc(self) -> None: print(f"{self._estimation_method} estimation measure") @abstractmethod - def compute_estimation(self, img_left: xr.Dataset, img_right: xr.Dataset) -> Tuple[list, list, np.ndarray, dict]: + def compute_estimation(self, img_left: xr.Dataset, img_right: xr.Dataset) -> Tuple[Dict, Dict, np.ndarray, dict]: """ Compute the phase cross correlation method @@ -114,26 +114,26 @@ def compute_estimation(self, img_left: xr.Dataset, img_right: xr.Dataset) -> Tup :param img_right: xarray.Dataset containing : - im : 2D (row, col) xarray.DataArray :type img_right: xr.Dataset - :return:row disparity: list - col disparity: list + :return:row disparity: Dict + col disparity: Dict Calculated shifts: list Extra information about estimation : dict - :rtype: list, list, np.ndarray, dict + :rtype: dict, dict, np.ndarray, dict """ @staticmethod def update_cfg_with_estimation( - cfg: Dict, disp_col: list, disp_row: list, shifts: np.ndarray, extra_dict: dict = None + cfg: Dict, disp_col: Dict, disp_row: Dict, shifts: np.ndarray, extra_dict: dict = None ) -> Dict: """ Save calculated shifts in a configuration dictionary :param cfg: user configuration :type cfg: dict - :param disp_col: list of min and max disparity in column - :type disp_col: [int, int] - :param disp_row: list of min and max disparity in row - :type disp_row: [int, int] + :param disp_col: dict with init and 
range for disparity in column + :type disp_col: {"init" : int, "range" : int >= 0} + :param disp_row: dict with init and range for disparity in row + :type disp_row: {"init" : int, "range" : int >= 0} :param shifts: computed global shifts between left and right :type shifts: [np.float32, np.float32] :param extra_dict: Dictionary containing extra information about estimation diff --git a/pandora2d/estimation/phase_cross_correlation.py b/pandora2d/estimation/phase_cross_correlation.py index c295e95..e4b3e0c 100644 --- a/pandora2d/estimation/phase_cross_correlation.py +++ b/pandora2d/estimation/phase_cross_correlation.py @@ -39,10 +39,6 @@ class PhaseCrossCorrelation(estimation.AbstractEstimation): PhaseCrossCorrelation class allows to perform estimation """ - _range_col = None - _range_row = None - _sample_factor = None - # Default configuration, do not change these values _RANGE_COL = 5 _RANGE_ROW = 5 @@ -94,7 +90,7 @@ def check_conf(self, cfg: Dict) -> Dict: return cfg - def compute_estimation(self, img_left: xr.Dataset, img_right: xr.Dataset) -> Tuple[list, list, np.ndarray, dict]: + def compute_estimation(self, img_left: xr.Dataset, img_right: xr.Dataset) -> Tuple[Dict, Dict, np.ndarray, dict]: """ Compute the phase cross correlation method @@ -120,14 +116,8 @@ def compute_estimation(self, img_left: xr.Dataset, img_right: xr.Dataset) -> Tup # reformat outputs phasediff = "{:.{}e}".format(phasediff, 8) # -shifts because of pandora2d convention - min_col = round(-shifts[1]) - int(self._range_col) - max_col = round(-shifts[1]) + int(self._range_col) - - min_row = round(-shifts[0]) - int(self._range_row) - max_row = round(-shifts[0]) + int(self._range_row) - - row_disparity = [min_row, max_row] - col_disparity = [min_col, max_col] + row_disparity = {"init": round(-shifts[0]), "range": int(self._range_row)} + col_disparity = {"init": round(-shifts[1]), "range": int(self._range_col)} logging.info("Estimation result is %s in columns and %s in row", -shifts[1], -shifts[0]) logging.debug("Translation invariant normalized RMS error between left and right is %s", error) diff --git a/pandora2d/img_tools.py b/pandora2d/img_tools.py index ffbff16..eb48c78 100644 --- a/pandora2d/img_tools.py +++ b/pandora2d/img_tools.py @@ -34,10 +34,10 @@ from xarray import Coordinate as Coordinates import copy -from collections.abc import Sequence -from typing import List, Dict, Union, NamedTuple, Any - +from typing import List, Dict, Union, NamedTuple, Any, Tuple from math import floor +from numpy.typing import NDArray + import xarray as xr import numpy as np from scipy.ndimage import shift, zoom @@ -81,15 +81,15 @@ def create_datasets_from_inputs(input_config: Dict, roi: Dict = None, estimation if estimation_cfg is None: check_disparities(input_config) else: - input_config["col_disparity"] = [-9999, -9999] - input_config["row_disparity"] = [-9999, -9999] + input_config["col_disparity"] = {"init": -9999, "range": 0} + input_config["row_disparity"] = {"init": -9999, "range": 0} return Datasets( pandora_img_tools.create_dataset_from_inputs(input_config["left"], roi).pipe( - add_left_disparity_grid, input_config + add_disparity_grid, input_config["col_disparity"], input_config["row_disparity"] ), pandora_img_tools.create_dataset_from_inputs(input_config["right"], roi).pipe( - add_right_disparity_grid, input_config + add_disparity_grid, input_config["col_disparity"], input_config["row_disparity"], True ), ) @@ -106,7 +106,6 @@ def check_disparities(input_config: Dict) -> None: check_disparity_presence(input_config) for 
disparity in [input_config["col_disparity"], input_config["row_disparity"]]:
check_disparity_types(disparity)
- check_min_max_disparity(disparity)
def check_disparity_presence(input_config):
@@ -127,84 +126,120 @@ def check_disparity_presence(input_config):
def check_disparity_types(disparity: Any) -> None:
"""
- Check that disparity is a Sequence of length 2.
+ Check that disparity is a dictionary with keys "init" and "range"
+ where "init" is either:
+
+ - an integer
+ - a path to a grid with integer values
+
:param disparity: disparity to check
:type disparity: Any
:raises SystemExit: if it does not meet requirements
"""
- if disparity is None or not isinstance(disparity, Sequence) or len(disparity) != 2:
- raise ValueError("Disparity should be iterable of length 2", disparity)
+ # Check disparity type
+ if disparity is None or not isinstance(disparity, Dict):
+ raise ValueError("Disparity should be a dictionary")
-def check_min_max_disparity(disparity: List[int]) -> None:
- """
- Check that min disparity is lower than max disparity.
+ # Check that dictionary keys are correct
+ if not set(disparity.keys()) == {"init", "range"}:
+ raise ValueError("Disparity dictionary should contains keys : init and range", disparity)
+ # Check that init is an integer or a path to a grid
+ if not isinstance(disparity["init"], (int, str)):
+ raise ValueError("Disparity init should be an integer or a path to a grid")
- :param disparity: disparity to check
- :type disparity: List[int]
+ # Check that range value is a positive integer
+ if disparity["range"] < 0 or not isinstance(disparity["range"], int):
+ raise ValueError("Disparity range should be an integer greater or equal to 0")
- :raises SystemExit: if min > max
- """
- if disparity[0] > disparity[1]:
- raise ValueError(f"Min disparity ({disparity[0]}) should be lower than Max disparity ({disparity[1]})")
-
-def add_left_disparity_grid(dataset: xr.Dataset, configuration: Dict) -> xr.Dataset:
+def add_disparity_grid(dataset: xr.Dataset, col_disparity: Dict, row_disparity: Dict, right=False):
"""
- Add left disparity to dataset.
+ Add disparity to dataset
- :param dataset: dataset to add disparity grid to
+ :param dataset: xarray dataset
:type dataset: xr.Dataset
- :param configuration: configuration with information about disparity
- :type configuration: Dict
+ :param col_disparity: Disparity interval for columns
+ :type col_disparity: Dict
+ :param row_disparity: Disparity interval for rows
+ :type row_disparity: Dict
+ :param right: indicates whether the disparity grid is added to the right dataset
+ :type right: bool
+
:return: dataset : updated dataset
:rtype: xr.Dataset
"""
- col_disparity = configuration["col_disparity"]
- row_disparity = configuration["row_disparity"]
- return add_disparity_grid(dataset, col_disparity, row_disparity)
+ # Creates min and max disparity grids
+ col_disp_min_max, col_disp_interval = get_min_max_disp_from_dicts(dataset, col_disparity, right)
+ row_disp_min_max, row_disp_interval = get_min_max_disp_from_dicts(dataset, row_disparity, right)
-def add_right_disparity_grid(dataset: xr.Dataset, configuration: Dict) -> xr.Dataset:
- """
- Add right disparity to dataset.
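For readers skimming the new configuration format, the checks above boil down to the following standalone sketch (simplified, with shortened error messages; not the module's exact code):

```python
from typing import Any


def validate_disparity(disparity: Any) -> None:
    """Sketch of the "init"/"range" validation performed by check_disparity_types."""
    if disparity is None or not isinstance(disparity, dict):
        raise ValueError("Disparity should be a dictionary")
    if set(disparity.keys()) != {"init", "range"}:
        raise ValueError("Disparity dictionary should contain keys: init and range")
    if not isinstance(disparity["init"], (int, str)):
        raise ValueError("Disparity init should be an integer or a path to a grid")
    if not isinstance(disparity["range"], int) or disparity["range"] < 0:
        raise ValueError("Disparity range should be an integer greater than or equal to 0")


validate_disparity({"init": 0, "range": 2})                                          # constant initial disparity
validate_disparity({"init": "./maricopa/init_row_disparity_grid.tif", "range": 5})   # grid of initial disparities
```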
+ # Add disparity grids to dataset + for key, disparity_data, source in zip( + ["col_disparity", "row_disparity"], [col_disp_min_max, row_disp_min_max], [col_disp_interval, row_disp_interval] + ): + dataset[key] = xr.DataArray( + disparity_data, + dims=["band_disp", "row", "col"], + coords={"band_disp": ["min", "max"]}, + ) - :param dataset: dataset to add disparity grid to - :type dataset: xr.Dataset - :param configuration: configuration with information about disparity - :type configuration: Dict - :return: dataset : updated dataset - :rtype: xr.Dataset - """ - col_disparity = sorted(-1 * value for value in configuration["col_disparity"]) - row_disparity = sorted(-1 * value for value in configuration["row_disparity"]) - return add_disparity_grid(dataset, col_disparity, row_disparity) + dataset.attrs[f"{key}_source"] = source + return dataset -def add_disparity_grid(dataset: xr.Dataset, col_disparity: List[int], row_disparity: List[int]) -> xr.Dataset: +def get_min_max_disp_from_dicts(dataset: xr.Dataset, disparity: Dict, right: bool = False) -> Tuple[NDArray, List]: """ - Add disparity to dataset + Transforms input disparity dicts with constant init into min/max disparity grids :param dataset: xarray dataset :type dataset: xr.Dataset - :param col_disparity: Disparity interval for columns - :type col_disparity: List of ints - :param row_disparity: Disparity interval for rows - :type row_disparity: List of ints - - :return: dataset : updated dataset - :rtype: xr.Dataset + :param disparity: input disparity + :type disparity: Dict + :param right: indicates whether the disparity grid is added to the right dataset + :type right: bool + :return: 3D numpy array containing min/max disparity grids and list with disparity source + :rtype: Tuple[NDArray, List] """ - shape = (dataset.sizes["row"], dataset.sizes["col"]) - for key, disparity_interval in zip(["col_disparity", "row_disparity"], [col_disparity, row_disparity]): - dataset[key] = xr.DataArray( - np.array([np.full(shape, disparity) for disparity in disparity_interval]), - dims=["band_disp", "row", "col"], - coords={"band_disp": ["min", "max"]}, + + # Creates min and max disparity grids if initial disparity is constant (int) + if isinstance(disparity["init"], int): + + shape = (dataset.sizes["row"], dataset.sizes["col"]) + + disp_interval = [ + disparity["init"] * pow(-1, right) - disparity["range"], + disparity["init"] * pow(-1, right) + disparity["range"], + ] + + disp_min_max = np.array([np.full(shape, disparity) for disparity in disp_interval]) + + # Creates min and max disparity grids if initial disparities are variable (grid) + elif isinstance(disparity["init"], str): + + # Get dataset coordinates to select correct zone of disparity grids if we are using a ROI + rows = dataset.row.data + cols = dataset.col.data + + # Get disparity data + disp_data = pandora_img_tools.rasterio_open(disparity["init"]).read()[ + :, rows[0] : rows[-1] + 1, cols[0] : cols[-1] + 1 + ] + + # Use disparity data to creates min/max grids + disp_min_max = np.squeeze( + np.array( + [ + disp_data * pow(-1, right) - disparity["range"], + disp_data * pow(-1, right) + disparity["range"], + ] + ) ) - dataset.attrs[f"{key}_source"] = disparity_interval - return dataset + + disp_interval = [np.min(disp_min_max[0, ::]), np.max(disp_min_max[1, ::])] + + return disp_min_max, disp_interval def shift_disp_row_img(img_right: xr.Dataset, dec_row: int) -> xr.Dataset: @@ -246,7 +281,7 @@ def shift_disp_row_img(img_right: xr.Dataset, dec_row: int) -> xr.Dataset: return 
img_right_shift -def get_roi_processing(roi: dict, col_disparity: List[int], row_disparity: List[int]) -> dict: +def get_roi_processing(roi: dict, col_disparity: Dict, row_disparity: Dict) -> dict: """ Return a roi which takes disparities into account. Update cfg roi with new margins. @@ -258,20 +293,34 @@ def get_roi_processing(roi: dict, col_disparity: List[int], row_disparity: List[ "margins": [, , , ] with margins : left, up, right, down - :param col_disparity: min and max disparities for columns. - :type col_disparity: List[int] - :param row_disparity: min and max disparities for rows. - :type row_disparity: List[int] + :param col_disparity: init and range for disparities in columns. + :type col_disparity: Dict + :param row_disparity: init and range for disparities in rows. + :type row_disparity: Dict :type roi: Dict """ new_roi = copy.deepcopy(roi) - new_roi["margins"] = ( - max(abs(col_disparity[0]), roi["margins"][0]), - max(abs(row_disparity[0]), roi["margins"][1]), - max(abs(col_disparity[1]), roi["margins"][2]), - max(abs(row_disparity[1]), roi["margins"][3]), - ) + if isinstance(col_disparity["init"], str) and isinstance(row_disparity["init"], str): + + # Read disparity grids + disparity_row_init = pandora_img_tools.rasterio_open(row_disparity["init"]).read() + disparity_col_init = pandora_img_tools.rasterio_open(col_disparity["init"]).read() + + new_roi["margins"] = ( + int(max(abs(np.min(disparity_col_init - col_disparity["range"])), roi["margins"][0])), + int(max(abs(np.min(disparity_row_init - row_disparity["range"])), roi["margins"][1])), + int(max(abs(np.max(disparity_col_init + col_disparity["range"])), roi["margins"][2])), + int(max(abs(np.max(disparity_row_init + row_disparity["range"])), roi["margins"][3])), + ) + + else: + new_roi["margins"] = ( + max(abs(col_disparity["init"] - col_disparity["range"]), roi["margins"][0]), + max(abs(row_disparity["init"] - row_disparity["range"]), roi["margins"][1]), + max(abs(col_disparity["init"] + col_disparity["range"]), roi["margins"][2]), + max(abs(row_disparity["init"] + row_disparity["range"]), roi["margins"][3]), + ) # Update user ROI with new margins. roi["margins"] = new_roi["margins"] @@ -340,7 +389,7 @@ def remove_roi_margins(dataset: xr.Dataset, cfg: Dict): def row_zoom_img( - img: np.ndarray, ny: int, subpix: int, coords: Coordinates, ind: int, no_data: Union[int, str] + img: np.ndarray, ny: int, subpix: int, coords: Coordinates, ind: int, no_data: Union[int, str], order: int = 1 ) -> xr.Dataset: """ Return a list that contains the shifted right images in row @@ -359,20 +408,24 @@ def row_zoom_img( :type ind: int :param no_data: no_data value in img :type no_data: Union[int, str] + :param order: The order of the spline interpolation, default is 1. The order has to be in the range 0-5. + :type order: int, optional :return: an array that contains the shifted right images in row :rtype: array of xarray.Dataset """ shift = 1 / subpix # For each index, shift the right image for subpixel precision 1/subpix*index - data = zoom(img, ((ny * subpix - (subpix - 1)) / float(ny), 1), order=1)[ind::subpix, :] + data = zoom(img, ((ny * subpix - (subpix - 1)) / float(ny), 1), order=order)[ind::subpix, :] # Add a row full of no data at the end of data have the same shape as img # It enables to use Pandora's compute_cost_volume() methods, # which only accept left and right images of the same shape. 
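To make the margin update above concrete, here is the constant-disparity branch reduced to toy numbers (the grid branch does the same after reading the rasters and taking global minima and maxima):

```python
# Assumed toy values: user ROI margins are (left, up, right, down)
roi_margins = (2, 2, 2, 2)
col_disparity = {"init": 1, "range": 3}   # columns explored in [-2, 4]
row_disparity = {"init": 0, "range": 2}   # rows explored in [-2, 2]

new_margins = (
    max(abs(col_disparity["init"] - col_disparity["range"]), roi_margins[0]),  # left
    max(abs(row_disparity["init"] - row_disparity["range"]), roi_margins[1]),  # up
    max(abs(col_disparity["init"] + col_disparity["range"]), roi_margins[2]),  # right
    max(abs(row_disparity["init"] + row_disparity["range"]), roi_margins[3]),  # down
)
print(new_margins)  # (2, 2, 4, 2)
```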
data = np.pad(data, ((0, 1), (0, 0)), "constant", constant_values=no_data) - row = np.arange(coords.get("row")[0] + shift * ind, coords.get("row")[-1] + 1, step=1) # type: np.ndarray + row = np.arange( + coords.get("row").values[0] + shift * ind, coords.get("row").values[-1] + 1, step=1 + ) # type: np.ndarray return xr.Dataset( {"im": (["row", "col"], data)}, @@ -381,7 +434,7 @@ def row_zoom_img( def col_zoom_img( - img: np.ndarray, nx: int, subpix: int, coords: Coordinates, ind: int, no_data: Union[int, str] + img: np.ndarray, nx: int, subpix: int, coords: Coordinates, ind: int, no_data: Union[int, str], order: int = 1 ) -> xr.Dataset: """ Return a list that contains the shifted right images in col @@ -400,27 +453,31 @@ def col_zoom_img( :type ind: int :param no_data: no_data value in img :type no_data: Union[int, str] + :param order: The order of the spline interpolation, default is 1. The order has to be in the range 0-5. + :type order: int, optional :return: an array that contains the shifted right images in col :rtype: array of xarray.Dataset """ shift = 1 / subpix # For each index, shift the right image for subpixel precision 1/subpix*index - data = zoom(img, (1, (nx * subpix - (subpix - 1)) / float(nx)), order=1)[:, ind::subpix] + data = zoom(img, (1, (nx * subpix - (subpix - 1)) / float(nx)), order=order)[:, ind::subpix] # Add a col full of no data at the end of data to have the same shape as img # It enables to use Pandora's compute_cost_volume() methods, # which only accept left and right images of the same shape. data = np.pad(data, ((0, 0), (0, 1)), "constant", constant_values=no_data) - col = np.arange(coords.get("col")[0] + shift * ind, coords.get("col")[-1] + 1, step=1) # type: np.ndarray + col = np.arange( + coords.get("col").values[0] + shift * ind, coords.get("col").values[-1] + 1, step=1 + ) # type: np.ndarray return xr.Dataset( {"im": (["row", "col"], data)}, coords={"row": coords.get("row"), "col": col}, ) -def shift_subpix_img(img_right: xr.Dataset, subpix: int, row: bool = True) -> List[xr.Dataset]: +def shift_subpix_img(img_right: xr.Dataset, subpix: int, row: bool = True, order: int = 1) -> List[xr.Dataset]: """ Return an array that contains the shifted right images @@ -430,6 +487,8 @@ def shift_subpix_img(img_right: xr.Dataset, subpix: int, row: bool = True) -> Li :type subpix: int :param column: column to shift (otherwise row) :type column: bool + :param order: The order of the spline interpolation, default is 1. The order has to be in the range 0-5. + :type order: int, optional :return: an array that contains the shifted right images :rtype: array of xarray.Dataset """ @@ -446,6 +505,7 @@ def shift_subpix_img(img_right: xr.Dataset, subpix: int, row: bool = True) -> Li img_right.coords, ind, img_right.attrs["no_data_img"], + order, ).assign_attrs(img_right.attrs) ) else: @@ -457,6 +517,7 @@ def shift_subpix_img(img_right: xr.Dataset, subpix: int, row: bool = True) -> Li img_right.coords, ind, img_right.attrs["no_data_img"], + order, ).assign_attrs(img_right.attrs) ) diff --git a/pandora2d/interpolation_filter/__init__.py b/pandora2d/interpolation_filter/__init__.py index 1b32a3f..09dd05f 100644 --- a/pandora2d/interpolation_filter/__init__.py +++ b/pandora2d/interpolation_filter/__init__.py @@ -22,4 +22,4 @@ """ from .interpolation_filter import AbstractFilter -from . import bicubic +from . 
import bicubic, cardinal_sine diff --git a/pandora2d/interpolation_filter/bicubic.py b/pandora2d/interpolation_filter/bicubic.py index c7c6b50..e6884f6 100644 --- a/pandora2d/interpolation_filter/bicubic.py +++ b/pandora2d/interpolation_filter/bicubic.py @@ -16,8 +16,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # -"""This module contains bicubic interpolation filter. -""" +"""This module contains bicubic interpolation filter.""" + from functools import lru_cache import numpy as np @@ -36,6 +36,8 @@ class Bicubic(AbstractFilter): _ALPHA = -0.5 _SIZE = 4 + schema = {"method": "bicubic"} + @property def margins(self) -> Margins: """Return filter's Margins.""" diff --git a/pandora2d/interpolation_filter/cardinal_sine.py b/pandora2d/interpolation_filter/cardinal_sine.py new file mode 100644 index 0000000..a7e0959 --- /dev/null +++ b/pandora2d/interpolation_filter/cardinal_sine.py @@ -0,0 +1,107 @@ +# Copyright (c) 2024 Centre National d'Etudes Spatiales (CNES). +# This file is part of PANDORA2D +# +# https://github.com/CNES/Pandora2D +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""This module contains cardinal sine interpolation filter.""" + +from __future__ import annotations + +from typing import Dict + +import numpy as np +from json_checker import And, OptionalKey +from numpy.typing import NDArray + +from pandora.margins import Margins + +from .interpolation_filter import AbstractFilter + + +@AbstractFilter.register_subclass("sinc") +class CardinalSine(AbstractFilter): + """Implementation of the Normalized Cardinal Sine filter.""" + + schema = {"method": "sinc", OptionalKey("size"): And(int, lambda a: 6 <= a <= 21)} + + def __init__(self, cfg: Dict, fractional_shift: float = 0.5): + """ + + :param cfg: config + :type cfg: dict + :param fractional_shift: interval between each interpolated point, sometimes referred to as precision. + Expected value in the range [0,1[. 
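The `filter` entry of the refinement configuration is now a dictionary validated against a json_checker schema (see `check_conf` further below). A small sketch of what the sinc schema above accepts, assuming the 6–21 size bounds shown:

```python
from json_checker import And, Checker, OptionalKey

# Same shape as the schema above: "size" is optional and must lie in [6, 21]
schema = {"method": "sinc", OptionalKey("size"): And(int, lambda a: 6 <= a <= 21)}

Checker(schema).validate({"method": "sinc"})             # OK, the default size is used
Checker(schema).validate({"method": "sinc", "size": 9})  # OK
# Checker(schema).validate({"method": "sinc", "size": 42})  # would raise a json_checker error
```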
+ :type fractional_shift: float + """ + super().__init__(cfg) + self._check_fractional_shift(fractional_shift) + self._HALF_SIZE = self.cfg.get("size", 6) # pylint:disable=invalid-name + self._SIZE = 1 + self._HALF_SIZE * 2 # pylint:disable=invalid-name + self.fractional_shifts = np.arange(0, 1, fractional_shift) + self.coeffs = compute_coefficient_table(filter_size=self._HALF_SIZE, fractional_shifts=self.fractional_shifts) + + @staticmethod + def _check_fractional_shift(fractional_shift: float) -> None: + if not 0 <= fractional_shift < 1: + raise ValueError(f"fractional_shift greater than 0 and lower than 1 expected, got {fractional_shift}") + + @property + def margins(self) -> Margins: + """Return filter's Margins.""" + return Margins(self._HALF_SIZE, self._HALF_SIZE, self._HALF_SIZE, self._HALF_SIZE) + + def get_coeffs(self, fractional_shift: float) -> NDArray[np.floating]: + index = self.fractional_shifts.searchsorted(fractional_shift) + return self.coeffs[index] + + +def compute_coefficient_table(filter_size: int, fractional_shifts: NDArray[np.floating]) -> NDArray[np.floating]: + """ + Compute normalized cardinal sine coefficients windowed by a Gaussian. + + Will compute the `2 * filter_size + 1` coefficients for each given fractional_shift in `fractional_shifts` and + store them in the returned NDArray where: + + - Each row corresponds to a specific fractional shift value. + - Each column corresponds to a coefficient at a specific position. + + The Gaussian window width correspond to the size of the filter. + + :param filter_size: Half number of coefficients to compute. + :type filter_size: int + :param fractional_shifts: At which fractional shifts to compute coefficients + :type fractional_shifts: NDArray[np.floating] + :return: 2D array with computed coefficients + :rtype: NDArray[np.floating] + """ + sigma = filter_size + aux1 = (-2.0 * np.pi) / (sigma * sigma) + coeff_range = np.arange(-filter_size, filter_size + 1) + # The np.meshgrid function creates a grid of indices corresponding to the positions of the coefficients. It + # generates two 2D arrays (xv and yv) where each element represents a combination of indices. In this case, + # xv contains the indices of the coefficients, and yv contains the fractional shift values + xv, yv = np.meshgrid(coeff_range, fractional_shifts, sparse=True) + + # (yv- xv) gives: + # array([[ 6. , 5. , 4. , 3. , 2. , 1. , 0. , -1. , -2. , + # -3. , -4. , -5. , -6. ], + # [ 6.25, 5.25, 4.25, 3.25, 2.25, 1.25, 0.25, -0.75, -1.75, + # -2.75, -3.75, -4.75, -5.75], + # [ 6.5 , 5.5 , 4.5 , 3.5 , 2.5 , 1.5 , 0.5 , -0.5 , -1.5 , + # -2.5 , -3.5 , -4.5 , -5.5 ], + # [ 6.75, 5.75, 4.75, 3.75, 2.75, 1.75, 0.75, -0.25, -1.25, + # -2.25, -3.25, -4.25, -5.25]]) + aux = yv - xv + tab_coeffs = np.sinc(aux) * np.exp(aux1 * aux * aux) + return tab_coeffs / np.nansum(tab_coeffs, axis=1, keepdims=True) diff --git a/pandora2d/interpolation_filter/interpolation_filter.py b/pandora2d/interpolation_filter/interpolation_filter.py index b12a8dc..d022e23 100644 --- a/pandora2d/interpolation_filter/interpolation_filter.py +++ b/pandora2d/interpolation_filter/interpolation_filter.py @@ -19,6 +19,7 @@ """ This module contains functions associated to the interpolation filters. 
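As a quick sanity check of `compute_coefficient_table`, the coefficients for a single fractional shift can be reproduced in a few lines (same sinc-times-Gaussian weighting, normalised so the kernel sums to 1):

```python
import numpy as np

half_size = 6            # default "size" of the filter
sigma = half_size        # the Gaussian window width follows the filter size
fractional_shift = 0.5

# Distance between each coefficient position and the interpolated position
x = fractional_shift - np.arange(-half_size, half_size + 1)
coeffs = np.sinc(x) * np.exp(-2.0 * np.pi * x * x / (sigma * sigma))
coeffs /= coeffs.sum()

print(coeffs.shape)            # (13,) -> 2 * half_size + 1 coefficients
print(round(coeffs.sum(), 6))  # 1.0
```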
""" + from __future__ import annotations import logging from typing import Dict, Tuple, List @@ -26,6 +27,7 @@ import math import numpy as np +from json_checker import Checker from pandora.margins.descriptors import NullMargins @@ -37,11 +39,11 @@ class AbstractFilter(ABC): interpolation_filter_methods_avail: Dict = {} _interpolation_filter_method = None - cfg = None margins = NullMargins() _SIZE = 4 + schema = {} - def __new__(cls, filter_method: str | None = None): + def __new__(cls, cfg: dict | None = None, **kwargs): """ Return the plugin associated with the interpolation filter given in the configuration @@ -50,7 +52,8 @@ def __new__(cls, filter_method: str | None = None): """ if cls is AbstractFilter: - if isinstance(filter_method, str): + if isinstance(cfg["method"], str): + filter_method = cfg["method"] try: return super(AbstractFilter, cls).__new__(cls.interpolation_filter_methods_avail[filter_method]) except KeyError: @@ -58,6 +61,29 @@ def __new__(cls, filter_method: str | None = None): raise KeyError return super(AbstractFilter, cls).__new__(cls) + def __init__(self, cfg: Dict, **_) -> None: + """ + :param cfg: optional configuration, {} + :type cfg: dict + :return: None + """ + self.cfg = self.check_conf(cfg) + + @classmethod + def check_conf(cls, cfg: Dict) -> Dict: + """ + Check the refinement method configuration. + + :param cfg: user_config for refinement method + :type cfg: dict + :return: cfg: global configuration + :rtype: cfg: dict + """ + checker = Checker(cls.schema) + checker.validate(cfg) + + return cfg + def desc(self) -> None: """ Describes the interpolation filter @@ -141,7 +167,6 @@ def interpolate( eps = np.finfo(np.float32).eps for pos_col, pos_row in zip(*positions): - # get_coeffs method receives positive coefficients fractional_row = abs(math.modf(pos_row)[0]) fractional_col = abs(math.modf(pos_col)[0]) diff --git a/pandora2d/matching_cost/matching_cost.py b/pandora2d/matching_cost/matching_cost.py index d8c67f0..aea8a96 100644 --- a/pandora2d/matching_cost/matching_cost.py +++ b/pandora2d/matching_cost/matching_cost.py @@ -23,6 +23,7 @@ """ This module contains functions associated to the matching cost computation step. 
""" + import copy from typing import Dict, List, cast, Union from json_checker import And, Checker @@ -37,6 +38,10 @@ from pandora2d import img_tools import pandora2d.schema as cst_schema +from pandora2d.common import ( + set_out_of_row_disparity_range_to_other_value, + set_out_of_col_disparity_range_to_other_value, +) class MatchingCost: @@ -78,6 +83,7 @@ def cfg(self) -> Dict[str, Union[str, int, List[int]]]: "step": self._step, "window_size": self._window_size, "subpix": self._subpix, + "spline_order": self._spline_order, } @property @@ -110,6 +116,16 @@ def _subpix(self) -> int: """ return self.pandora_matching_cost_._subpix # pylint: disable=W0212 protected-access + @property + def _spline_order(self) -> int: + """ + Get spline_order, parameter specific to pandora + + :return: spline_order: spline_order used + :rtype: spline_order: int + """ + return self.pandora_matching_cost_._spline_order # pylint: disable=W0212 protected-access + @property def margins(self) -> Margins: """ @@ -204,8 +220,6 @@ def allocate_cost_volume_pandora( self, img_left: xr.Dataset, img_right: xr.Dataset, - grid_min_col: np.ndarray, - grid_max_col: np.ndarray, cfg: Dict, margins: Margins = None, ) -> None: @@ -217,10 +231,6 @@ def allocate_cost_volume_pandora( - im : 2D (row, col) xarray.DataArray - msk : 2D (row, col) xarray.DataArray :type img_left: xr.Dataset - :param grid_min_col: grid containing min disparities for columns. - :type grid_min_col: np.ndarray - :param grid_max_col: grid containing max disparities for columns. - :type grid_max_col: np.ndarray :param cfg: matching_cost computation configuration :type cfg: Dict :param margins: refinement margins @@ -229,32 +239,52 @@ def allocate_cost_volume_pandora( """ # Adapt Pandora matching cost configuration img_left.attrs["disparity_source"] = img_left.attrs["col_disparity_source"] + grid_min_col = img_left["col_disparity"].sel(band_disp="min").data.copy() + grid_max_col = img_left["col_disparity"].sel(band_disp="max").data.copy() if margins is not None: grid_min_col -= margins.left grid_max_col += margins.right + # Get updated ROI left margin for pandora method get_coordinates() + # To get right coordinates in cost_volume when initial left_margin > cfg["ROI"]["col"]["first"] + # We need to have left_margin = cfg["ROI"]["col"]["first"] + cfg_for_get_coordinates = copy.deepcopy(cfg) + if "ROI" in cfg: + cfg_for_get_coordinates["ROI"]["margins"] = ( + min(cfg["ROI"]["margins"][0], cfg["ROI"]["col"]["first"]), + cfg["ROI"]["margins"][1], + cfg["ROI"]["margins"][2], + cfg["ROI"]["margins"][3], + ) + # Initialize pandora an empty grid for cost volume - self.grid_ = self.pandora_matching_cost_.allocate_cost_volume(img_left, (grid_min_col, grid_max_col), cfg) + self.grid_ = self.pandora_matching_cost_.allocate_cost_volume( + img_left, (grid_min_col, grid_max_col), cfg_for_get_coordinates + ) # Compute validity mask to identify invalid points in cost volume self.grid_ = validity_mask(img_left, img_right, self.grid_) # Add ROI margins in attributes # Enables to compute cost volumes row coordinates later by using pandora.matching_cost.get_coordinates() + # Get updated ROI up margin for pandora method get_coordinates() + # To get right coordinates in cost_volume when initial up_margin > cfg["ROI"]["row"]["first"] + # We need to have up_margin = cfg["ROI"]["row"]["first"] if "ROI" in cfg: - self.grid_.attrs["ROI_margins"] = cfg["ROI"]["margins"] + self.grid_.attrs["ROI_margins_for_cv"] = ( + cfg["ROI"]["margins"][0], + min(cfg["ROI"]["margins"][1], 
cfg["ROI"]["row"]["first"]), + cfg["ROI"]["margins"][2], + cfg["ROI"]["margins"][3], + ) else: - self.grid_.attrs["ROI_margins"] = None + self.grid_.attrs["ROI_margins_for_cv"] = None def compute_cost_volumes( self, img_left: xr.Dataset, img_right: xr.Dataset, - grid_min_col: np.ndarray, - grid_max_col: np.ndarray, - grid_min_row: np.ndarray, - grid_max_row: np.ndarray, margins: Margins = None, ) -> xr.Dataset: """ @@ -287,8 +317,14 @@ def compute_cost_volumes( # Adapt Pandora matching cost configuration img_left.attrs["disparity_source"] = img_left.attrs["col_disparity_source"] + grid_min_col = img_left["col_disparity"].sel(band_disp="min").data.copy() + grid_max_col = img_left["col_disparity"].sel(band_disp="max").data.copy() + grid_min_row = img_left["row_disparity"].sel(band_disp="min").data.copy() + grid_max_row = img_left["row_disparity"].sel(band_disp="max").data.copy() if margins is not None: + grid_min_col -= margins.left + grid_max_col += margins.right grid_min_row -= margins.up grid_max_row += margins.down @@ -304,7 +340,7 @@ def compute_cost_volumes( row_index = None # Contains the shifted right images (with subpixel) - imgs_right_shift_subpixel = img_tools.shift_subpix_img(img_right, self._subpix) + imgs_right_shift_subpixel = img_tools.shift_subpix_img(img_right, self._subpix, order=self._spline_order) for idx, disp_row in enumerate(disps_row): i_right = int((disp_row % 1) * self._subpix) @@ -332,10 +368,10 @@ def compute_cost_volumes( self.pandora_matching_cost_.cv_masked(img_left, img_right_shift, cost_volume, grid_min_col, grid_max_col) # If first iteration, initialize cost_volumes dataset if idx == 0: - img_row_coordinates = img_left["im"].coords["row"] + img_row_coordinates = img_left["im"].coords["row"].values # Case without a ROI: we only take the step into account to compute row coordinates. - if self.grid_.attrs["ROI_margins"] is None: + if self.grid_.attrs["ROI_margins_for_cv"] is None: row_coords = np.arange(img_row_coordinates[0], img_row_coordinates[-1] + 1, self._step_row) # Case with a ROI: we use pandora get_coordinates() method to compute row coordinates. @@ -343,7 +379,7 @@ def compute_cost_volumes( # This ensures that the first point of the ROI given by the user is computed in the cost volume. else: row_coords = self.pandora_matching_cost_.get_coordinates( - margin=self.grid_.attrs["ROI_margins"][1], + margin=self.grid_.attrs["ROI_margins_for_cv"][1], img_coordinates=img_row_coordinates, step=self._step_row, ) @@ -369,6 +405,21 @@ def compute_cost_volumes( cost_volumes.attrs["step"] = self._step # Delete ROI_margins attributes which we used to calculate the row coordinates in the cost_volumes - del cost_volumes.attrs["ROI_margins"] + del cost_volumes.attrs["ROI_margins_for_cv"] + + set_out_of_row_disparity_range_to_other_value( + cost_volumes["cost_volumes"], + img_left["row_disparity"].sel(band_disp="min").data, + img_left["row_disparity"].sel(band_disp="max").data, + np.nan, + cost_volumes.attrs["row_disparity_source"], + ) + set_out_of_col_disparity_range_to_other_value( + cost_volumes["cost_volumes"], + img_left["col_disparity"].sel(band_disp="min").data, + img_left["col_disparity"].sel(band_disp="max").data, + np.nan, + cost_volumes.attrs["col_disparity_source"], + ) return cost_volumes diff --git a/pandora2d/profiling.py b/pandora2d/profiling.py new file mode 100644 index 0000000..669d2aa --- /dev/null +++ b/pandora2d/profiling.py @@ -0,0 +1,386 @@ +# Copyright (c) 2024 Centre National d'Etudes Spatiales (CNES). 
+# +# This file is part of PANDORA2D +# +# https://github.com/CNES/Pandora2D +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +""" +Contains functions for profiling pandora2d +""" +import datetime +import logging +import os +import shutil +import time +from dataclasses import dataclass +from functools import wraps +from multiprocessing import Pipe +from pathlib import Path +from threading import Thread +import csv +import matplotlib.pyplot as plt +import pandas as pd +import psutil +from matplotlib.backends.backend_pdf import PdfPages +from matplotlib.figure import Figure + +THREAD_TIMEOUT = 2 + + +@dataclass +class EpertModeConfig: + """ + Expert mode config class + """ + + enable: bool = False + + +class Data: + """ + Data class + """ + + def __init__(self): + self._data = [] + self.timestamp = datetime.datetime.now().strftime("%Y-%m-%d %Hh%Mm%Ss") + + def append(self, line): + self._data.append(line) + + def reset(self): + self._data.clear() + + @property + def timestamp(self): + return self._timestamp + + @timestamp.setter + def timestamp(self, value): + self._timestamp = value + + +expert_mode_config = EpertModeConfig() +data = Data() + + +def get_current_memory(): + """ + Get current memory of process + + :return: memory + :rtype: float + + """ + + # Use psutil to capture python process memory as well + process = psutil.Process() + process_memory = process.memory_info().rss + + # Convert nbytes size for logger (in MiB) + process_memory = float(process_memory) / 1000000 + + return process_memory + + +class MemProf(Thread): + """ + MemProf + + Profiling thread with time and memory performances in seconds and MiB + """ + + def __init__(self, pid, pipe, interval=0.1): + """ + Init function of Pandora2dMemProf + """ + super().__init__() + self.pipe = pipe + self.interval = interval + self.cpu_interval = 0.1 + self.process = psutil.Process(pid) + + def run(self): + """ + Run + """ + + try: + max_mem = 0 + max_cpu = 0 + + # tell parent profiling is ready + self.pipe.send(0) + stop = False + while not stop: + # Get memory + current_mem = self.process.memory_info().rss + max_mem = max(max_mem, current_mem) + + # Get cpu max + current_cpu = self.process.cpu_percent(interval=self.cpu_interval) + max_cpu = max(max_cpu, int(current_cpu)) + + stop = self.pipe.poll(self.interval) + + # Convert nbytes size for logger + self.pipe.send(float(max_mem) / 1000000) + self.pipe.send(max_cpu) + + except BrokenPipeError: + logging.debug("broken pipe error in log wrapper ") + + +def mem_time_profile(name=None, interval=0.1): + """ + Pandora2d profiling decorator + + :param: func: function to monitor + + """ + + def decorator_generator(func): + """ + Inner function + """ + + @wraps(func) + def wrapper_profile(*args, **kwargs): + """ + Profiling wrapper + + Generate profiling logs of function, run + + :return: func(*args, **kwargs) + + """ + if not expert_mode_config.enable: + return func(*args, **kwargs) + + # Launch memory profiling thread + child_pipe, parent_pipe = Pipe() + thread_monitoring = 
MemProf(os.getpid(), child_pipe, interval=interval) + thread_monitoring.start() + if parent_pipe.poll(THREAD_TIMEOUT): + parent_pipe.recv() + + start_time = time.perf_counter() + start_cpu_time = time.process_time() + + memory_start = get_current_memory() + + result = func(*args, **kwargs) + + total_time = time.perf_counter() - start_time + total_cpu_time = time.process_time() - start_cpu_time + + # end memprofiling monitoring + parent_pipe.send(0) + max_memory, max_cpu = None, None + if parent_pipe.poll(THREAD_TIMEOUT): + max_memory = parent_pipe.recv() + max_cpu = parent_pipe.recv() + + memory_end = get_current_memory() + + func_name = func.__name__.capitalize() if name is None else name + + # Prepare data to write to the CSV + performance_data = [func_name, total_time, total_cpu_time, max_memory, memory_start, memory_end, max_cpu] + + # Check if the file already exists + file_exists = os.path.exists(f"{data.timestamp}_profiling.csv") + + # Write to CSV using the csv module + with open(f"{data.timestamp}_profiling.csv", mode="a", newline="", encoding="utf-8") as file: + writer = csv.writer(file) + + # Write header only if the file does not exist + if not file_exists: + writer.writerow( + [ + "Function_name", + "Time (s)", + "CPU Time (s)", + "Max_Memory (MiB)", + "Start_Ram (MiB)", + "End_Ram (MiB)", + "Max_CPU", + ] + ) + + # Write the performance data + writer.writerow(performance_data) + + return result + + return wrapper_profile + + return decorator_generator + + +def generate_figure( + fig_type: str, + dataframe, + values=None, + title: str = "", + xlabel: str = "", + ylabel: str = "", +) -> Figure: + """ + Generic function to generate different types of plots. + + :param fig_type: Type of figure ('pie', 'box', 'barh') + :type fig_type: str + :param dataframe: DataFrame containing the data + :type dataframe: pd.DataFrame + :param values: Values for bar chart + + :param title: Title of the chart + :type title: str + :param xlabel: Label for x-axis + :type xlabel: str + :param ylabel: Label for y-axis + :type ylabel: str + :return: Performance graph + :rtype: plt.Figure + """ + fig = plt.figure(figsize=(12, 12)) + plt.tight_layout() + + if fig_type == "box": + dataframe.T.boxplot(vert=False, showfliers=False) + plt.xlabel(xlabel) + plt.ylabel(ylabel) + + # Get median and quartiles + stats = dataframe.T.describe() + for idx, col in enumerate(dataframe.T.columns): + q1 = stats[col]["25%"] + median = stats[col]["50%"] + q3 = stats[col]["75%"] + plt.text(median, idx + 1, f"Med: {median:.2f}", va="center", ha="center", color="black", fontsize=8) + plt.text(q1, idx + 1, f"Q1: {q1:.2f}", va="center", ha="center", color="blue", fontsize=8) + plt.text(q3, idx + 1, f"Q3: {q3:.2f}", va="center", ha="center", color="blue", fontsize=8) + + elif fig_type == "barh": + hbar = plt.barh(values, dataframe, alpha=0.6) + small_hbar = [f"{d:.2f}" if d <= (max(dataframe) / 2) else "" for d in dataframe] + large_hbar = [f"{d:.2f}" if d > (max(dataframe) / 2) else "" for d in dataframe] + plt.bar_label(hbar, small_hbar, padding=5, fmt="%.2f", color="black") + plt.bar_label(hbar, large_hbar, padding=-35, fmt="%.2f", color="black") + + plt.title(title) + return fig + + +def generate_summary(path_output: str, expert_mode_cfg: dict): + """ + Generate graphs referencing memory management and time for each step. 
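For context, the decorator above is meant to wrap pipeline steps; a minimal usage sketch, assuming the new module is importable as `pandora2d.profiling` and that the caller switches expert mode on:

```python
import time

from pandora2d.profiling import data, expert_mode_config, mem_time_profile

expert_mode_config.enable = True  # profiling is a no-op when disabled


@mem_time_profile(name="Toy step")
def toy_step():
    time.sleep(0.2)


toy_step()
# Measurements are appended to "<timestamp>_profiling.csv" in the working directory,
# which generate_summary() later aggregates into a PDF report.
print(data.timestamp)
```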
+ + :param path_output: output directory + :type path_output: str + :param expert_mode_cfg: Dictionary containing expert_mode parameters + :type expert_mode_cfg: dict + """ + + # Copy memory_profiling results in the correct folder + folder_name = Path(path_output) / expert_mode_cfg.get("folder_name") + Path.mkdir(folder_name, exist_ok=True) + + csv_data_path = f"{folder_name}/{data.timestamp}_profiling.csv" + + shutil.copy(f"{data.timestamp}_profiling.csv", csv_data_path) + os.remove(f"{data.timestamp}_profiling.csv") + + # Transform csv to a panda.DataFrame + resumed_performance_df = pd.read_csv(csv_data_path) + grouped = resumed_performance_df.groupby("Function_name") + + metrics_list = ["mean", "sum"] + + dict_perf = { + "Time": {"df": grouped["Time (s)"].agg(metrics_list), "unit": "seconds"}, # type: ignore + "Process time": {"df": grouped["CPU Time (s)"].agg(metrics_list), "unit": "seconds"}, # type: ignore + "Maximum_memory": {"df": grouped["Max_Memory (MiB)"].agg(metrics_list), "unit": "MiB"}, # type: ignore + "Start_RAM": {"df": grouped["Start_Ram (MiB)"].agg(metrics_list), "unit": "MiB"}, # type: ignore + "End_RAM": {"df": grouped["End_Ram (MiB)"].agg(metrics_list), "unit": "MiB"}, # type: ignore + "MAX_CPU": {"df": grouped["Max_CPU"].agg(metrics_list), "unit": "unit"}, # type: ignore + } + + # Time graphics + histo_mean_time = generate_figure( + "barh", + dict_perf["Time"]["df"]["mean"], # type: ignore + values=dict_perf["Time"]["df"].index, # type: ignore + title="Mean time", + ylabel="Function name", + ) + histo_total_time = generate_figure( + "barh", + dict_perf["Time"]["df"]["sum"], # type: ignore + values=dict_perf["Time"]["df"].index, # type: ignore + title="Total time", + ylabel="Function name", + ) + histo_mean_cpu_time = generate_figure( + "barh", + dict_perf["Process time"]["df"]["mean"], # type: ignore + values=dict_perf["Process time"]["df"].index, # type: ignore + title="Mean CPU time", + ylabel="Function name", + ) + histo_total_cpu_time = generate_figure( + "barh", + dict_perf["Process time"]["df"]["sum"], # type: ignore + values=dict_perf["Process time"]["df"].index, # type: ignore + title="Total CPU time", + ylabel="Function name", + ) + + # Memory graphics + max_cpu = generate_figure( + "box", + dict_perf["MAX_CPU"]["df"], + title="Max CPU", + xlabel=str(dict_perf["Maximum_memory"]["unit"]), + ylabel="Function name", + ) + + max_mem = generate_figure( + "box", + dict_perf["Maximum_memory"]["df"], + title="Maximum memory per task", + xlabel=str(dict_perf["Maximum_memory"]["unit"]), + ylabel="Function name", + ) + + # Calls graphics + occurrences = grouped["Function_name"].value_counts().reset_index() + occ = generate_figure( + "barh", + occurrences["count"], + values=occurrences["Function_name"], + title="Number of calls", + ylabel="Function name", + ) + + # Save all figures in PDF file + figures = [histo_mean_time, histo_total_time, histo_mean_cpu_time, histo_total_cpu_time, max_cpu, max_mem, occ] + pdf_filename = f"{folder_name}/{data.timestamp}_graph_perf.pdf" + with PdfPages(pdf_filename) as pdf: + for fig in figures: + pdf.savefig(fig) diff --git a/pandora2d/refinement/dichotomy.py b/pandora2d/refinement/dichotomy.py index 0af0430..5132584 100644 --- a/pandora2d/refinement/dichotomy.py +++ b/pandora2d/refinement/dichotomy.py @@ -42,11 +42,9 @@ class Dichotomy(refinement.AbstractRefinement): schema = { "refinement_method": And(str, lambda x: x in ["dichotomy"]), "iterations": And(int, lambda it: it > 0), - "filter": And(str, lambda x: x in ["sinc", 
"bicubic"]), + "filter": And(dict, lambda x: x["method"] in AbstractFilter.interpolation_filter_methods_avail), } - _filter = None - def __init__(self, cfg: dict = None, _: list = None, __: int = 5) -> None: """ :param cfg: optional configuration, {} @@ -55,9 +53,9 @@ def __init__(self, cfg: dict = None, _: list = None, __: int = 5) -> None: """ super().__init__(cfg) - - self._filter = AbstractFilter( # type: ignore[abstract] # pylint: disable=abstract-class-instantiated - self.cfg["filter"] + fractional_shift = 2 ** -self.cfg["iterations"] + self.filter = AbstractFilter( # type: ignore[abstract] # pylint: disable=abstract-class-instantiated + self.cfg["filter"], fractional_shift=fractional_shift ) @classmethod @@ -90,9 +88,9 @@ def margins(self): It will be used for ROI and for dichotomy window extraction from cost volumes. """ - return self._filter.margins + return self.filter.margins - def refinement_method( + def refinement_method( # pylint: disable=too-many-locals self, cost_volumes: xr.Dataset, disp_map: xr.Dataset, img_left: xr.Dataset, img_right: xr.Dataset ) -> Tuple[np.ndarray, np.ndarray, np.ndarray]: """ @@ -139,11 +137,24 @@ def refinement_method( invalid_disparity_map_mask = invalid_row_disparity_map_mask | invalid_col_disparity_map_mask cost_values[invalid_disparity_map_mask] = np.nan - # row_disparity_source and col_disparity_sources contain the user disparity range - row_disparity_source = cost_volumes.attrs["row_disparity_source"] - col_disparity_source = cost_volumes.attrs["col_disparity_source"] - - precisions = [1 / 2 ** (it + 1) for it in range(self.cfg["iterations"])] + # Get disparities grid + # Column's min, max disparities + disp_min_col = img_left["col_disparity"].sel(band_disp="min").data + disp_max_col = img_left["col_disparity"].sel(band_disp="max").data + # Row's min, max disparities + disp_min_row = img_left["row_disparity"].sel(band_disp="min").data + disp_max_row = img_left["row_disparity"].sel(band_disp="max").data + + # start iterations after subpixel precision: `subpixel.bit_length() - 1` found which power of 2 subpixel is, + # and we add 1 to start at next iteration + first_iteration = cost_volumes.attrs["subpixel"].bit_length() + precisions = [1 / 2**it for it in range(first_iteration, self.cfg["iterations"] + 1)] + if first_iteration >= 0: + logging.info( + "With subpixel of `%s` the `%s` first dichotomy iterations will be skipped.", + cost_volumes.attrs["subpixel"], + first_iteration - 1, + ) # Convert disparity maps to np.array to optimise performance row_map = row_map.to_numpy() @@ -152,18 +163,39 @@ def refinement_method( # See usage of np.nditer: # https://numpy.org/doc/stable/reference/arrays.nditer.html#modifying-array-values with np.nditer( - [cost_values, row_map, col_map], - op_flags=[["readwrite"], ["readwrite"], ["readwrite"]], + [cost_values, row_map, col_map, disp_min_row, disp_max_row, disp_min_col, disp_max_col], + op_flags=[ + ["readwrite"], + ["readwrite"], + ["readwrite"], + ["readonly"], + ["readonly"], + ["readonly"], + ["readonly"], + ], ) as iterators: - for cost_surface, (cost_value, disp_row_init, disp_col_init) in zip(cost_surfaces, iterators): + for cost_surface, ( + cost_value, + disp_row_init, + disp_col_init, + d_row_min, + d_row_max, + d_col_min, + d_col_max, + ) in zip(cost_surfaces, iterators): # Invalid value if np.isnan(cost_value): continue - # If the best candidate found at the disparity step is at the edge of the disparity range + # If the best candidate found at the disparity step is at the edge of the row 
disparity range # we do no enter the dichotomy loop - if (disp_row_init in row_disparity_source) or (disp_col_init in col_disparity_source): + if disp_row_init in (d_row_min, d_row_max): + continue + + # If the best candidate found at the disparity step is at the edge of the col disparity range + # we do no enter the dichotomy loop + if disp_col_init in (d_col_min, d_col_max): continue # pos_disp_col_init corresponds to the position in the cost surface @@ -200,17 +232,19 @@ def refinement_method( # Syntax disp_row_init[...] is for assign value back to row_map with np.nditer (pos_disp_row_init, pos_disp_col_init), disp_row_init[...], disp_col_init[...], cost_value[...] = ( search_new_best_point( - cost_surface.data, + cost_surface, precision, (disp_row_init, disp_col_init), # type: ignore # Reason: is 0 dim array (pos_disp_row_init, pos_disp_col_init), cost_value, # type: ignore # Reason: is 0 dim array - self._filter, + self.filter, cost_selection_method, ) ) - logging.info("Dichotomy precision reached: %s", precisions[-1]) + logging.info( + "Dichotomy precision reached: %s", precisions[-1] if precisions else 1 / 2 ** (first_iteration - 1) + ) return col_map, row_map, cost_values @@ -235,6 +269,7 @@ def __init__(self, cost_volumes: xr.Dataset): :type disparity_margins: Margins """ self.cost_volumes = cost_volumes + self.cost_volumes["cost_volumes"].attrs.update({"subpixel": cost_volumes.attrs["subpixel"]}) def __getitem__(self, item): """Get cost surface of coordinates item where item is (row, col).""" @@ -261,7 +296,7 @@ def all_same(sequence): def search_new_best_point( - cost_surface: np.ndarray, + cost_surface: xr.DataArray, precision: float, initial_disparity: Union[Tuple[np.floating, np.floating], Tuple[int, int]], initial_position: Union[Tuple[np.floating, np.floating], Tuple[int, int]], @@ -273,8 +308,8 @@ def search_new_best_point( Find best position and cost after interpolation of cost surface for given precision. :param cost_surface: Disparities in rows and cols of a point - :type cost_surface: np.ndarray - :param precision: subpixellic precision to use + :type cost_surface: xr.Dataarray with subpix attribute + :param precision: subpixellic disparity precision to use :type precision: float :param initial_disparity: initial disparities (disp_row, disp_col) :type initial_disparity: Union[Tuple[np.floating, np.floating], Tuple[int, int]] @@ -302,17 +337,32 @@ def search_new_best_point( disp_row_shifts = np.array([-1, -1, -1, 0, 0, 0, 1, 1, 1], dtype=np.float32) * precision disp_col_shifts = np.array([-1, 0, 1, -1, 0, 1, -1, 0, 1], dtype=np.float32) * precision + # Whatever the cost_surface.attrs["subpixel"] value, the first precision in the cost surface is always 0.5 + # Then we multiply by cost_surface.attrs["subpixel"] to get right new_cols and new_rows + + # When there is no subpixel (it equals to 1), precision shift and index shift match: + # the precision shift between two points is 1. So shifting from 0.5 precision corresponds to shift index of 0.5. + # But when there is a subpixel, they do not match anymore: + # in this case, the precision shift between two points is 1/subpix. + # So to get the index corresponding to a given precision shift, we need to multiply this value by subpix. + # For example when subix equals 2, the precision shift between two points is 0.5 while the index shift is still 1. 
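A small numeric illustration of the two points made in the comments above (iterations skipped according to the subpixel value, and index shift = precision shift × subpixel):

```python
iterations = 4

for subpixel in (1, 2, 4):
    first_iteration = subpixel.bit_length()  # 1, 2, 3
    precisions = [1 / 2**it for it in range(first_iteration, iterations + 1)]
    print(subpixel, precisions)
# 1 [0.5, 0.25, 0.125, 0.0625]
# 2 [0.25, 0.125, 0.0625]
# 4 [0.125, 0.0625]

# Index shift in the cost surface for a 0.25 precision shift when subpixel == 2:
print(0.25 * 2)  # 0.5
```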
+ # So in this case, shifting from 0.5 precision corresponds to shift index of 1 + # (`index_shift = 1 = 0.5 * 2 = precision_shift * subpix`) + # In the same way, shifting from 0.25 precision corresponds to shift index of 0.5 + # (`index_shift = 0.5 = 0.25 * 2 = precision_shift * subpix`) + # disp_row are along columns in cost_surface, then new_cols are computed from initial_pos_disp_row - new_cols = disp_row_shifts + initial_pos_disp_row + new_cols = disp_row_shifts * cost_surface.attrs["subpixel"] + initial_pos_disp_row + # disp_col are along rows in cost_surface, then new_rows are computed from initial_pos_disp_col - new_rows = disp_col_shifts + initial_pos_disp_col + new_rows = disp_col_shifts * cost_surface.attrs["subpixel"] + initial_pos_disp_col # New subpixel disparity values new_rows_disp = disp_row_shifts + initial_disp_row new_cols_disp = disp_col_shifts + initial_disp_col # Interpolate points at positions (new_rows[i], new_cols[i]) - candidates = filter_dicho.interpolate(cost_surface, (new_cols, new_rows)) + candidates = filter_dicho.interpolate(cost_surface.data, (new_cols, new_rows)) # In case a NaN is present in the kernel, candidates will be all-NaNs. Let’s restore initial_position value so # that best candidate search will be able to find it. diff --git a/pandora2d/refinement/optical_flow.py b/pandora2d/refinement/optical_flow.py index a7e3117..5ffcced 100644 --- a/pandora2d/refinement/optical_flow.py +++ b/pandora2d/refinement/optical_flow.py @@ -21,11 +21,13 @@ """ This module contains functions associated to the optical flow method used in the refinement step. """ -from typing import Dict, Tuple + +from typing import Dict, Tuple, List import numpy as np import xarray as xr from json_checker import And +from numpy.typing import NDArray from scipy.ndimage import map_coordinates from pandora.margins import Margins @@ -39,7 +41,6 @@ class OpticalFlow(refinement.AbstractRefinement): OpticalFLow class allows to perform the subpixel cost refinement step """ - _iterations = None _invalid_disp = None _ITERATIONS = 4 @@ -97,6 +98,7 @@ def reshape_to_matching_cost_window( self, img: xr.Dataset, cost_volumes: xr.Dataset, + coordinates: Tuple[List, List], disp_row: np.ndarray = None, disp_col: np.ndarray = None, ): @@ -107,6 +109,8 @@ def reshape_to_matching_cost_window( :type img: xr.Dataset :param cost_volumes: cost_volumes 4D row, col, disp_col, disp_row :type cost_volumes: xarray.Dataset + :param coordinates: min and max index coordinate for row and col [(first_row,last_row),(first_col,last_col)] + :type coordinates: tuple :param disp_row: array dim [] containing all the row shift :type disp_row: np.ndarray :param disp_col: array dim [] containing all the columns shift @@ -118,44 +122,67 @@ def reshape_to_matching_cost_window( # get numpy array datas for image img_data = img["im"].data - offset = max(self.margins.astuple()) + # get general offset value + offset = cost_volumes.offset_row_col - computable_col = cost_volumes.col.data[offset:-offset] - computable_row = cost_volumes.row.data[offset:-offset] + # get cost volume sub xarray with offset coordinates values + offset_row, offset_col = coordinates + cost_volumes_sub = cost_volumes.sel( + row=slice(offset_row[0], offset_row[-1]), col=slice(offset_col[0], offset_col[-1]) + ) - one_dim_size = len(computable_row) * len(computable_col) + # get computable cost volume data in row and col + computable_col = cost_volumes_sub.col.data + computable_row = cost_volumes_sub.row.data if disp_row is None and disp_col is None: + # define 
image patches in one dim patches = np.lib.stride_tricks.sliding_window_view(img_data, [self._window_size, self._window_size]) - patches = patches.reshape((one_dim_size, self._window_size, self._window_size)).transpose((1, 2, 0)) - else: - # initiate values for right reshape computation - offset = max(self.margins.astuple()) - patches = np.ndarray((self._window_size, self._window_size, one_dim_size)) - idx = 0 - - for row in computable_row: - for col in computable_col: - shift_col = ( - 0 if np.isnan(disp_col[idx]) or disp_col[idx] == self._invalid_disp else int(disp_col[idx]) - ) - shift_row = ( - 0 if np.isnan(disp_row[idx]) or disp_row[idx] == self._invalid_disp else int(disp_row[idx]) - ) - - # get right pixel with his matching cost window - patch = img_data[ - row - offset + shift_row : row + offset + 1 + shift_row, - col - offset + shift_col : col + offset + 1 + shift_col, - ] - - # stock matching_cost window - if patch.shape == (self._window_size, self._window_size): - patches[:, :, idx] = patch - else: - patches[:, :, idx] = np.ones([self._window_size, self._window_size]) * np.nan - - idx += 1 + flattened_patches = patches.reshape(-1, self._window_size, self._window_size) + + # get patches id from original image + id_patches_img = [ + int(c_row * img.sizes["col"]) + c_col + for c_row in img["row"].data[offset:-offset] + for c_col in img["col"].data[offset:-offset] + ] + + # Associate each patches of the one dim image to the id of the true image patches + patch_dict = {id_patches_img[i]: flattened_patches[i] for i in range(len(id_patches_img))} + id_patches = [int(c_row * img.sizes["col"]) + c_col for c_row in computable_row for c_col in computable_col] + + # Filter patches to keep only id calculated with offset and step + filtered_patches_list = [patch_dict[key] for key in id_patches if key in patch_dict] + reshaped_patches = np.stack(filtered_patches_list, axis=-1).reshape( + (self._window_size, self._window_size, len(filtered_patches_list)) + ) + return reshaped_patches + + # initiate values for right reshape computation + offset = self._window_size // 2 + patches = np.ndarray((self._window_size, self._window_size, len(computable_row) * len(computable_col))) + idx = 0 + + for row in computable_row: + for col in computable_col: + shift_col = 0 if np.isnan(disp_col[idx]) or disp_col[idx] == self._invalid_disp else int(disp_col[idx]) + shift_row = 0 if np.isnan(disp_row[idx]) or disp_row[idx] == self._invalid_disp else int(disp_row[idx]) + + # get right pixel with his matching cost window + patch_row_start = row - offset + shift_row + patch_row_end = row + offset + shift_row + patch_col_start = col - offset + shift_col + patch_col_end = col + offset + shift_col + patch = img.sel(row=slice(patch_row_start, patch_row_end), col=slice(patch_col_start, patch_col_end)) + patch = patch["im"].data + + # stock matching_cost window + if patch.shape == (self._window_size, self._window_size): + patches[:, :, idx] = patch + else: + patches[:, :, idx] = np.ones([self._window_size, self._window_size]) * np.nan + + idx += 1 return patches @@ -246,7 +273,6 @@ def optical_flow( final_dec_col = np.zeros(left_img.shape[2]) for idx in list_idx_to_compute: - left_matching_cost = left_img[:, :, idx] right_matching_cost = right_img[:, :, idx] @@ -268,6 +294,26 @@ def optical_flow( return final_dec_row, final_dec_col, new_list_to_compute + @staticmethod + def find_nearest_column(value, data, direction): + """ + Return the nearest column from initial column index coordinate in a given direction + + :param 
value: initial column index + :type value: int + :param data: cost volume coordinates + :type data: np.ndarray + :param direction: direction sign (must be + or -) + :type direction: string + """ + + if direction == "+": + return data[np.searchsorted(data, value, side="left")] + if direction == "-": + return data[np.searchsorted(data, value, side="right") - 1] + + raise ValueError("Direction must be '+' or '-'") + def refinement_method( self, cost_volumes: xr.Dataset, disp_map: xr.Dataset, img_left: xr.Dataset, img_right: xr.Dataset ) -> Tuple[np.ndarray, np.ndarray, np.ndarray]: @@ -290,21 +336,40 @@ def refinement_method( self._invalid_disp = disp_map.attrs["invalid_disp"] # get offset - offset = max(self.margins.astuple()) - - # get displacement map from disparity state - initial_delta_row = disp_map["row_map"].data - initial_delta_col = disp_map["col_map"].data - - delta_col = initial_delta_col[offset:-offset, offset:-offset].flatten() - delta_row = initial_delta_row[offset:-offset, offset:-offset].flatten() + offset = cost_volumes.offset_row_col + + # get first and last coordinates for row and col in cost volume dataset + first_col_coordinate = cost_volumes.col.data[0] + offset + last_col_coordinate = cost_volumes.col.data[-1] - offset + col_extrema_coordinates = [ + self.find_nearest_column(first_col_coordinate, cost_volumes.col.data, "+"), + self.find_nearest_column(last_col_coordinate, cost_volumes.col.data, "-"), + ] + + first_row_coordinate = cost_volumes.row.data[0] + offset + last_row_coordinate = cost_volumes.row.data[-1] - offset + row_extrema_coordinates = [ + self.find_nearest_column(first_row_coordinate, cost_volumes.row.data, "+"), + self.find_nearest_column(last_row_coordinate, cost_volumes.row.data, "-"), + ] + + # get displacement map in row and col - from disparity min/max coordinates + row_slice = slice(row_extrema_coordinates[0], row_extrema_coordinates[-1]) + col_slice = slice(col_extrema_coordinates[0], col_extrema_coordinates[-1]) + cost_volume_sub = cost_volumes.sel(row=row_slice, col=col_slice) + disp_map_sub = disp_map.sel(row=cost_volume_sub.row, col=cost_volume_sub.col) + delta_row = disp_map_sub["row_map"].data.flatten() + delta_col = disp_map_sub["col_map"].data.flatten() # reshape left and right datas # from (nbcol, nbrow) to (window_size, window_size, nbcol*nbrow) - reshaped_left = self.reshape_to_matching_cost_window(img_left, cost_volumes) + reshaped_left = self.reshape_to_matching_cost_window( + img_left, cost_volumes, (row_extrema_coordinates, col_extrema_coordinates) + ) reshaped_right = self.reshape_to_matching_cost_window( img_right, cost_volumes, + (row_extrema_coordinates, col_extrema_coordinates), delta_row, delta_col, ) @@ -323,21 +388,46 @@ def refinement_method( delta_col = delta_col - computed_dcol delta_row = delta_row - computed_drow - # get finals disparity map dimensions - nb_row, nb_col = initial_delta_col.shape - nb_valid_points_row = nb_row - 2 * offset - nb_valid_points_col = nb_col - 2 * offset + # get finals disparity map dimensions, add +1 because it began at 0 + nb_valid_points_row = int((row_extrema_coordinates[-1] - row_extrema_coordinates[0]) / cost_volumes.step[0] + 1) + nb_valid_points_col = int((col_extrema_coordinates[-1] - col_extrema_coordinates[0]) / cost_volumes.step[1] + 1) delta_col = delta_col.reshape([nb_valid_points_row, nb_valid_points_col]) delta_row = delta_row.reshape([nb_valid_points_row, nb_valid_points_col]) - # add borders - delta_col = np.pad(delta_col, pad_width=offset, 
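As a side note, the behaviour of the two directions handled by find_nearest_column above can be checked with a small standalone example (the coordinates below are made up):

import numpy as np

coords = np.array([0, 2, 4, 6, 8])  # assumed cost volume column coordinates with a step of 2

# "+" returns the first coordinate greater than or equal to the value
assert coords[np.searchsorted(coords, 3, side="left")] == 4
# "-" returns the last coordinate lower than or equal to the value
assert coords[np.searchsorted(coords, 3, side="right") - 1] == 2
# a value already on the grid is returned unchanged in both directions
assert coords[np.searchsorted(coords, 4, side="left")] == 4
assert coords[np.searchsorted(coords, 4, side="right") - 1] == 4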
constant_values=self._invalid_disp) - delta_row = np.pad(delta_row, pad_width=offset, constant_values=self._invalid_disp) + # add border + padding_top = (disp_map.sizes["row"] - delta_row.shape[0]) // 2 + padding_bottom = disp_map.sizes["row"] - delta_row.shape[0] - padding_top + padding_left = (disp_map.sizes["col"] - delta_row.shape[1]) // 2 + padding_right = disp_map.sizes["col"] - delta_row.shape[1] - padding_left + + delta_row = np.pad( + delta_row, + pad_width=((padding_top, padding_bottom), (padding_left, padding_right)), + constant_values=self._invalid_disp, + ) + delta_col = np.pad( + delta_col, + pad_width=((padding_top, padding_bottom), (padding_left, padding_right)), + constant_values=self._invalid_disp, + ) - delta_col[delta_col <= img_left.attrs["col_disparity_source"][0]] = self._invalid_disp - delta_col[delta_col >= img_left.attrs["col_disparity_source"][1]] = self._invalid_disp - delta_row[delta_row <= img_left.attrs["row_disparity_source"][0]] = self._invalid_disp - delta_row[delta_row >= img_left.attrs["row_disparity_source"][1]] = self._invalid_disp + self._invalid_out_of_grid_disparities(cost_volumes.attrs["step"], delta_col, img_left["col_disparity"]) + self._invalid_out_of_grid_disparities(cost_volumes.attrs["step"], delta_row, img_left["row_disparity"]) return delta_col, delta_row, disp_map["correlation_score"].data + + def _invalid_out_of_grid_disparities(self, step: List, delta: NDArray[np.floating], disparity: xr.DataArray): + """ + Replace delta values by invalid_disp value when it is outside the corresponding disparity range defined by + the disparity grid. + + :param step: [row_step, col_step] + :type step: list + :param delta: refined disparity map + :type delta: np.NDArray + :param disparity: pixelic disparity grids with min and max `band_disp` coordinates. 
+ :type disparity: xr.DataArray + """ + delta[delta <= disparity.sel(band_disp="min").data[:: step[0], :: step[1]]] = self._invalid_disp + delta[delta >= disparity.sel(band_disp="max").data[:: step[0], :: step[1]]] = self._invalid_disp diff --git a/pandora2d/refinement/refinement.py b/pandora2d/refinement/refinement.py index 76ba5d1..209b704 100644 --- a/pandora2d/refinement/refinement.py +++ b/pandora2d/refinement/refinement.py @@ -43,7 +43,6 @@ class AbstractRefinement: refinement_methods_avail: Dict = {} _refinement_method = None - cfg = None margins = NullMargins() schema: Dict # This will raise an AttributeError if not override in subclasses diff --git a/pandora2d/state_machine.py b/pandora2d/state_machine.py index 04443dc..c706bfa 100644 --- a/pandora2d/state_machine.py +++ b/pandora2d/state_machine.py @@ -27,7 +27,6 @@ import logging from typing import TYPE_CHECKING, Dict, List, Literal, Optional, TypedDict, Union -import numpy as np import xarray as xr from typing_extensions import Annotated @@ -45,9 +44,11 @@ from transitions import Machine from transitions import MachineError - from pandora.margins import GlobalMargins -from pandora2d import common, disparity, estimation, matching_cost, refinement, img_tools + + +from pandora2d import common, disparity, estimation, img_tools, matching_cost, refinement +from pandora2d.profiling import mem_time_profile class MarginsProperties(TypedDict): @@ -63,40 +64,38 @@ class Pandora2DMachine(Machine): # pylint:disable=too-many-instance-attributes """ _transitions_run = [ - {"trigger": "estimation", "source": "begin", "dest": "assumption", "after": "estimation_run"}, + {"trigger": "estimation", "source": "begin", "dest": "assumption", "before": "estimation_run"}, { "trigger": "matching_cost", "source": "begin", "dest": "cost_volumes", "prepare": "matching_cost_prepare", - "after": "matching_cost_run", + "before": "matching_cost_run", }, { "trigger": "matching_cost", "source": "assumption", "dest": "cost_volumes", "prepare": "matching_cost_prepare", - "after": "matching_cost_run", + "before": "matching_cost_run", }, - {"trigger": "disparity", "source": "cost_volumes", "dest": "disp_maps", "after": "disp_maps_run"}, - {"trigger": "refinement", "source": "disp_maps", "dest": "disp_maps", "after": "refinement_run"}, + {"trigger": "disparity", "source": "cost_volumes", "dest": "disp_maps", "before": "disp_maps_run"}, + {"trigger": "refinement", "source": "disp_maps", "dest": "disp_maps", "before": "refinement_run"}, ] _transitions_check = [ - {"trigger": "estimation", "source": "begin", "dest": "assumption", "after": "estimation_check_conf"}, - {"trigger": "matching_cost", "source": "begin", "dest": "cost_volumes", "after": "matching_cost_check_conf"}, + {"trigger": "estimation", "source": "begin", "dest": "assumption", "before": "estimation_check_conf"}, + {"trigger": "matching_cost", "source": "begin", "dest": "cost_volumes", "before": "matching_cost_check_conf"}, { "trigger": "matching_cost", "source": "assumption", "dest": "cost_volumes", - "after": "matching_cost_check_conf", + "before": "matching_cost_check_conf", }, - {"trigger": "disparity", "source": "cost_volumes", "dest": "disp_maps", "after": "disparity_check_conf"}, - {"trigger": "refinement", "source": "disp_maps", "dest": "disp_maps", "after": "refinement_check_conf"}, + {"trigger": "disparity", "source": "cost_volumes", "dest": "disp_maps", "before": "disparity_check_conf"}, + {"trigger": "refinement", "source": "disp_maps", "dest": "disp_maps", "before": "refinement_check_conf"}, ] 
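For readers unfamiliar with the transitions library, here is a minimal standalone sketch (names are made up) of the "before" callbacks now used in the tables above: the callback fires once the trigger is called but while the model is still in the source state, whereas an "after" callback only runs once the destination state has been entered.

from transitions import Machine

class Toy:
    def work(self):
        # with "before", self.state is still the source state at this point
        print(f"running step while in state '{self.state}'")

toy = Toy()
machine = Machine(
    model=toy,
    states=["begin", "cost_volumes"],
    transitions=[{"trigger": "matching_cost", "source": "begin", "dest": "cost_volumes", "before": "work"}],
    initial="begin",
)
toy.matching_cost()  # prints: running step while in state 'begin'
print(toy.state)     # cost_volumes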
- margins = GlobalMargins() - def __init__( self, ) -> None: @@ -109,12 +108,6 @@ def __init__( self.left_img: Optional[xr.Dataset] = None # Right image self.right_img: Optional[xr.Dataset] = None - # Column's min, max disparities - self.disp_min_col: np.ndarray = None - self.disp_max_col: np.ndarray = None - # Row's min, max disparities - self.disp_min_row: np.ndarray = None - self.disp_max_row: np.ndarray = None self.pipeline_cfg: Dict = {"pipeline": {}} self.completed_cfg: Dict = {} @@ -124,6 +117,7 @@ def __init__( # For communication between matching_cost and refinement steps self.step: list = None self.window_size: int = None + self.margins = GlobalMargins() # Define available states states_ = ["begin", "assumption", "cost_volumes", "disp_maps"] @@ -162,12 +156,6 @@ def run_prepare(self, img_left: xr.Dataset, img_right: xr.Dataset, cfg: dict) -> self.left_img = img_left self.right_img = img_right - # Column's min, max disparities - self.disp_min_col = img_left["col_disparity"].sel(band_disp="min").data.copy() - self.disp_max_col = img_left["col_disparity"].sel(band_disp="max").data.copy() - # Row's min, max disparities - self.disp_min_row = img_left["row_disparity"].sel(band_disp="min").data.copy() - self.disp_max_row = img_left["row_disparity"].sel(band_disp="max").data.copy() self.completed_cfg = copy.copy(cfg) self.add_transitions(self._transitions_run) @@ -320,9 +308,10 @@ def matching_cost_prepare(self, cfg: Dict[str, dict], input_step: str) -> None: self.matching_cost_ = matching_cost.MatchingCost(cfg["pipeline"][input_step]) self.matching_cost_.allocate_cost_volume_pandora( - self.left_img, self.right_img, self.disp_min_col, self.disp_max_col, cfg, self.margins.get("refinement") + self.left_img, self.right_img, cfg, self.margins.get("refinement") ) + @mem_time_profile(name="Estimation step") def estimation_run(self, cfg: Dict[str, dict], input_step: str) -> None: """ Shift's estimation step @@ -340,17 +329,12 @@ def estimation_run(self, cfg: Dict[str, dict], input_step: str) -> None: row_disparity, col_disparity, shifts, extra_dict = estimation_.compute_estimation(self.left_img, self.right_img) self.left_img = img_tools.add_disparity_grid(self.left_img, col_disparity, row_disparity) - # Column's min, max disparities - self.disp_min_col = self.left_img["col_disparity"].sel(band_disp="min").data - self.disp_max_col = self.left_img["col_disparity"].sel(band_disp="max").data - # Row's min, max disparities - self.disp_min_row = self.left_img["row_disparity"].sel(band_disp="min").data - self.disp_max_row = self.left_img["row_disparity"].sel(band_disp="max").data self.completed_cfg = estimation_.update_cfg_with_estimation( cfg, col_disparity, row_disparity, shifts, extra_dict ) + @mem_time_profile(name="Matching cost step") def matching_cost_run(self, _, __) -> None: """ Matching cost computation @@ -363,13 +347,10 @@ def matching_cost_run(self, _, __) -> None: self.cost_volumes = self.matching_cost_.compute_cost_volumes( self.left_img, self.right_img, - self.disp_min_col, - self.disp_max_col, - self.disp_min_row, - self.disp_max_row, self.margins.get("refinement"), ) + @mem_time_profile(name="Disparity step") def disp_maps_run(self, cfg: Dict[str, dict], input_step: str) -> None: """ Disparity computation and validity mask @@ -397,6 +378,7 @@ def disp_maps_run(self, cfg: Dict[str, dict], input_step: str) -> None: }, ) + @mem_time_profile(name="Refinement step") def refinement_run(self, cfg: Dict[str, dict], input_step: str) -> None: """ Subpixel disparity refinement diff --git 
a/pytest.ini b/pytest.ini index a640a62..aa439d2 100644 --- a/pytest.ini +++ b/pytest.ini @@ -8,5 +8,6 @@ markers = performance_tests: accuracy tests plugin_tests: MCCNN unit test notebook_tests: Notebook unit tests + metrics: compute metrics for resource tests norecursedirs = .git doc conf .gitlab generate_report_on_test = True diff --git a/setup.cfg b/setup.cfg index a7ef3af..2d22339 100644 --- a/setup.cfg +++ b/setup.cfg @@ -56,17 +56,20 @@ setup_requires = #Pandora2d packages dependencies install_requires = - numpy<2.0 + numpy rasterio transitions json-checker xarray numba>=0.55.2;python_version>'3.7' numba>=0.47.0;python_version<'3.8' - pandora==1.6.1 + pandora==1.6.2 typing_extensions scikit-image scipy<1.14 + pandas + psutil + matplotlib package_dir = . = pandora2d @@ -83,17 +86,19 @@ dev = pre-commit isort>=5.8.0 # Check imports black>=21.5b0 # PEP8 format code - pylint>=2.8.2, <3.2.4 # General linter with more rules + pylint>=2.8.2 # General linter with more rules setuptools_scm # version from git tag mypy # static type checker + pandas-stubs + types-psutil docs = sphinx sphinx_rtd_theme sphinx_autoapi + sphinx_tabs notebook = - matplotlib graphviz notebook diff --git a/tests/conftest.py b/tests/conftest.py index 1ddae4d..e11ad83 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -21,11 +21,17 @@ """ # pylint: disable=redefined-outer-name - import pathlib import re +import json +import numpy as np import pytest +import rasterio +import xarray as xr +from pandora.common import write_data_array + +import pandora2d def pytest_collection_modifyitems(config, items): @@ -56,9 +62,11 @@ def pytest_html_results_table_header(cells): 1. Category : with values {'TU', 'TF', 'TP', 'TR'} 2. Function tested : basename of python test file + 3. Requirement : validating the Pandora2D tool, string with EX_* """ cells.insert(1, "Category") cells.insert(2, "Function tested") + cells.insert(3, "Requirement") def pytest_html_results_table_row(report, cells): @@ -69,12 +77,25 @@ def pytest_html_results_table_row(report, cells): 1. CATEGORY : with values {'TU', 'TF', 'TP', 'TR'} 2. FUNCTION : basename of python test file + 3. REQUIREMENT : with values EX_* """ type_dict = {"unit": "TU", "functional": "TF", "resource": "TR", "performance": "TP"} pattern = r"tests/(?P\w+)_tests.*test_(?P\w+)\.py" match = re.match(pattern, report.nodeid) cells.insert(1, f"{type_dict[match.groupdict()['type']]}") cells.insert(2, f"{match.groupdict()['function']}") + cells.insert(3, f"{'
'.join(report.requirement)}") + + +@pytest.hookimpl(hookwrapper=True) +def pytest_runtest_makereport(item, call): # pylint: disable=unused-argument + """ + Parse test docstrings and retrieve strings in EX_*. + """ + outcome = yield + report = outcome.get_result() + pattern = r"(EX_\w*)" + report.requirement = re.findall(pattern, str(item.function.__doc__)) @pytest.fixture() @@ -113,8 +134,8 @@ def correct_input_cfg(left_img_path, right_img_path): "right": { "img": right_img_path, }, - "col_disparity": [-2, 2], - "row_disparity": [-2, 2], + "col_disparity": {"init": 1, "range": 2}, + "row_disparity": {"init": 1, "range": 2}, } } @@ -130,7 +151,188 @@ def correct_multiband_input_cfg(left_rgb_path, right_rgb_path): "right": { "img": right_rgb_path, }, - "col_disparity": [-2, 2], - "row_disparity": [-2, 2], + "col_disparity": {"init": 1, "range": 2}, + "row_disparity": {"init": 1, "range": 2}, } } + + +@pytest.fixture +def mask_path(left_img_path, tmp_path): + """Create a mask and save it in tmp""" + + with rasterio.open(left_img_path) as src: + width = src.width + height = src.height + + mask = xr.DataArray(data=0, dims=["height", "width"], coords={"height": range(height), "width": range(width)}) + mask[0 : int(height / 2), 0 : int(width / 2)] = 1 + + path = tmp_path / "mask_left.tif" + + write_data_array( + data_array=mask, + filename=str(path), + ) + + return path + + +@pytest.fixture +def correct_input_with_left_mask(left_img_path, right_img_path, mask_path): + return { + "input": { + "left": {"img": left_img_path, "nodata": -9999, "mask": str(mask_path)}, + "right": { + "img": right_img_path, + }, + "col_disparity": {"init": 0, "range": 2}, + "row_disparity": {"init": 0, "range": 2}, + } + } + + +@pytest.fixture +def correct_input_with_right_mask(left_img_path, right_img_path, mask_path): + return { + "input": { + "left": { + "img": left_img_path, + "nodata": -9999, + }, + "right": {"img": right_img_path, "mask": str(mask_path)}, + "col_disparity": {"init": 0, "range": 2}, + "row_disparity": {"init": 0, "range": 2}, + } + } + + +@pytest.fixture() +def random_seed(): + """ + Seed generated with: + + >>> import secrets + >>> secrets.randbits(128) + """ + return 160187526967402499820683775418299155210 + + +@pytest.fixture() +def random_generator(random_seed): + return np.random.default_rng(random_seed) + + +@pytest.fixture() +def run_pipeline(tmp_path): + """Fixture that returns a function to run a pipeline and which returns the output directory path.""" + + def run(configuration, output_dir="output"): + config_path = tmp_path / "config.json" + with config_path.open("w", encoding="utf-8") as file_: + json.dump(configuration, file_, indent=2) + + pandora2d.main(str(config_path), str(tmp_path / output_dir), verbose=False) + return tmp_path + + return run + + +@pytest.fixture() +def constant_initial_disparity(): + """ + Create a correct disparity dictionary + with constant initial disparity + """ + return {"init": 1, "range": 3} + + +@pytest.fixture() +def second_constant_initial_disparity(): + """ + Create a correct disparity dictionary + with constant initial disparity + """ + return {"init": 0, "range": 2} + + +@pytest.fixture() +def make_input_cfg(left_img_path, right_img_path, request): + """Get input configuration with given disparities""" + + input_cfg = { + "left": { + "img": left_img_path, + "nodata": -9999, + }, + "right": {"img": right_img_path, "nodata": -9999}, + "col_disparity": request.getfixturevalue(request.param["col_disparity"]), + "row_disparity": 
request.getfixturevalue(request.param["row_disparity"]), + } + + return input_cfg + + +@pytest.fixture +def left_img_shape(left_img_path): + """ + Get shape of left image stored in left_img_path fixture + """ + + with rasterio.open(left_img_path) as src: + width = src.width + height = src.height + + return (height, width) + + +@pytest.fixture +def create_disparity_grid_fixture(tmp_path): + """ + Creates initial disparity grid and save it in tmp. + """ + + def create_disparity_grid(data, disp_range, suffix_path, band=False, disp_type=rasterio.dtypes.int64): + + if not band: + disparity_grid = xr.DataArray(data, dims=["row", "col"]) + else: + disparity_grid = xr.DataArray(data, dims=["row", "col", "band"]) + + path = tmp_path / suffix_path + + write_data_array(data_array=disparity_grid, filename=str(path), dtype=disp_type) + + return {"init": str(path), "range": disp_range} + + return create_disparity_grid + + +@pytest.fixture +def correct_grid(left_img_shape, create_disparity_grid_fixture): + """Create a correct initial disparity grid and save it in tmp""" + + height, width = left_img_shape + + # Array of size (height, width) with alternating rows of 2, 0 and 3 + init_band = np.tile([[2], [0], [3]], (height // 3 + 1, width))[:height, :] + + return create_disparity_grid_fixture(init_band, 5, "disparity.tif") + + +@pytest.fixture +def second_correct_grid(left_img_shape, create_disparity_grid_fixture): + """Create a correct initial disparity grid and save it in tmp""" + + height, width = left_img_shape + + # Array of size (height, width) with alternating columns of 5, -21 and -1 + init_band = np.tile([[5, -21, -1]], (height, width // 3 + 1))[:, :width] + + return create_disparity_grid_fixture(init_band, 5, "second_disparity.tif") + + +@pytest.fixture() +def reset_profiling(): + pandora2d.profiling.data.reset() + pandora2d.profiling.expert_mode_config.enable = False diff --git a/tests/data/json_conf_files/classic_cfg.json b/tests/data/json_conf_files/classic_cfg.json index 8709511..a9c2777 100644 --- a/tests/data/json_conf_files/classic_cfg.json +++ b/tests/data/json_conf_files/classic_cfg.json @@ -7,8 +7,8 @@ "right": { "img": "./tests/data/images/cones/monoband/right.png" }, - "col_disparity": [-2, 2], - "row_disparity": [-2, 2] + "col_disparity": {"init": 0, "range": 2}, + "row_disparity": {"init": 0, "range": 2} }, "pipeline": { "matching_cost": { diff --git a/tests/functional_tests/conftest.py b/tests/functional_tests/conftest.py index c964077..3ec0992 100644 --- a/tests/functional_tests/conftest.py +++ b/tests/functional_tests/conftest.py @@ -18,27 +18,8 @@ # """Module with global test fixtures.""" -import json - import pytest -import pandora2d - - -@pytest.fixture() -def run_pipeline(tmp_path): - """Fixture that returns a function to run a pipeline and which returns the output directory path.""" - - def run(configuration, output_dir="output"): - config_path = tmp_path / "config.json" - with config_path.open("w", encoding="utf-8") as file_: - json.dump(configuration, file_, indent=2) - - pandora2d.main(str(config_path), str(tmp_path / output_dir), verbose=False) - return tmp_path - - return run - @pytest.fixture() def correct_pipeline_without_refinement(): @@ -59,3 +40,14 @@ def correct_pipeline_with_optical_flow(): "refinement": {"refinement_method": "optical_flow"}, } } + + +@pytest.fixture() +def correct_pipeline_with_dichotomy(): + return { + "pipeline": { + "matching_cost": {"matching_cost_method": "zncc", "window_size": 5}, + "disparity": {"disparity_method": "wta", 
"invalid_disparity": -99}, + "refinement": {"refinement_method": "dichotomy", "iterations": 2, "filter": {"method": "bicubic"}}, + } + } diff --git a/tests/functional_tests/matching_cost/test_disparity_margins.py b/tests/functional_tests/matching_cost/test_disparity_margins.py index 9244aca..880947f 100644 --- a/tests/functional_tests/matching_cost/test_disparity_margins.py +++ b/tests/functional_tests/matching_cost/test_disparity_margins.py @@ -48,16 +48,16 @@ def create_datasets(self): coords={"row": np.arange(data.shape[0]), "col": np.arange(data.shape[1])}, ) - add_disparity_grid(left, [1, 3], [-2, 2]) - - left.attrs = { - "no_data_img": -9999, - "valid_pixels": 0, - "no_data_mask": 1, - "crs": None, - "col_disparity_source": [1, 3], - "row_disparity_source": [-2, 2], - } + add_disparity_grid(left, {"init": 2, "range": 1}, {"init": 0, "range": 2}) + + left.attrs.update( + { + "no_data_img": -9999, + "valid_pixels": 0, + "no_data_mask": 1, + "crs": None, + } + ) data = np.full((10, 10), 1) right = xr.Dataset( @@ -70,8 +70,6 @@ def create_datasets(self): "valid_pixels": 0, "no_data_mask": 1, "crs": None, - "col_disparity_source": [1, 3], - "row_disparity_source": [-2, 2], } return left, right @@ -98,7 +96,7 @@ def config(self, subpix, refinement_config, matching_cost_method): [ pytest.param( 1, - {"refinement_method": "dichotomy", "iterations": 1, "filter": "bicubic"}, + {"refinement_method": "dichotomy", "iterations": 1, "filter": {"method": "bicubic"}}, (10, 10, 6, 8), [0, 1, 2, 3, 4, 5], [-3, -2, -1, 0, 1, 2, 3, 4], @@ -126,7 +124,7 @@ def config(self, subpix, refinement_config, matching_cost_method): ), pytest.param( 2, - {"refinement_method": "dichotomy", "iterations": 1, "filter": "bicubic"}, + {"refinement_method": "dichotomy", "iterations": 1, "filter": {"method": "bicubic"}}, (10, 10, 11, 15), [0, 0.5, 1, 1.5, 2, 2.5, 3, 3.5, 4, 4.5, 5], [-3, -2.5, -2, -1.5, -1, -0.5, 0, 0.5, 1, 1.5, 2, 2.5, 3, 3.5, 4], @@ -154,7 +152,7 @@ def config(self, subpix, refinement_config, matching_cost_method): ), pytest.param( 4, - {"refinement_method": "dichotomy", "iterations": 1, "filter": "bicubic"}, + {"refinement_method": "dichotomy", "iterations": 1, "filter": {"method": "bicubic"}}, (10, 10, 21, 29), np.arange(0, 5.25, 0.25), np.arange(-3, 4.25, 0.25), diff --git a/tests/functional_tests/matching_cost/test_subpix.py b/tests/functional_tests/matching_cost/test_subpix.py index d1633a0..a571918 100644 --- a/tests/functional_tests/matching_cost/test_subpix.py +++ b/tests/functional_tests/matching_cost/test_subpix.py @@ -46,17 +46,17 @@ def create_datasets(self, data_left, data_right): coords={"row": np.arange(data_left.shape[0]), "col": np.arange(data_left.shape[1])}, ) - add_disparity_grid(left, [-2, 2], [-2, 2]) + add_disparity_grid(left, {"init": 1, "range": 2}, {"init": 1, "range": 2}) - left.attrs = { - "no_data_img": -9999, - "valid_pixels": 0, - "no_data_mask": 1, - "crs": None, - "col_disparity_source": [-2, 2], - "row_disparity_source": [-2, 2], - "transform": None, - } + left.attrs.update( + { + "no_data_img": -9999, + "valid_pixels": 0, + "no_data_mask": 1, + "crs": None, + "transform": None, + } + ) right = xr.Dataset( {"im": (["row", "col"], data_right)}, diff --git a/tests/functional_tests/refinement/dichotomy/test_dichotomy_pipeline.py b/tests/functional_tests/refinement/dichotomy/test_dichotomy_pipeline.py new file mode 100644 index 0000000..004952b --- /dev/null +++ b/tests/functional_tests/refinement/dichotomy/test_dichotomy_pipeline.py @@ -0,0 +1,177 @@ +# Copyright (c) 2024 
Centre National d'Etudes Spatiales (CNES). +# +# This file is part of PANDORA2D +# +# https://github.com/CNES/Pandora2D +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +""" +Test the refinement.dichotomy pipeline. +""" +import copy +import pytest + +import numpy as np + + +import pandora2d +from pandora2d.state_machine import Pandora2DMachine +from pandora2d.check_configuration import check_conf +from pandora2d.img_tools import create_datasets_from_inputs, get_roi_processing + +# Make pylint happy with fixtures: +# pylint: disable=redefined-outer-name + + +@pytest.fixture() +def make_cfg_for_dichotomy( + left_img_path, right_img_path, method, subpix, iterations, roi, col_disparity, row_disparity +): + """ + Creates user configuration to test dichotomy loop + """ + + user_cfg = { + "input": { + "left": { + "img": str(left_img_path), + "nodata": "NaN", + }, + "right": { + "img": str(right_img_path), + "nodata": "NaN", + }, + "col_disparity": col_disparity, + "row_disparity": row_disparity, + }, + "ROI": roi, + "pipeline": { + "matching_cost": { + "matching_cost_method": "zncc", + "window_size": 7, + "subpix": subpix, + }, + "disparity": { + "disparity_method": "wta", + "invalid_disparity": -9999, + }, + "refinement": { + "refinement_method": "dichotomy", + "iterations": iterations, + "filter": {"method": method}, + }, + }, + } + + return user_cfg + + +@pytest.mark.parametrize("method", ["bicubic", "sinc"]) +@pytest.mark.parametrize("subpix", [1, 2, 4]) +@pytest.mark.parametrize("iterations", [1, 2]) +@pytest.mark.parametrize("roi", [{"col": {"first": 100, "last": 120}, "row": {"first": 100, "last": 120}}]) +@pytest.mark.parametrize("col_disparity", [{"init": 0, "range": 1}]) +@pytest.mark.parametrize("row_disparity", [{"init": 0, "range": 3}]) +def test_dichotomy_execution(make_cfg_for_dichotomy): + """ + Description : Test that execution of Pandora2d with a dichotomy refinement does not fail. 
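As a reminder, the {"init": ..., "range": ...} disparity dictionaries used in this configuration describe a symmetric search interval around the initial value; judging from the expected min/max grids of test_disparity_grids further down, it expands to [init - range, init + range]. A small sketch with a hypothetical helper:

def disparity_interval(disparity: dict) -> tuple:
    """Hypothetical helper: expand an {"init", "range"} dictionary into (min, max)."""
    return disparity["init"] - disparity["range"], disparity["init"] + disparity["range"]

assert disparity_interval({"init": 0, "range": 3}) == (-3, 3)
assert disparity_interval({"init": 2, "range": 5}) == (-3, 7)  # consistent with the grid expectations in test_disparity_grids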
+ Data : + * Left_img : cones/monoband/left.png + * Right_img : cones/monoband/right.png + Requirement : + * EX_REF_BCO_00 + * EX_REF_SINC_00 + """ + pandora2d_machine = Pandora2DMachine() + + cfg = check_conf(make_cfg_for_dichotomy, pandora2d_machine) + + cfg["ROI"]["margins"] = pandora2d_machine.margins.global_margins.astuple() + roi = get_roi_processing(cfg["ROI"], cfg["input"]["col_disparity"], cfg["input"]["row_disparity"]) + + image_datasets = create_datasets_from_inputs(input_config=cfg["input"], roi=roi) + + dataset_disp_maps, _ = pandora2d.run(pandora2d_machine, image_datasets.left, image_datasets.right, cfg) + + # Checking that resulting disparity maps are not full of nans + with np.testing.assert_raises(AssertionError): + assert np.all(np.isnan(dataset_disp_maps.row_map.data)) + assert np.all(np.isnan(dataset_disp_maps.col_map.data)) + + +@pytest.mark.parametrize("method", ["bicubic"]) +@pytest.mark.parametrize("subpix", [1]) +@pytest.mark.parametrize("iterations", [1, 2]) +# This ROI has been chosen because its corresponding disparity maps +# contain extrema disparity range values and subpixel values after refinement. +@pytest.mark.parametrize("roi", [{"col": {"first": 30, "last": 40}, "row": {"first": 160, "last": 170}}]) +# We use small disparity intervals to obtain extrema of disparity ranges in the disparity maps. +# Once the variable disparity grids have been introduced into pandora2d, +# this type of disparity will also need to be tested here. +@pytest.mark.parametrize("col_disparity", [{"init": -1, "range": 1}]) +@pytest.mark.parametrize("row_disparity", [{"init": 0, "range": 1}]) +def test_extrema_disparities_not_processed(make_cfg_for_dichotomy): + """ + Description : Test that execution of Pandora2d with a dichotomy refinement does not + take into account points for which best cost value is found at the edge of the disparity range. 
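Note that the "with np.testing.assert_raises(AssertionError)" block used in test_dichotomy_execution above is an inverted assertion: the test passes only when the inner "all NaN" check fails. A tiny standalone example with made-up data:

import numpy as np

refined_map = np.array([[np.nan, 0.25], [1.5, np.nan]])  # hypothetical refined disparity map

with np.testing.assert_raises(AssertionError):
    # raises because at least one value is finite, so the enclosing test passes
    assert np.all(np.isnan(refined_map))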
+ Data : + * Left_img : cones/monoband/left.png + * Right_img : cones/monoband/right.png + """ + pandora2d_machine = pandora2d.state_machine.Pandora2DMachine() + + cfg = check_conf(make_cfg_for_dichotomy, pandora2d_machine) + + cfg["ROI"]["margins"] = pandora2d_machine.margins.global_margins.astuple() + roi = get_roi_processing(cfg["ROI"], cfg["input"]["col_disparity"], cfg["input"]["row_disparity"]) + + image_datasets = create_datasets_from_inputs(input_config=cfg["input"], roi=roi) + + # Prepare Pandora2D machine + pandora2d_machine.run_prepare(image_datasets.left, image_datasets.right, cfg) + # Run matching cost step + pandora2d_machine.run("matching_cost", cfg) + # Run disparity step + pandora2d_machine.run("disparity", cfg) + # Make a copy of disparity maps before refinement step + copy_disp_maps = copy.deepcopy(pandora2d_machine.dataset_disp_maps) + # Run refinement step + pandora2d_machine.run("refinement", cfg) + + # Get points for which best cost value is at the edge of the row disparity range + mask_min_row = np.nonzero(copy_disp_maps["row_map"].data == image_datasets.left.row_disparity[0, :, :]) + mask_max_row = np.nonzero(copy_disp_maps["row_map"].data == image_datasets.left.row_disparity[1, :, :]) + + # Get points for which best cost value is at the edge of the column disparity range + mask_min_col = np.nonzero(copy_disp_maps["col_map"].data == image_datasets.left.col_disparity[0, :, :]) + mask_max_col = np.nonzero(copy_disp_maps["col_map"].data == image_datasets.left.col_disparity[1, :, :]) + + # Checking that best row disparity is unchanged for points having best cost value at the edge of row disparity range + assert np.all( + pandora2d_machine.dataset_disp_maps["row_map"].data[mask_min_row[0], mask_min_row[1]] + == image_datasets.left.row_disparity.data[0, mask_min_row[0], mask_min_row[1]] + ) + assert np.all( + pandora2d_machine.dataset_disp_maps["row_map"].data[mask_max_row[0], mask_max_row[1]] + == image_datasets.left.row_disparity.data[1, mask_max_row[0], mask_max_row[1]] + ) + + # Checking that best col disparity is unchanged for points having best cost value at the edge of col disparity range + assert np.all( + pandora2d_machine.dataset_disp_maps["col_map"].data[mask_min_col[0], mask_min_col[1]] + == image_datasets.left.col_disparity.data[0, mask_min_col[0], mask_min_col[1]] + ) + assert np.all( + pandora2d_machine.dataset_disp_maps["col_map"].data[mask_max_col[0], mask_max_col[1]] + == image_datasets.left.col_disparity.data[1, mask_max_col[0], mask_max_col[1]] + ) diff --git a/tests/functional_tests/target_grid/refinement/dichotomy/test_dichotomy_bicubic.py b/tests/functional_tests/target_grid/refinement/dichotomy/test_dichotomy_bicubic.py deleted file mode 100644 index df70117..0000000 --- a/tests/functional_tests/target_grid/refinement/dichotomy/test_dichotomy_bicubic.py +++ /dev/null @@ -1,81 +0,0 @@ -# Copyright (c) 2024 Centre National d'Etudes Spatiales (CNES). -# -# This file is part of PANDORA2D -# -# https://github.com/CNES/Pandora2D -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -""" -Test the refinement.dichotomy pipeline. -""" - -import pytest - -import numpy as np - -import pandora2d - - -@pytest.mark.parametrize("subpix", [1, 2, 4]) -@pytest.mark.parametrize("iterations", [1, 2]) -def test_dichotomy_execution(left_img_path, right_img_path, subpix, iterations): - """ - Test that execution of Pandora2d with a dichotomy refinement does not fail. - """ - pandora2d_machine = pandora2d.state_machine.Pandora2DMachine() - user_cfg = { - "input": { - "left": { - "img": str(left_img_path), - "nodata": "NaN", - }, - "right": { - "img": str(right_img_path), - "nodata": "NaN", - }, - "col_disparity": [-3, 3], - "row_disparity": [-3, 3], - }, - "ROI": {"col": {"first": 100, "last": 120}, "row": {"first": 100, "last": 120}}, - "pipeline": { - "matching_cost": { - "matching_cost_method": "zncc", - "window_size": 7, - "subpix": subpix, - }, - "disparity": { - "disparity_method": "wta", - "invalid_disparity": -9999, - }, - "refinement": { - "refinement_method": "dichotomy", - "iterations": iterations, - "filter": "bicubic", - }, - }, - } - cfg = pandora2d.check_configuration.check_conf(user_cfg, pandora2d_machine) - - cfg["ROI"]["margins"] = pandora2d_machine.margins.global_margins.astuple() - roi = pandora2d.img_tools.get_roi_processing( - cfg["ROI"], cfg["input"]["col_disparity"], cfg["input"]["row_disparity"] - ) - - image_datasets = pandora2d.img_tools.create_datasets_from_inputs(input_config=cfg["input"], roi=roi) - - dataset_disp_maps, _ = pandora2d.run(pandora2d_machine, image_datasets.left, image_datasets.right, cfg) - - # Checking that resulting disparity maps are not full of nans - with np.testing.assert_raises(AssertionError): - assert np.all(np.isnan(dataset_disp_maps.row_map.data)) - assert np.all(np.isnan(dataset_disp_maps.col_map.data)) diff --git a/tests/functional_tests/target_grid/roi/test_georeferencement.py b/tests/functional_tests/target_grid/roi/test_georeferencement.py deleted file mode 100644 index 36adb86..0000000 --- a/tests/functional_tests/target_grid/roi/test_georeferencement.py +++ /dev/null @@ -1,117 +0,0 @@ -# Copyright (c) 2024 Centre National d'Etudes Spatiales (CNES). -# -# This file is part of PANDORA2D -# -# https://github.com/CNES/Pandora2D -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -""" -Run pandora2d configurations with ROI from end to end. 
-""" - -# Make pylint happy with fixtures: -# pylint: disable=redefined-outer-name - -import pytest -import xarray as xr -import numpy as np -import rasterio - -from pandora.common import write_data_array - - -@pytest.fixture() -def left_data(): - """Create left data.""" - # Create array of shape (10,10): - # [[ 0, 1, 2, ..., 0], - # [ 1, 2, 3, ..., 10], - # [...], - # [8, 9, 10, ..., 17]] - # [9, 10, 11, ..., 18]] - return xr.DataArray( - data=np.arange(10) + np.arange(10).reshape(-1, 1), - dims=("row", "col"), - coords={"row": np.arange(10), "col": np.arange(10)}, - ) - - -@pytest.fixture() -def right_data(left_data): - return left_data + 1 - - -@pytest.fixture() -def transform(): - return rasterio.Affine(0.5, 0.0, 573083.5, 0.0, -0.5, 4825333.5) - - -@pytest.fixture() -def crs(): - return rasterio.crs.CRS.from_epsg(32631) - - -@pytest.fixture() -def left_path(tmp_path, left_data, crs, transform): - """Write left image and return its path.""" - path = tmp_path / "left.tif" - write_data_array( - data_array=left_data, - filename=str(path), - crs=crs, - transform=transform, - ) - return path - - -@pytest.fixture() -def right_path(tmp_path, right_data, crs, transform): - """Write right image and return its path.""" - path = tmp_path / "right.tif" - write_data_array( - data_array=right_data, - filename=str(path), - crs=crs, - transform=transform, - ) - return path - - -def test_roi_georeferencement(run_pipeline, left_path, right_path, crs, transform, correct_pipeline_without_refinement): - """Test that new georeferencement origin correspond to upper left corner of the ROI.""" - configuration = { - "input": { - "left": { - "img": str(left_path), - }, - "right": { - "img": str(right_path), - }, - "col_disparity": [-1, 3], - "row_disparity": [-1, 3], - }, - "ROI": { - "col": {"first": 3, "last": 7}, - "row": {"first": 5, "last": 8}, - }, - **correct_pipeline_without_refinement, - } - - run_dir = run_pipeline(configuration) - - columns_disparity = rasterio.open(run_dir / "output" / "columns_disparity.tif") - - assert columns_disparity.crs == crs - # assert that new georeferencement origin correspond to upper left corner of the ROI: - assert columns_disparity.transform * (0, 0) == transform * (3, 5) diff --git a/tests/functional_tests/target_grid/test_georeferencement.py b/tests/functional_tests/target_grid/test_georeferencement.py new file mode 100644 index 0000000..7da94aa --- /dev/null +++ b/tests/functional_tests/target_grid/test_georeferencement.py @@ -0,0 +1,212 @@ +# Copyright (c) 2024 Centre National d'Etudes Spatiales (CNES). +# +# This file is part of PANDORA2D +# +# https://github.com/CNES/Pandora2D +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +Run pandora2d configurations with ROI from end to end. 
+""" + +# Make pylint happy with fixtures: +# pylint: disable=redefined-outer-name + +import pytest +import xarray as xr +import numpy as np +import rasterio + +from pandora.common import write_data_array + + +@pytest.fixture() +def left_data(): + """Create left data.""" + # Create array of shape (10,10): + # [[ 0, 1, 2, ..., 0], + # [ 1, 2, 3, ..., 10], + # [...], + # [8, 9, 10, ..., 17]] + # [9, 10, 11, ..., 18]] + return xr.DataArray( + data=np.arange(10) + np.arange(10).reshape(-1, 1), + dims=("row", "col"), + coords={"row": np.arange(10), "col": np.arange(10)}, + ) + + +@pytest.fixture() +def right_data(left_data): + return left_data + 1 + + +@pytest.fixture() +def transform(): + return rasterio.Affine(0.5, 0.0, 573083.5, 0.0, -0.5, 4825333.5) + + +@pytest.fixture() +def crs(): + return rasterio.crs.CRS.from_epsg(32631) + + +@pytest.fixture() +def left_path(tmp_path, left_data, crs, transform): + """Write left image and return its path.""" + path = tmp_path / "left.tif" + write_data_array( + data_array=left_data, + filename=str(path), + crs=crs, + transform=transform, + ) + return path + + +@pytest.fixture() +def right_path(tmp_path, right_data, crs, transform): + """Write right image and return its path.""" + path = tmp_path / "right.tif" + write_data_array( + data_array=right_data, + filename=str(path), + crs=crs, + transform=transform, + ) + return path + + +@pytest.fixture() +def configuration(left_path, right_path, correct_pipeline_without_refinement, step): + correct_pipeline_without_refinement["pipeline"]["matching_cost"]["step"] = step + return { + "input": { + "left": { + "img": str(left_path), + }, + "right": { + "img": str(right_path), + }, + "col_disparity": {"init": 1, "range": 2}, + "row_disparity": {"init": 1, "range": 2}, + }, + **correct_pipeline_without_refinement, + } + + +@pytest.mark.parametrize( + [ + "step", + "bottom_right_corner_indexes", # Use transform convention: (col, row) + ], + [ + pytest.param([1, 1], (9, 9), id="No step"), # Disp map corner match ROI corner + pytest.param([2, 3], (9, 8), id="Step < image"), # Disp map corner match ROI corner + pytest.param([11, 11], (0, 0), id="Step > image"), # Disp map corner match ROI corner + ], +) +@pytest.mark.parametrize("output_file", ["columns_disparity.tif", "row_disparity.tif", "correlation_score.tif"]) +def test_georeferencement( + run_pipeline, + configuration, + crs, + transform, + bottom_right_corner_indexes, + output_file, +): + """Test that top left and bottom right corners are well georeferenced.""" + run_dir = run_pipeline(configuration) + + output = rasterio.open(run_dir / "output" / output_file) + bottom_right_disparity_indexes = output.width - 1, output.height - 1 + + assert output.crs == crs + # assert that new georeferencement origin correspond to upper left corner of the ROI: + upper_left_corner_indexes = (0, 0) + assert output.transform * (0, 0) == transform * upper_left_corner_indexes + assert output.transform * bottom_right_disparity_indexes == transform * bottom_right_corner_indexes + + +@pytest.fixture() +def configuration_with_roi(configuration, roi): + configuration["ROI"] = roi + return configuration + + +@pytest.mark.parametrize( + [ + "roi", + "step", + "bottom_right_corner_indexes", # Use transform convention: (col, row) + ], + [ + pytest.param( + {"col": {"first": 3, "last": 7}, "row": {"first": 5, "last": 8}}, [1, 1], (7, 8), id="No step" + ), # Disp map corner match ROI corner + pytest.param( + {"col": {"first": 3, "last": 7}, "row": {"first": 5, "last": 8}}, + [2, 3], + (6, 7), + 
id="Step < ROI size", + ), # Disp map corner is inside ROI + pytest.param( + {"col": {"first": 3, "last": 7}, "row": {"first": 5, "last": 8}}, + [4, 5], + (3, 5), + id="Step == ROI size", + ), # Only one pixel at ROI origin + pytest.param( + {"col": {"first": 3, "last": 7}, "row": {"first": 5, "last": 8}}, + [5, 6], + (3, 5), + id="Step > ROI size", + ), # Only one pixel at ROI origin + pytest.param( + {"col": {"first": 3, "last": 3}, "row": {"first": 5, "last": 5}}, + [1, 1], + (3, 5), + id="1px ROI - No step", + ), + pytest.param( + {"col": {"first": 3, "last": 3}, "row": {"first": 5, "last": 5}}, + [5, 6], + (3, 5), + id="1px ROI - Step", + ), + ], +) +@pytest.mark.parametrize("output_file", ["columns_disparity.tif", "row_disparity.tif", "correlation_score.tif"]) +def test_roi_georeferencement( + run_pipeline, + configuration_with_roi, + crs, + transform, + bottom_right_corner_indexes, + output_file, +): + """Test that top left and bottom right corners are well georeferenced.""" + run_dir = run_pipeline(configuration_with_roi) + + output = rasterio.open(run_dir / "output" / output_file) + bottom_right_disparity_indexes = output.width - 1, output.height - 1 + + assert output.crs == crs + # assert that new georeferencement origin correspond to upper left corner of the ROI: + upper_left_corner_indexes = ( + configuration_with_roi["ROI"]["col"]["first"], + configuration_with_roi["ROI"]["row"]["first"], + ) + assert output.transform * (0, 0) == transform * upper_left_corner_indexes + assert output.transform * bottom_right_disparity_indexes == transform * bottom_right_corner_indexes diff --git a/tests/functional_tests/test_pipelines.py b/tests/functional_tests/test_pipelines.py index 36b017d..8f14b27 100644 --- a/tests/functional_tests/test_pipelines.py +++ b/tests/functional_tests/test_pipelines.py @@ -19,15 +19,21 @@ """ Run pandora2d configurations from end to end. """ +import glob # pylint: disable=redefined-outer-name import json +import os +import re from copy import deepcopy from typing import Dict import pytest +import numpy as np +import rasterio + def remove_extra_keys(extended: dict, reference: dict): """ @@ -98,9 +104,30 @@ def test_no_common_key(self): assert result == {} -def test_monoband_with_nodata_not_nan(run_pipeline, correct_input_cfg, correct_pipeline_without_refinement): - """Test a configuration with monoband images.""" - configuration = {**correct_input_cfg, **correct_pipeline_without_refinement} +@pytest.mark.parametrize( + "roi", + [ + pytest.param({}, id="No ROI"), + pytest.param( + { + "ROI": { + "col": {"first": 3, "last": 7}, + "row": {"first": 5, "last": 8}, + } + }, + id="With ROI", + ), + ], +) +def test_monoband_with_nodata_not_nan(run_pipeline, correct_input_cfg, correct_pipeline_without_refinement, roi): + """ + Description : Test a configuration with monoband images. 
+ Data : + - Left image : cones/monoband/left.png + - Right image : cones/monoband/right.png + Requirement : EX_CONF_00, EX_CONF_06 + """ + configuration = {**correct_input_cfg, **correct_pipeline_without_refinement, **roi} configuration["input"]["left"]["nodata"] = -9999 run_dir = run_pipeline(configuration) @@ -122,7 +149,13 @@ def test_monoband_with_nodata_not_nan(run_pipeline, correct_input_cfg, correct_p @pytest.mark.xfail(reason="saved nan in nodata is not valid json and is not comparable to nan") def test_monoband_with_nan_nodata(run_pipeline, correct_input_cfg, correct_pipeline_without_refinement): - """Test a configuration with monoband images and left nodata set to NaN.""" + """ + Description : Test a configuration with monoband images and left nodata set to NaN. + Data : + - Left image : cones/monoband/left.png + - Right image : cones/monoband/right.png + Requirement : EX_CONF_00, EX_CONF_06 + """ configuration = {**correct_input_cfg, **correct_pipeline_without_refinement} run_dir = run_pipeline(configuration) @@ -138,7 +171,13 @@ def test_monoband_with_nan_nodata(run_pipeline, correct_input_cfg, correct_pipel @pytest.mark.xfail(reason="Multiband is not managed") def test_multiband(run_pipeline, correct_multiband_input_cfg, correct_pipeline_without_refinement): - """Test a configuration with multiband images.""" + """ + Description : Test a configuration with multiband images. + Data : + - Left image : cones/multibands/left.tif + - Right image : cones/multibands/right.tif + Requirement : EX_CONF_00, EX_CONF_06, EX_CONF_12 + """ configuration: Dict[str, Dict] = {**correct_multiband_input_cfg, **correct_pipeline_without_refinement} run_dir = run_pipeline(configuration) @@ -153,7 +192,13 @@ def test_multiband(run_pipeline, correct_multiband_input_cfg, correct_pipeline_w def test_optical_flow_configuration(run_pipeline, correct_input_cfg, correct_pipeline_with_optical_flow): - """Test optical_flow configuration has a window_size and a step identical to matching_cost step.""" + """ + Description : Test optical_flow configuration has a window_size and a step identical to matching_cost step. 
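The configuration round-trip tests in this file compare the written output configuration to the input one through remove_extra_keys, whose body is not shown in this diff. A plausible sketch of its behaviour (the real implementation may differ):

def remove_extra_keys(extended: dict, reference: dict) -> dict:
    """Keep only the keys of `extended` that also exist in `reference`, recursively."""
    result = {}
    for key, value in extended.items():
        if key in reference:
            if isinstance(value, dict) and isinstance(reference[key], dict):
                result[key] = remove_extra_keys(value, reference[key])
            else:
                result[key] = value
    return result

# with no common key the result is empty, as asserted by test_no_common_key above
assert remove_extra_keys({"output": {"path": "out"}}, {"input": {}}) == {}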
+ Data : + - Left image : cones/monoband/left.png + - Right image : cones/monoband/right.png + Requirement : EX_CONF_00, EX_CONF_06 + """ configuration: Dict[str, Dict] = {**correct_input_cfg, **correct_pipeline_with_optical_flow} configuration["pipeline"]["refinement"]["iterations"] = 1 @@ -168,3 +213,135 @@ def test_optical_flow_configuration(run_pipeline, correct_input_cfg, correct_pip # Check window_size and step parameters assert matching_cost_cfg["window_size"] == refinement_cfg["window_size"] assert matching_cost_cfg["step"] == refinement_cfg["step"] + + +@pytest.mark.parametrize("input_cfg", ["correct_input_with_left_mask", "correct_input_with_right_mask"]) +def test_configuration_with_mask(run_pipeline, input_cfg, correct_pipeline_without_refinement, request): + """ + Description : Test mask configuration + """ + input_cfg = request.getfixturevalue(input_cfg) + + configuration = {**input_cfg, **correct_pipeline_without_refinement} + + run_dir = run_pipeline(configuration) + + with open(run_dir / "output" / "cfg" / "config.json", encoding="utf8") as output_file: + output_config = json.load(output_file) + + result = remove_extra_keys(output_config, configuration) + + assert result == configuration + assert list(result["pipeline"].keys()) == list(configuration["pipeline"].keys()), "Pipeline order not respected" + + # Test for report + with open(run_dir / "output" / "report.json", encoding="utf8") as report_file: + report = json.load(report_file) + + assert report["statistics"]["disparity"].keys() == {"row", "col"} + + +@pytest.mark.parametrize( + ["make_input_cfg", "pipeline"], + [ + pytest.param( + {"row_disparity": "correct_grid", "col_disparity": "second_correct_grid"}, + "correct_pipeline_without_refinement", + id="Pipeline with disparity grids", + ), + pytest.param( + {"row_disparity": "correct_grid", "col_disparity": "second_correct_grid"}, + "correct_pipeline_with_dichotomy", + id="Pipeline with disparity grids and dichotomy", + ), + ], + indirect=["make_input_cfg"], +) +def test_disparity_grids(run_pipeline, make_input_cfg, pipeline, request): + """ + Description: Test pipeline with disparity grids + """ + + configuration = { + "input": make_input_cfg, + "ROI": {"col": {"first": 210, "last": 240}, "row": {"first": 210, "last": 240}}, + **request.getfixturevalue(pipeline), + } + configuration["pipeline"]["disparity"]["invalid_disparity"] = np.nan + + run_dir = run_pipeline(configuration) + + with rasterio.open(run_dir / "output" / "row_disparity.tif") as src: + row_map = src.read(1) + with rasterio.open(run_dir / "output" / "columns_disparity.tif") as src: + col_map = src.read(1) + + non_nan_row_map = ~np.isnan(row_map) + non_nan_col_map = ~np.isnan(col_map) + + # Minimal and maximal disparities corresponding to correct_grid_path fixture + min_max_disp_row = np.array( + [ + np.tile([[-3], [-5], [-2]], (375 // 3 + 1, 450))[210:241, 210:241], + np.tile([[7], [5], [8]], (375 // 3 + 1, 450))[210:241, 210:241], + ] + ) + + # Minimal and maximal disparities corresponding to second_correct_grid_path fixture + min_max_disp_col = np.array( + [ + np.tile([[0, -26, -6]], (375, 450 // 3 + 1))[210:241, 210:241], + np.tile([[10, -16, 4]], (375, 450 // 3 + 1))[210:241, 210:241], + ] + ) + + # Checks that the resulting disparities are well within the ranges created from the input disparity grids + assert np.all( + (row_map[non_nan_row_map] >= min_max_disp_row[0, ::][non_nan_row_map]) + & (row_map[non_nan_row_map] <= min_max_disp_row[1, ::][non_nan_row_map]) + ) + assert np.all( + 
(col_map[non_nan_col_map] >= min_max_disp_col[0, ::][non_nan_col_map]) + & (col_map[non_nan_col_map] <= min_max_disp_col[1, ::][non_nan_col_map]) + ) + + +@pytest.mark.usefixtures("reset_profiling") +@pytest.mark.parametrize( + ["ground_truth", "configuration_expert", "file_exists"], + [ + pytest.param( + [".csv", ".pdf"], + {"expert_mode": {"profiling": {"folder_name": "expert_mode"}}}, + True, + id="Expert mode", + ), + pytest.param([], {}, False, id="No expert mode"), + ], +) +def test_expert_mode( + ground_truth, + configuration_expert, + run_pipeline, + file_exists, + correct_input_cfg, + correct_pipeline_without_refinement, +): + """ + Description : Test default expert mode outputs + Data : + - Left image : cones/monoband/left.png + - Right image : cones/monoband/right.png + """ + + configuration = {**correct_input_cfg, **correct_pipeline_without_refinement, **configuration_expert} + + run_dir = run_pipeline(configuration) + + output_expert_dir = run_dir / "output" / "expert_mode" + + assert output_expert_dir.exists() == file_exists + + if output_expert_dir.exists(): + file_extensions = [f.suffix for f in output_expert_dir.iterdir() if f.is_file()] + assert set(file_extensions) == set(ground_truth) diff --git a/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.00c+0.25/left.tif b/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.00c+0.25/left.tif new file mode 100644 index 0000000..6eaaf41 Binary files /dev/null and b/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.00c+0.25/left.tif differ diff --git a/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.00c+0.25/right.tif b/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.00c+0.25/right.tif new file mode 100644 index 0000000..a1d73fe Binary files /dev/null and b/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.00c+0.25/right.tif differ diff --git a/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.00c+0.25/zncc_dicho_nappe_bco/gri_zncc_dicho_nappe_bco_col_disp.tif b/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.00c+0.25/zncc_dicho_nappe_bco/gri_zncc_dicho_nappe_bco_col_disp.tif new file mode 100644 index 0000000..c4c4e9e Binary files /dev/null and b/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.00c+0.25/zncc_dicho_nappe_bco/gri_zncc_dicho_nappe_bco_col_disp.tif differ diff --git a/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.00c+0.25/zncc_dicho_nappe_bco/gri_zncc_dicho_nappe_bco_row_disp.tif b/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.00c+0.25/zncc_dicho_nappe_bco/gri_zncc_dicho_nappe_bco_row_disp.tif new file mode 100644 index 0000000..c37a66a Binary files /dev/null and b/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.00c+0.25/zncc_dicho_nappe_bco/gri_zncc_dicho_nappe_bco_row_disp.tif differ diff --git a/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.00c+0.25/zncc_dicho_nappe_sinc/gri_zncc_dicho_nappe_sinc_col_disp.tif b/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.00c+0.25/zncc_dicho_nappe_sinc/gri_zncc_dicho_nappe_sinc_col_disp.tif new file mode 100644 index 0000000..fd8e99e Binary files /dev/null and b/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.00c+0.25/zncc_dicho_nappe_sinc/gri_zncc_dicho_nappe_sinc_col_disp.tif differ diff --git 
a/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.00c+0.25/zncc_dicho_nappe_sinc/gri_zncc_dicho_nappe_sinc_row_disp.tif b/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.00c+0.25/zncc_dicho_nappe_sinc/gri_zncc_dicho_nappe_sinc_row_disp.tif new file mode 100644 index 0000000..a11e969 Binary files /dev/null and b/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.00c+0.25/zncc_dicho_nappe_sinc/gri_zncc_dicho_nappe_sinc_row_disp.tif differ diff --git a/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.00c+0.50/left.tif b/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.00c+0.50/left.tif new file mode 100644 index 0000000..d9dd0ba Binary files /dev/null and b/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.00c+0.50/left.tif differ diff --git a/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.00c+0.50/right.tif b/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.00c+0.50/right.tif new file mode 100644 index 0000000..b62f8b9 Binary files /dev/null and b/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.00c+0.50/right.tif differ diff --git a/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.00c+0.50/zncc_dicho_nappe_bco/gri_zncc_dicho_nappe_bco_col_disp.tif b/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.00c+0.50/zncc_dicho_nappe_bco/gri_zncc_dicho_nappe_bco_col_disp.tif new file mode 100644 index 0000000..4ad30a4 Binary files /dev/null and b/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.00c+0.50/zncc_dicho_nappe_bco/gri_zncc_dicho_nappe_bco_col_disp.tif differ diff --git a/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.00c+0.50/zncc_dicho_nappe_bco/gri_zncc_dicho_nappe_bco_row_disp.tif b/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.00c+0.50/zncc_dicho_nappe_bco/gri_zncc_dicho_nappe_bco_row_disp.tif new file mode 100644 index 0000000..3855e55 Binary files /dev/null and b/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.00c+0.50/zncc_dicho_nappe_bco/gri_zncc_dicho_nappe_bco_row_disp.tif differ diff --git a/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.00c+0.50/zncc_dicho_nappe_sinc/gri_zncc_dicho_nappe_sinc_col_disp.tif b/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.00c+0.50/zncc_dicho_nappe_sinc/gri_zncc_dicho_nappe_sinc_col_disp.tif new file mode 100644 index 0000000..48b951e Binary files /dev/null and b/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.00c+0.50/zncc_dicho_nappe_sinc/gri_zncc_dicho_nappe_sinc_col_disp.tif differ diff --git a/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.00c+0.50/zncc_dicho_nappe_sinc/gri_zncc_dicho_nappe_sinc_row_disp.tif b/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.00c+0.50/zncc_dicho_nappe_sinc/gri_zncc_dicho_nappe_sinc_row_disp.tif new file mode 100644 index 0000000..6082203 Binary files /dev/null and b/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.00c+0.50/zncc_dicho_nappe_sinc/gri_zncc_dicho_nappe_sinc_row_disp.tif differ diff --git a/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.00c+0.50/zncc_dicho_nappe_surech_bco/gri_zncc_dicho_nappe_surech_bco_col_disp.tif 
b/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.00c+0.50/zncc_dicho_nappe_surech_bco/gri_zncc_dicho_nappe_surech_bco_col_disp.tif new file mode 100644 index 0000000..ed3c813 Binary files /dev/null and b/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.00c+0.50/zncc_dicho_nappe_surech_bco/gri_zncc_dicho_nappe_surech_bco_col_disp.tif differ diff --git a/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.00c+0.50/zncc_dicho_nappe_surech_bco/gri_zncc_dicho_nappe_surech_bco_row_disp.tif b/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.00c+0.50/zncc_dicho_nappe_surech_bco/gri_zncc_dicho_nappe_surech_bco_row_disp.tif new file mode 100644 index 0000000..2d36836 Binary files /dev/null and b/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.00c+0.50/zncc_dicho_nappe_surech_bco/gri_zncc_dicho_nappe_surech_bco_row_disp.tif differ diff --git a/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.00c+0.50/zncc_dicho_nappe_surech_sinc/gri_zncc_dicho_nappe_surech_sinc_col_disp.tif b/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.00c+0.50/zncc_dicho_nappe_surech_sinc/gri_zncc_dicho_nappe_surech_sinc_col_disp.tif new file mode 100644 index 0000000..aa8c4ea Binary files /dev/null and b/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.00c+0.50/zncc_dicho_nappe_surech_sinc/gri_zncc_dicho_nappe_surech_sinc_col_disp.tif differ diff --git a/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.00c+0.50/zncc_dicho_nappe_surech_sinc/gri_zncc_dicho_nappe_surech_sinc_row_disp.tif b/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.00c+0.50/zncc_dicho_nappe_surech_sinc/gri_zncc_dicho_nappe_surech_sinc_row_disp.tif new file mode 100644 index 0000000..5d7558f Binary files /dev/null and b/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.00c+0.50/zncc_dicho_nappe_surech_sinc/gri_zncc_dicho_nappe_surech_sinc_row_disp.tif differ diff --git a/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.00c-0.25/left.tif b/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.00c-0.25/left.tif new file mode 100644 index 0000000..a1d73fe Binary files /dev/null and b/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.00c-0.25/left.tif differ diff --git a/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.00c-0.25/right.tif b/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.00c-0.25/right.tif new file mode 100644 index 0000000..6eaaf41 Binary files /dev/null and b/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.00c-0.25/right.tif differ diff --git a/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.00c-0.25/zncc_dicho_nappe_surech_bco/gri_zncc_dicho_nappe_surech_bco_col_disp.tif b/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.00c-0.25/zncc_dicho_nappe_surech_bco/gri_zncc_dicho_nappe_surech_bco_col_disp.tif new file mode 100644 index 0000000..c16dd99 Binary files /dev/null and b/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.00c-0.25/zncc_dicho_nappe_surech_bco/gri_zncc_dicho_nappe_surech_bco_col_disp.tif differ diff --git a/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.00c-0.25/zncc_dicho_nappe_surech_bco/gri_zncc_dicho_nappe_surech_bco_row_disp.tif 
b/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.00c-0.25/zncc_dicho_nappe_surech_bco/gri_zncc_dicho_nappe_surech_bco_row_disp.tif new file mode 100644 index 0000000..a4c53dd Binary files /dev/null and b/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.00c-0.25/zncc_dicho_nappe_surech_bco/gri_zncc_dicho_nappe_surech_bco_row_disp.tif differ diff --git a/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.00c-0.25/zncc_dicho_nappe_surech_sinc/gri_zncc_dicho_nappe_surech_sinc_col_disp.tif b/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.00c-0.25/zncc_dicho_nappe_surech_sinc/gri_zncc_dicho_nappe_surech_sinc_col_disp.tif new file mode 100644 index 0000000..fb4bdbb Binary files /dev/null and b/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.00c-0.25/zncc_dicho_nappe_surech_sinc/gri_zncc_dicho_nappe_surech_sinc_col_disp.tif differ diff --git a/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.00c-0.25/zncc_dicho_nappe_surech_sinc/gri_zncc_dicho_nappe_surech_sinc_row_disp.tif b/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.00c-0.25/zncc_dicho_nappe_surech_sinc/gri_zncc_dicho_nappe_surech_sinc_row_disp.tif new file mode 100644 index 0000000..8c91a09 Binary files /dev/null and b/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.00c-0.25/zncc_dicho_nappe_surech_sinc/gri_zncc_dicho_nappe_surech_sinc_row_disp.tif differ diff --git a/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.25c+0.25/left.tif b/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.25c+0.25/left.tif new file mode 100644 index 0000000..f502d54 Binary files /dev/null and b/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.25c+0.25/left.tif differ diff --git a/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.25c+0.25/right.tif b/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.25c+0.25/right.tif new file mode 100644 index 0000000..07adbd8 Binary files /dev/null and b/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.25c+0.25/right.tif differ diff --git a/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.25c+0.25/zncc_dicho_nappe_bco/gri_zncc_dicho_nappe_bco_col_disp.tif b/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.25c+0.25/zncc_dicho_nappe_bco/gri_zncc_dicho_nappe_bco_col_disp.tif new file mode 100644 index 0000000..e59c233 Binary files /dev/null and b/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.25c+0.25/zncc_dicho_nappe_bco/gri_zncc_dicho_nappe_bco_col_disp.tif differ diff --git a/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.25c+0.25/zncc_dicho_nappe_bco/gri_zncc_dicho_nappe_bco_row_disp.tif b/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.25c+0.25/zncc_dicho_nappe_bco/gri_zncc_dicho_nappe_bco_row_disp.tif new file mode 100644 index 0000000..5e0b06b Binary files /dev/null and b/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.25c+0.25/zncc_dicho_nappe_bco/gri_zncc_dicho_nappe_bco_row_disp.tif differ diff --git a/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.25c+0.25/zncc_dicho_nappe_sinc/gri_zncc_dicho_nappe_sinc_col_disp.tif b/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.25c+0.25/zncc_dicho_nappe_sinc/gri_zncc_dicho_nappe_sinc_col_disp.tif new file mode 100644 index 
0000000..d4f728f Binary files /dev/null and b/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.25c+0.25/zncc_dicho_nappe_sinc/gri_zncc_dicho_nappe_sinc_col_disp.tif differ diff --git a/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.25c+0.25/zncc_dicho_nappe_sinc/gri_zncc_dicho_nappe_sinc_row_disp.tif b/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.25c+0.25/zncc_dicho_nappe_sinc/gri_zncc_dicho_nappe_sinc_row_disp.tif new file mode 100644 index 0000000..eef1c83 Binary files /dev/null and b/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.25c+0.25/zncc_dicho_nappe_sinc/gri_zncc_dicho_nappe_sinc_row_disp.tif differ diff --git a/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.25c+0.25/zncc_dicho_nappe_surech_bco/gri_zncc_dicho_nappe_surech_bco_col_disp.tif b/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.25c+0.25/zncc_dicho_nappe_surech_bco/gri_zncc_dicho_nappe_surech_bco_col_disp.tif new file mode 100644 index 0000000..7b7c6fd Binary files /dev/null and b/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.25c+0.25/zncc_dicho_nappe_surech_bco/gri_zncc_dicho_nappe_surech_bco_col_disp.tif differ diff --git a/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.25c+0.25/zncc_dicho_nappe_surech_bco/gri_zncc_dicho_nappe_surech_bco_row_disp.tif b/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.25c+0.25/zncc_dicho_nappe_surech_bco/gri_zncc_dicho_nappe_surech_bco_row_disp.tif new file mode 100644 index 0000000..0eb34ba Binary files /dev/null and b/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.25c+0.25/zncc_dicho_nappe_surech_bco/gri_zncc_dicho_nappe_surech_bco_row_disp.tif differ diff --git a/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.25c+0.25/zncc_dicho_nappe_surech_sinc/gri_zncc_dicho_nappe_surech_sinc_col_disp.tif b/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.25c+0.25/zncc_dicho_nappe_surech_sinc/gri_zncc_dicho_nappe_surech_sinc_col_disp.tif new file mode 100644 index 0000000..0247621 Binary files /dev/null and b/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.25c+0.25/zncc_dicho_nappe_surech_sinc/gri_zncc_dicho_nappe_surech_sinc_col_disp.tif differ diff --git a/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.25c+0.25/zncc_dicho_nappe_surech_sinc/gri_zncc_dicho_nappe_surech_sinc_row_disp.tif b/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.25c+0.25/zncc_dicho_nappe_surech_sinc/gri_zncc_dicho_nappe_surech_sinc_row_disp.tif new file mode 100644 index 0000000..42d53ab Binary files /dev/null and b/tests/performance_tests/refinement/dichotomy/data_medicis/T19KER/r+0.25c+0.25/zncc_dicho_nappe_surech_sinc/gri_zncc_dicho_nappe_surech_sinc_row_disp.tif differ diff --git a/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.00c+0.25/left.tif b/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.00c+0.25/left.tif new file mode 100644 index 0000000..a187e73 Binary files /dev/null and b/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.00c+0.25/left.tif differ diff --git a/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.00c+0.25/right.tif b/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.00c+0.25/right.tif new file mode 100644 index 0000000..61a55c6 Binary files /dev/null and 
b/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.00c+0.25/right.tif differ diff --git a/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.00c+0.25/zncc_dicho_nappe_bco/gri_zncc_dicho_nappe_bco_col_disp.tif b/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.00c+0.25/zncc_dicho_nappe_bco/gri_zncc_dicho_nappe_bco_col_disp.tif new file mode 100644 index 0000000..bac175e Binary files /dev/null and b/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.00c+0.25/zncc_dicho_nappe_bco/gri_zncc_dicho_nappe_bco_col_disp.tif differ diff --git a/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.00c+0.25/zncc_dicho_nappe_bco/gri_zncc_dicho_nappe_bco_row_disp.tif b/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.00c+0.25/zncc_dicho_nappe_bco/gri_zncc_dicho_nappe_bco_row_disp.tif new file mode 100644 index 0000000..00615e5 Binary files /dev/null and b/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.00c+0.25/zncc_dicho_nappe_bco/gri_zncc_dicho_nappe_bco_row_disp.tif differ diff --git a/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.00c+0.25/zncc_dicho_nappe_sinc/gri_zncc_dicho_nappe_sinc_col_disp.tif b/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.00c+0.25/zncc_dicho_nappe_sinc/gri_zncc_dicho_nappe_sinc_col_disp.tif new file mode 100644 index 0000000..389479d Binary files /dev/null and b/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.00c+0.25/zncc_dicho_nappe_sinc/gri_zncc_dicho_nappe_sinc_col_disp.tif differ diff --git a/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.00c+0.25/zncc_dicho_nappe_sinc/gri_zncc_dicho_nappe_sinc_row_disp.tif b/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.00c+0.25/zncc_dicho_nappe_sinc/gri_zncc_dicho_nappe_sinc_row_disp.tif new file mode 100644 index 0000000..65f1b40 Binary files /dev/null and b/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.00c+0.25/zncc_dicho_nappe_sinc/gri_zncc_dicho_nappe_sinc_row_disp.tif differ diff --git a/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.00c+0.25/zncc_dicho_nappe_surech_bco/gri_zncc_dicho_nappe_surech_bco_col_disp.tif b/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.00c+0.25/zncc_dicho_nappe_surech_bco/gri_zncc_dicho_nappe_surech_bco_col_disp.tif new file mode 100644 index 0000000..444e643 Binary files /dev/null and b/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.00c+0.25/zncc_dicho_nappe_surech_bco/gri_zncc_dicho_nappe_surech_bco_col_disp.tif differ diff --git a/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.00c+0.25/zncc_dicho_nappe_surech_bco/gri_zncc_dicho_nappe_surech_bco_row_disp.tif b/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.00c+0.25/zncc_dicho_nappe_surech_bco/gri_zncc_dicho_nappe_surech_bco_row_disp.tif new file mode 100644 index 0000000..e1dda5f Binary files /dev/null and b/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.00c+0.25/zncc_dicho_nappe_surech_bco/gri_zncc_dicho_nappe_surech_bco_row_disp.tif differ diff --git a/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.00c+0.25/zncc_dicho_nappe_surech_sinc/gri_zncc_dicho_nappe_surech_sinc_col_disp.tif b/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.00c+0.25/zncc_dicho_nappe_surech_sinc/gri_zncc_dicho_nappe_surech_sinc_col_disp.tif new 
file mode 100644 index 0000000..279e00e Binary files /dev/null and b/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.00c+0.25/zncc_dicho_nappe_surech_sinc/gri_zncc_dicho_nappe_surech_sinc_col_disp.tif differ diff --git a/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.00c+0.25/zncc_dicho_nappe_surech_sinc/gri_zncc_dicho_nappe_surech_sinc_row_disp.tif b/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.00c+0.25/zncc_dicho_nappe_surech_sinc/gri_zncc_dicho_nappe_surech_sinc_row_disp.tif new file mode 100644 index 0000000..ef60641 Binary files /dev/null and b/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.00c+0.25/zncc_dicho_nappe_surech_sinc/gri_zncc_dicho_nappe_surech_sinc_row_disp.tif differ diff --git a/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.00c+0.50/left.tif b/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.00c+0.50/left.tif new file mode 100644 index 0000000..fb13f7a Binary files /dev/null and b/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.00c+0.50/left.tif differ diff --git a/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.00c+0.50/right.tif b/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.00c+0.50/right.tif new file mode 100644 index 0000000..10741e9 Binary files /dev/null and b/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.00c+0.50/right.tif differ diff --git a/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.00c+0.50/zncc_dicho_nappe_bco/gri_zncc_dicho_nappe_bco_col_disp.tif b/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.00c+0.50/zncc_dicho_nappe_bco/gri_zncc_dicho_nappe_bco_col_disp.tif new file mode 100644 index 0000000..7830fb9 Binary files /dev/null and b/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.00c+0.50/zncc_dicho_nappe_bco/gri_zncc_dicho_nappe_bco_col_disp.tif differ diff --git a/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.00c+0.50/zncc_dicho_nappe_bco/gri_zncc_dicho_nappe_bco_row_disp.tif b/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.00c+0.50/zncc_dicho_nappe_bco/gri_zncc_dicho_nappe_bco_row_disp.tif new file mode 100644 index 0000000..60b7af0 Binary files /dev/null and b/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.00c+0.50/zncc_dicho_nappe_bco/gri_zncc_dicho_nappe_bco_row_disp.tif differ diff --git a/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.00c+0.50/zncc_dicho_nappe_sinc/gri_zncc_dicho_nappe_sinc_col_disp.tif b/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.00c+0.50/zncc_dicho_nappe_sinc/gri_zncc_dicho_nappe_sinc_col_disp.tif new file mode 100644 index 0000000..fd986a3 Binary files /dev/null and b/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.00c+0.50/zncc_dicho_nappe_sinc/gri_zncc_dicho_nappe_sinc_col_disp.tif differ diff --git a/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.00c+0.50/zncc_dicho_nappe_sinc/gri_zncc_dicho_nappe_sinc_row_disp.tif b/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.00c+0.50/zncc_dicho_nappe_sinc/gri_zncc_dicho_nappe_sinc_row_disp.tif new file mode 100644 index 0000000..b3646c9 Binary files /dev/null and b/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.00c+0.50/zncc_dicho_nappe_sinc/gri_zncc_dicho_nappe_sinc_row_disp.tif differ diff --git 
a/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.00c+0.50/zncc_dicho_nappe_surech_bco/gri_zncc_dicho_nappe_surech_bco_col_disp.tif b/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.00c+0.50/zncc_dicho_nappe_surech_bco/gri_zncc_dicho_nappe_surech_bco_col_disp.tif new file mode 100644 index 0000000..b8195e8 Binary files /dev/null and b/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.00c+0.50/zncc_dicho_nappe_surech_bco/gri_zncc_dicho_nappe_surech_bco_col_disp.tif differ diff --git a/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.00c+0.50/zncc_dicho_nappe_surech_bco/gri_zncc_dicho_nappe_surech_bco_row_disp.tif b/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.00c+0.50/zncc_dicho_nappe_surech_bco/gri_zncc_dicho_nappe_surech_bco_row_disp.tif new file mode 100644 index 0000000..c9aa110 Binary files /dev/null and b/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.00c+0.50/zncc_dicho_nappe_surech_bco/gri_zncc_dicho_nappe_surech_bco_row_disp.tif differ diff --git a/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.00c+0.50/zncc_dicho_nappe_surech_sinc/gri_zncc_dicho_nappe_surech_sinc_col_disp.tif b/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.00c+0.50/zncc_dicho_nappe_surech_sinc/gri_zncc_dicho_nappe_surech_sinc_col_disp.tif new file mode 100644 index 0000000..ad3633a Binary files /dev/null and b/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.00c+0.50/zncc_dicho_nappe_surech_sinc/gri_zncc_dicho_nappe_surech_sinc_col_disp.tif differ diff --git a/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.00c+0.50/zncc_dicho_nappe_surech_sinc/gri_zncc_dicho_nappe_surech_sinc_row_disp.tif b/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.00c+0.50/zncc_dicho_nappe_surech_sinc/gri_zncc_dicho_nappe_surech_sinc_row_disp.tif new file mode 100644 index 0000000..b70daaf Binary files /dev/null and b/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.00c+0.50/zncc_dicho_nappe_surech_sinc/gri_zncc_dicho_nappe_surech_sinc_row_disp.tif differ diff --git a/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.00c-0.25/left.tif b/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.00c-0.25/left.tif new file mode 100644 index 0000000..61a55c6 Binary files /dev/null and b/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.00c-0.25/left.tif differ diff --git a/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.00c-0.25/right.tif b/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.00c-0.25/right.tif new file mode 100644 index 0000000..a187e73 Binary files /dev/null and b/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.00c-0.25/right.tif differ diff --git a/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.00c-0.25/zncc_dicho_nappe_surech_bco/gri_zncc_dicho_nappe_surech_bco_col_disp.tif b/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.00c-0.25/zncc_dicho_nappe_surech_bco/gri_zncc_dicho_nappe_surech_bco_col_disp.tif new file mode 100644 index 0000000..7459823 Binary files /dev/null and b/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.00c-0.25/zncc_dicho_nappe_surech_bco/gri_zncc_dicho_nappe_surech_bco_col_disp.tif differ diff --git 
a/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.00c-0.25/zncc_dicho_nappe_surech_bco/gri_zncc_dicho_nappe_surech_bco_row_disp.tif b/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.00c-0.25/zncc_dicho_nappe_surech_bco/gri_zncc_dicho_nappe_surech_bco_row_disp.tif new file mode 100644 index 0000000..13b00e3 Binary files /dev/null and b/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.00c-0.25/zncc_dicho_nappe_surech_bco/gri_zncc_dicho_nappe_surech_bco_row_disp.tif differ diff --git a/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.00c-0.25/zncc_dicho_nappe_surech_sinc/gri_zncc_dicho_nappe_surech_sinc_col_disp.tif b/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.00c-0.25/zncc_dicho_nappe_surech_sinc/gri_zncc_dicho_nappe_surech_sinc_col_disp.tif new file mode 100644 index 0000000..69b00e4 Binary files /dev/null and b/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.00c-0.25/zncc_dicho_nappe_surech_sinc/gri_zncc_dicho_nappe_surech_sinc_col_disp.tif differ diff --git a/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.00c-0.25/zncc_dicho_nappe_surech_sinc/gri_zncc_dicho_nappe_surech_sinc_row_disp.tif b/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.00c-0.25/zncc_dicho_nappe_surech_sinc/gri_zncc_dicho_nappe_surech_sinc_row_disp.tif new file mode 100644 index 0000000..b90f9fd Binary files /dev/null and b/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.00c-0.25/zncc_dicho_nappe_surech_sinc/gri_zncc_dicho_nappe_surech_sinc_row_disp.tif differ diff --git a/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.25c+0.25/left.tif b/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.25c+0.25/left.tif new file mode 100644 index 0000000..f271288 Binary files /dev/null and b/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.25c+0.25/left.tif differ diff --git a/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.25c+0.25/right.tif b/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.25c+0.25/right.tif new file mode 100644 index 0000000..a535171 Binary files /dev/null and b/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.25c+0.25/right.tif differ diff --git a/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.25c+0.25/zncc_dicho_nappe_surech_bco/gri_zncc_dicho_nappe_surech_bco_col_disp.tif b/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.25c+0.25/zncc_dicho_nappe_surech_bco/gri_zncc_dicho_nappe_surech_bco_col_disp.tif new file mode 100644 index 0000000..fa71d34 Binary files /dev/null and b/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.25c+0.25/zncc_dicho_nappe_surech_bco/gri_zncc_dicho_nappe_surech_bco_col_disp.tif differ diff --git a/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.25c+0.25/zncc_dicho_nappe_surech_bco/gri_zncc_dicho_nappe_surech_bco_row_disp.tif b/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.25c+0.25/zncc_dicho_nappe_surech_bco/gri_zncc_dicho_nappe_surech_bco_row_disp.tif new file mode 100644 index 0000000..7aa5415 Binary files /dev/null and b/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.25c+0.25/zncc_dicho_nappe_surech_bco/gri_zncc_dicho_nappe_surech_bco_row_disp.tif differ diff --git 
a/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.25c+0.25/zncc_dicho_nappe_surech_sinc/gri_zncc_dicho_nappe_surech_sinc_col_disp.tif b/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.25c+0.25/zncc_dicho_nappe_surech_sinc/gri_zncc_dicho_nappe_surech_sinc_col_disp.tif new file mode 100644 index 0000000..9cf123d Binary files /dev/null and b/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.25c+0.25/zncc_dicho_nappe_surech_sinc/gri_zncc_dicho_nappe_surech_sinc_col_disp.tif differ diff --git a/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.25c+0.25/zncc_dicho_nappe_surech_sinc/gri_zncc_dicho_nappe_surech_sinc_row_disp.tif b/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.25c+0.25/zncc_dicho_nappe_surech_sinc/gri_zncc_dicho_nappe_surech_sinc_row_disp.tif new file mode 100644 index 0000000..9ac6fb9 Binary files /dev/null and b/tests/performance_tests/refinement/dichotomy/data_medicis/T50JML/r+0.25c+0.25/zncc_dicho_nappe_surech_sinc/gri_zncc_dicho_nappe_surech_sinc_row_disp.tif differ diff --git a/tests/performance_tests/refinement/dichotomy/test_dichotomy_performance.py b/tests/performance_tests/refinement/dichotomy/test_dichotomy_performance.py new file mode 100644 index 0000000..24e0884 --- /dev/null +++ b/tests/performance_tests/refinement/dichotomy/test_dichotomy_performance.py @@ -0,0 +1,390 @@ +# Copyright (c) 2024 Centre National d'Etudes Spatiales (CNES). +# +# This file is part of PANDORA2D +# +# https://github.com/CNES/Pandora2D +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +""" +Test the refinement.dichotomy pipeline. +""" + +from typing import Tuple +from pathlib import Path + +import pytest + +import numpy as np +import rasterio + +from numpy.typing import NDArray + + +class TestComparisonMedicis: + """ + Tests that the pandora2d disparity maps after using the dichotomy + are similar to those obtained with Medicis. + """ + + def remove_edges( + self, medicis_map: NDArray[np.floating], pandora2d_map: NDArray[np.floating] + ) -> Tuple[NDArray[np.floating], NDArray[np.floating]]: + """ + Get reduced disparity maps after removing medicis edges full of nans (greater than pandora2d edges) + on both pandora2d and medicis disparity maps. + """ + + # Gets coordinates for which medicis col_map is different from nan + # i.e. 
points that are not within the edges + non_nan_row_indexes, non_nan_col_indexes = np.where(~np.isnan(medicis_map)) + + # Remove medicis edges + medicis_map = medicis_map[ + non_nan_row_indexes[0] : non_nan_row_indexes[-1] + 1, non_nan_col_indexes[0] : non_nan_col_indexes[-1] + 1 + ] + + # Remove pandora2d edges to get the same points as the ones in medicis disparity maps + pandora2d_map = pandora2d_map[ + non_nan_row_indexes[0] : non_nan_row_indexes[-1] + 1, non_nan_col_indexes[0] : non_nan_col_indexes[-1] + 1 + ] + + return medicis_map, pandora2d_map + + @pytest.fixture() + def data_path(self): + """ + Return path to get left and right images and medicis data + """ + return Path("tests/performance_tests/refinement/dichotomy/data_medicis/") + + @pytest.fixture() + def shift_path(self, data_path, img_path): + """ + Return path to get left and right images and medicis data + """ + return data_path / img_path + + @pytest.fixture() + def medicis_maps_path(self, shift_path, medicis_method_path): + """ + Return path to get medicis data + """ + return shift_path / medicis_method_path + + @pytest.fixture() + def cfg_dichotomy(self, shift_path, subpix, filter_method): + """ + Make user configuration for dichotomy loop + """ + + return { + "input": { + "left": {"nodata": -9999, "img": str(shift_path / "left.tif")}, + "right": {"nodata": -9999, "img": str(shift_path / "right.tif")}, + "col_disparity": {"init": 0, "range": 3}, + "row_disparity": {"init": 0, "range": 3}, + }, + "pipeline": { + "matching_cost": { + "matching_cost_method": "zncc", + "window_size": 65, + "step": [1, 1], + "subpix": subpix, + "spline_order": 3, + }, # we use spline_order=3 to get better results when subpix is different from 1 + "disparity": {"disparity_method": "wta", "invalid_disparity": np.nan}, + "refinement": {"refinement_method": "dichotomy", "iterations": 9, "filter": {"method": filter_method}}, + }, + } + + @pytest.mark.parametrize( + [ + "img_path", + "subpix", + "filter_method", + "medicis_method_path", + "row_shift", + "col_shift", + "row_map_threshold", + "col_map_threshold", + ], + [ + pytest.param( + "T19KER/r+0.00c+0.50/", + 1, + "bicubic", + "zncc_dicho_nappe_bco/gri_zncc_dicho_nappe_bco_", + 0.0, + 0.5, + 0.00001, + 0.0, + id="T19KER (Calama, Chile) shifted of 0.5 in columns with bicubic, 9 iter and subpix=1", + ), + pytest.param( + "T50JML/r+0.00c+0.50/", + 1, + "bicubic", + "zncc_dicho_nappe_bco/gri_zncc_dicho_nappe_bco_", + 0.0, + 0.5, + 0.0, + 0.0, + id="T50JML (Perth, Australia) shifted of 0.5 in columns with bicubic, 9 iter and subpix=1", + ), + pytest.param( + "T19KER/r+0.00c+0.25/", + 1, + "bicubic", + "zncc_dicho_nappe_bco/gri_zncc_dicho_nappe_bco_", + 0.0, + 0.25, + 0.00001, + 0.0, + id="T19KER (Calama, Chile) shifted of 0.25 in columns with bicubic, 9 iter and subpix=1", + ), + pytest.param( + "T50JML/r+0.00c+0.25/", + 1, + "bicubic", + "zncc_dicho_nappe_bco/gri_zncc_dicho_nappe_bco_", + 0.0, + 0.25, + 0.0, + 0.0, + id="T50JML (Perth, Australia) shifted of 0.25 in columns with bicubic, 9 iter and subpix=1", + ), + pytest.param( + "T19KER/r+0.00c-0.25/", + 4, + "bicubic", + "zncc_dicho_nappe_surech_bco/gri_zncc_dicho_nappe_surech_bco_", + 0.0, + -0.25, + 0.002, + 0.0, + id="T19KER (Calama, Chile) shifted of -0.25 in columns with bicubic, 9 iter and subpix=4", + ), + pytest.param( + "T50JML/r+0.00c-0.25/", + 4, + "bicubic", + "zncc_dicho_nappe_surech_bco/gri_zncc_dicho_nappe_surech_bco_", + 0.0, + -0.25, + 0.004, + 0.003, + id="T50JML (Perth, Australia) shifted of -0.25 in columns with bicubic, 9 
iter and subpix=4", + ), + pytest.param( + "T19KER/r+0.00c+0.50/", + 4, + "bicubic", + "zncc_dicho_nappe_surech_bco/gri_zncc_dicho_nappe_surech_bco_", + 0.0, + 0.5, + 0.004, + 0.003, + id="T19KER (Calama, Chile) shifted of 0.5 in columns with bicubic, 9 iter and subpix=4", + ), + pytest.param( + "T50JML/r+0.00c+0.50/", + 4, + "bicubic", + "zncc_dicho_nappe_surech_bco/gri_zncc_dicho_nappe_surech_bco_", + 0.0, + 0.5, + 0.004, + 0.002, + id="T50JML (Perth, Australia) shifted of 0.5 in columns with bicubic, 9 iter and subpix=4", + ), + pytest.param( + "T19KER/r+0.25c+0.25/", + 4, + "bicubic", + "zncc_dicho_nappe_surech_bco/gri_zncc_dicho_nappe_surech_bco_", + 0.25, + 0.25, + 0.0, + 0.0, + id="T19KER (Calama, Chile) shifted of 0.25 in col and in rows with bicubic, 9 iter and subpix=4", + ), + pytest.param( + "T50JML/r+0.25c+0.25/", + 4, + "bicubic", + "zncc_dicho_nappe_surech_bco/gri_zncc_dicho_nappe_surech_bco_", + 0.25, + 0.25, + 0.005, + 0.005, + id="T50JML (Perth, Australia) shifted of 0.25 in col and in rows with bicubic, 9 iter and subpix=4", + ), + pytest.param( + "T19KER/r+0.00c+0.50/", + 1, + "sinc", + "zncc_dicho_nappe_sinc/gri_zncc_dicho_nappe_sinc_", + 0.0, + 0.5, + 0.0001, + 0.0001, + id="T19KER (Calama, Chile) shifted of 0.5 in columns with sinc, 9 iter and subpix=1", + ), + pytest.param( + "T50JML/r+0.00c+0.50/", + 1, + "sinc", + "zncc_dicho_nappe_sinc/gri_zncc_dicho_nappe_sinc_", + 0.0, + 0.5, + 0.0, + 0.0, + id="T50JML (Perth, Australia) shifted of 0.5 in columns with sinc, 9 iter and subpix=1", + ), + pytest.param( + "T19KER/r+0.00c+0.25/", + 1, + "sinc", + "zncc_dicho_nappe_sinc/gri_zncc_dicho_nappe_sinc_", + 0.0, + 0.25, + 0.0, + 0.0, + id="T19KER (Calama, Chile) shifted of 0.25 in columns with sinc, 9 iter and subpix=1", + ), + pytest.param( + "T50JML/r+0.00c+0.25/", + 1, + "sinc", + "zncc_dicho_nappe_sinc/gri_zncc_dicho_nappe_sinc_", + 0.0, + 0.25, + 0.00001, + 0.00001, + id="T50JML (Perth, Australia) shifted of 0.25 in columns with sinc, 9 iter and subpix=1", + ), + pytest.param( + "T19KER/r+0.00c-0.25/", + 4, + "sinc", + "zncc_dicho_nappe_surech_sinc/gri_zncc_dicho_nappe_surech_sinc_", + 0.0, + -0.25, + 0.003, + 0.01, + id="T19KER (Calama, Chile) shifted of -0.25 in columns with sinc, 9 iter and subpix=4", + ), + pytest.param( + "T50JML/r+0.00c-0.25/", + 4, + "sinc", + "zncc_dicho_nappe_surech_sinc/gri_zncc_dicho_nappe_surech_sinc_", + 0.0, + -0.25, + 0.004, + 0.005, + id="T50JML (Perth, Australia) shifted of -0.25 in columns with sinc, 9 iter and subpix=4", + ), + pytest.param( + "T19KER/r+0.00c+0.50/", + 4, + "sinc", + "zncc_dicho_nappe_surech_sinc/gri_zncc_dicho_nappe_surech_sinc_", + 0.0, + 0.5, + 0.003, + 0.004, + id="T19KER (Calama, Chile) shifted of 0.5 in columns with sinc, 9 iter and subpix=4", + ), + pytest.param( + "T50JML/r+0.00c+0.50/", + 4, + "sinc", + "zncc_dicho_nappe_surech_sinc/gri_zncc_dicho_nappe_surech_sinc_", + 0.0, + 0.5, + 0.003, + 0.003, + id="T50JML (Perth, Australia) shifted of 0.5 in columns with sinc, 9 iter and subpix=4", + ), + pytest.param( + "T19KER/r+0.25c+0.25/", + 4, + "sinc", + "zncc_dicho_nappe_surech_sinc/gri_zncc_dicho_nappe_surech_sinc_", + 0.25, + 0.25, + 0.01, + 0.007, + id="T19KER (Calama, Chile) shifted of 0.25 in col and in rows with sinc, 9 iter and subpix=4", + ), + pytest.param( + "T50JML/r+0.25c+0.25/", + 4, + "sinc", + "zncc_dicho_nappe_surech_sinc/gri_zncc_dicho_nappe_surech_sinc_", + 0.25, + 0.25, + 0.004, + 0.005, + id="T50JML (Perth, Australia) shifted of 0.25 in col and in rows with sinc, 9 iter and subpix=4", + ), 
+ ], + ) + def test_pandora2d_medicis_dichotomy( + self, run_pipeline, cfg_dichotomy, medicis_maps_path, row_shift, col_shift, row_map_threshold, col_map_threshold + ): + """ + Test that pandora2d mean errors are smaller + than those of medicis plus a threshold given as a parameter. + + Differences between medicis and pandora2d disparity maps may be linked to the difference + in the interpolation method used between the two tools when the subpix is greater than 1. + Pandora2d uses the scipy zoom method (spline interpolation), + whereas medicis uses the same interpolation method as the one used for the dichotomy loop (bicubic or sinc). + + When the threshold is 0, pandora2d is at least as effective as medicis. + When the threshold is > 0, the mean error of medicis + is better than that of pandora2d by about the value of the threshold. + """ + + # Run pandora2D pipeline + run_dir = run_pipeline(cfg_dichotomy) + + # Get pandora2d disparity maps + with rasterio.open(run_dir / "output" / "row_disparity.tif") as src: + row_map_pandora2d = src.read(1) + with rasterio.open(run_dir / "output" / "columns_disparity.tif") as src: + col_map_pandora2d = src.read(1) + + # Get medicis disparity maps + with rasterio.open(str(medicis_maps_path) + "row_disp.tif") as src: + row_map_medicis = src.read(1) + with rasterio.open(str(medicis_maps_path) + "col_disp.tif") as src: + col_map_medicis = src.read(1) + + # Remove medicis edges on both pandora2d and medicis disparity maps + # in order to compare the same sample of points. + row_map_medicis, row_map_pandora2d = self.remove_edges(row_map_medicis, row_map_pandora2d) + col_map_medicis, col_map_pandora2d = self.remove_edges(col_map_medicis, col_map_pandora2d) + + # Compute mean error between column disparities and real column shift + mean_error_pandora2d_col = np.nanmean(abs(col_map_pandora2d - col_shift)) + mean_error_medicis_col = np.nanmean(abs(col_map_medicis - col_shift)) + + # Compute mean error between row disparities and real row shift + mean_error_pandora2d_row = np.nanmean(abs(row_map_pandora2d - row_shift)) + mean_error_medicis_row = np.nanmean(abs(row_map_medicis - row_shift)) + + assert mean_error_pandora2d_col <= mean_error_medicis_col + col_map_threshold + assert mean_error_pandora2d_row <= mean_error_medicis_row + row_map_threshold diff --git a/tests/resource_tests/conftest.py b/tests/resource_tests/conftest.py index 6beb29e..786b302 100644 --- a/tests/resource_tests/conftest.py +++ b/tests/resource_tests/conftest.py @@ -19,62 +19,14 @@ """ Module with global test fixtures. 
""" -from typing import List -import sqlite3 -import pytest - - -class Metrics: - """ - Metrics Class - """ - - # pylint:disable=too-few-public-methods - _TOTAL_TIME_MAX = 120 # second - _CPU_USAGE_MAX = 50 # percent - _MEM_USAGE_MAX = 1024 # megabyte - - def __init__(self, items: List) -> None: - self.test_name = items[0] - self.total_time = items[1] - self.cpu_usage = items[2] - self.mem_usage = items[3] +import pytest def pytest_addoption(parser): parser.addoption("--database", action="store", default=".pymon", required=False) -@pytest.fixture(name="database_path") -def database_path_fixture(request): - return request.config.getoption("--database") - - @pytest.fixture def output_result_path(): return "./tests/resource_tests/result" - - -@pytest.fixture(name="sqlite_select_query") -def sqlite_select_query_fixture(): - return """SELECT ITEM, TOTAL_TIME, CPU_USAGE, MEM_USAGE FROM TEST_METRICS""" - - -@pytest.fixture() -def read_sqlite_table(database_path, sqlite_select_query): - """ - Read sqlite table from pytest-monitoring - """ - data = [] - try: - sqlite_connection = sqlite3.connect(database_path) - cursor = sqlite_connection.cursor() - - cursor.execute(sqlite_select_query) - records = cursor.fetchall() - data = [Metrics(record) for record in records] - sqlite_connection.close() - except sqlite3.Error as error: - print("Failed to read data from sqlite table", error) - return data diff --git a/tests/resource_tests/test_example.py b/tests/resource_tests/test_example.py deleted file mode 100644 index dc48e4d..0000000 --- a/tests/resource_tests/test_example.py +++ /dev/null @@ -1,40 +0,0 @@ -#!/usr/bin/env python -# -# Copyright (c) 2024 Centre National d'Etudes Spatiales (CNES). -# -# This file is part of PANDORA2D -# -# https://github.com/CNES/Pandora2D -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -""" -Test example for resources -""" - -import pytest -import pandora2d - - -@pytest.mark.monitor_test -class TestConeImage: - """Test pandora with cone image.""" - - # pylint:disable=too-few-public-methods - - def test_pandora(self, classic_config, output_result_path): - """ - Test run pandora with cone images - """ - pandora2d.main(classic_config, output_result_path, True) diff --git a/tests/resource_tests/test_metrics.py b/tests/resource_tests/test_metrics.py index 6008a28..37450d1 100644 --- a/tests/resource_tests/test_metrics.py +++ b/tests/resource_tests/test_metrics.py @@ -1,6 +1,3 @@ -# pylint: disable=protected-access -#!/usr/bin/env python -# # Copyright (c) 2024 Centre National d'Etudes Spatiales (CNES). # # This file is part of PANDORA2D @@ -23,9 +20,64 @@ """ This module is used to check the resources used for the tests in this directory. 
""" + +# pylint: disable=protected-access + +import sqlite3 +from typing import List + import pytest +class Metrics: + """ + Metrics Class + """ + + # pylint:disable=too-few-public-methods + + _TOTAL_TIME_MAX = 120 # second + _CPU_USAGE_MAX = 50 # percent + _MEM_USAGE_MAX = 1024 # megabyte + + def __init__(self, items: List) -> None: + self.test_name = items[0] + self.test_variant = items[1] + self.total_time = items[2] + self.cpu_usage = items[3] + self.mem_usage = items[4] + + +def read_sqlite_table(database_path, sqlite_select_query): + """ + Read sqlite table from pytest-monitoring + """ + data = [] + try: + sqlite_connection = sqlite3.connect(database_path) + cursor = sqlite_connection.cursor() + + cursor.execute(sqlite_select_query) + records = cursor.fetchall() + data = [Metrics(record) for record in records] + sqlite_connection.close() + except sqlite3.Error as error: + print("Failed to read data from sqlite table", error) + return data + + +# Define the pytest_generate_tests hook to generate test cases +def pytest_generate_tests(metafunc): + """Generate list of tests from pytest-monitoring database.""" + query = "SELECT ITEM, ITEM_VARIANT, TOTAL_TIME, CPU_USAGE, MEM_USAGE FROM TEST_METRICS" + marks = [mark.name for mark in metafunc.cls.pytestmark] + if "metrics" in marks: + metrics = read_sqlite_table(metafunc.config.option.database, query) + if metrics: + # Generate test cases based on the metrics list + metafunc.parametrize("metric", metrics, ids=lambda x: x.test_variant) + + @pytest.mark.metrics @pytest.mark.monitor_skip_test class TestResource: @@ -33,29 +85,26 @@ class TestResource: Test all tests are ok for CPU/MEM and time rule """ - def test_total_time(self, read_sqlite_table): + def test_total_time(self, metric): """ Verify the time metrics for the test """ - for metric in read_sqlite_table: - assert ( - metric.total_time < metric._TOTAL_TIME_MAX - ), f"Test {metric.test_name} does not respect max time : {metric._TOTAL_TIME_MAX} (seconds)" + assert ( + metric.total_time < metric._TOTAL_TIME_MAX + ), f"Test {metric.test_variant} does not respect max time : {metric._TOTAL_TIME_MAX} (seconds)" - def test_cpu_usage(self, read_sqlite_table): + def test_cpu_usage(self, metric): """ Verify the cpu metrics for the test """ - for metric in read_sqlite_table: - assert ( - metric.cpu_usage < metric._CPU_USAGE_MAX - ), f"Test {metric.test_name} does not cpu usage max : {metric._CPU_USAGE_MAX} (%)" + assert ( + metric.cpu_usage < metric._CPU_USAGE_MAX + ), f"Test {metric.test_variant} does not cpu usage max : {metric._CPU_USAGE_MAX} (%)" - def test_mem_usage(self, read_sqlite_table): + def test_mem_usage(self, metric): """ Verify the memory metrics for the test """ - for metric in read_sqlite_table: - assert ( - metric.mem_usage < metric._MEM_USAGE_MAX - ), f"Test {metric.test_name} does not respect memory usage max : {metric._MEM_USAGE_MAX} (megabyte)" + assert ( + metric.mem_usage < metric._MEM_USAGE_MAX + ), f"Test {metric.test_variant} does not respect memory usage max : {metric._MEM_USAGE_MAX} (megabyte)" diff --git a/tests/resource_tests/test_pandora2d.py b/tests/resource_tests/test_pandora2d.py new file mode 100644 index 0000000..5837b6b --- /dev/null +++ b/tests/resource_tests/test_pandora2d.py @@ -0,0 +1,203 @@ +# Copyright (c) 2024 Centre National d'Etudes Spatiales (CNES). 
+# +# This file is part of PANDORA2D +# +# https://github.com/CNES/Pandora2D +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +""" +Test used resources during execution of a configuration. +""" + +# pylint: disable=redefined-outer-name + +import numpy as np +import pytest +from PIL import Image + +# Mark all test of the module with monitor_test +pytestmark = pytest.mark.monitor_test + +subpix_list = [1, 2, 4] +matching_cost_methods = ["zncc", "sad", "ssd"] +iteration_list = [1, 4, 9] + + +def reduce_image(input_path, output_path): + data = np.asarray(Image.open(input_path)) + half_row, half_col = data.shape[0] // 2, data.shape[1] // 2 + image = Image.fromarray(data[half_row - 25 : half_row + 25, half_col - 25 : half_col + 25]) + image.save(output_path, "png") + + +@pytest.fixture() +def left_img_path(tmp_path, left_img_path): + path = tmp_path / "left.png" + reduce_image(left_img_path, path) + return str(path) + + +@pytest.fixture() +def right_img_path(tmp_path, right_img_path): + path = tmp_path / "right.png" + reduce_image(right_img_path, path) + return str(path) + + +def test_estimation(run_pipeline, correct_input_cfg): + """Test a configuration with only an estimation in the pipeline.""" + configuration = { + **correct_input_cfg, + "pipeline": { + "estimation": {"estimation_method": "phase_cross_correlation"}, + }, + } + run_pipeline(configuration) + + +@pytest.mark.parametrize("subpix", subpix_list) +@pytest.mark.parametrize("matching_cost_method", matching_cost_methods) +def test_matching_cost_with_disparity(run_pipeline, correct_input_cfg, matching_cost_method, subpix): + """Test pipeline with a matching_cost and a disparity steps.""" + configuration = { + **correct_input_cfg, + "pipeline": { + "matching_cost": {"matching_cost_method": matching_cost_method, "subpix": subpix}, + "disparity": {"disparity_method": "wta", "invalid_disparity": -99}, + }, + } + run_pipeline(configuration) + + +@pytest.mark.parametrize("subpix", subpix_list) +@pytest.mark.parametrize("matching_cost_method", matching_cost_methods) +def test_matching_cost_with_estimation_and_disparity(run_pipeline, correct_input_cfg, matching_cost_method, subpix): + """Test pipeline with an estimation, a matching_cost and a disparity steps.""" + configuration = { + **correct_input_cfg, + "pipeline": { + "estimation": {"estimation_method": "phase_cross_correlation"}, + "matching_cost": {"matching_cost_method": matching_cost_method, "subpix": subpix}, + "disparity": {"disparity_method": "wta", "invalid_disparity": -99}, + }, + } + run_pipeline(configuration) + + +@pytest.mark.parametrize("subpix", subpix_list) +@pytest.mark.parametrize("matching_cost_method", matching_cost_methods) +class TestRefinement: + """Test pipelines which include a refinement step.""" + + @pytest.fixture() + def interpolation_pipeline(self, matching_cost_method, subpix): + """Pipeline for an interpolation refinement.""" + return { + "matching_cost": {"matching_cost_method": matching_cost_method, "subpix": subpix}, + "disparity": {"disparity_method": 
"wta", "invalid_disparity": -99}, + "refinement": {"refinement_method": "interpolation"}, + } + + @pytest.fixture() + def dichotomy_pipeline(self, matching_cost_method, subpix, iterations, filter_method): + """Pipeline for a dichotomy refinement.""" + return { + "matching_cost": {"matching_cost_method": matching_cost_method, "subpix": subpix}, + "disparity": {"disparity_method": "wta", "invalid_disparity": -99}, + "refinement": { + "refinement_method": "dichotomy", + "iterations": iterations, + "filter": {"method": filter_method}, + }, + } + + @pytest.fixture() + def optical_flow_pipeline(self, matching_cost_method, subpix, iterations): + """Pipeline for an optical flow refinement.""" + return { + "matching_cost": {"matching_cost_method": matching_cost_method, "subpix": subpix}, + "disparity": {"disparity_method": "wta", "invalid_disparity": -99}, + "refinement": { + "refinement_method": "optical_flow", + "iterations": iterations, + }, + } + + def test_interpolation(self, run_pipeline, correct_input_cfg, interpolation_pipeline): + """Test interpolation.""" + configuration = { + **correct_input_cfg, + "pipeline": {**interpolation_pipeline}, + } + run_pipeline(configuration) + + def test_interpolation_with_estimation(self, run_pipeline, correct_input_cfg, interpolation_pipeline): + """Test interpolation with estimation.""" + configuration = { + **correct_input_cfg, + "pipeline": { + "estimation": {"estimation_method": "phase_cross_correlation"}, + **interpolation_pipeline, + }, + } + run_pipeline(configuration) + + @pytest.mark.parametrize("iterations", iteration_list) + @pytest.mark.parametrize("filter_method", ["sinc", "bicubic"]) + def test_dichotomy(self, run_pipeline, correct_input_cfg, dichotomy_pipeline): + """Test dichotomy.""" + configuration = { + **correct_input_cfg, + "pipeline": { + **dichotomy_pipeline, + }, + } + run_pipeline(configuration) + + @pytest.mark.parametrize("iterations", iteration_list) + @pytest.mark.parametrize("filter_method", ["sinc", "bicubic"]) + def test_dichotomy_with_estimation(self, run_pipeline, correct_input_cfg, dichotomy_pipeline): + """Test dichotomy with estimation.""" + configuration = { + **correct_input_cfg, + "pipeline": { + "estimation": {"estimation_method": "phase_cross_correlation"}, + **dichotomy_pipeline, + }, + } + run_pipeline(configuration) + + @pytest.mark.parametrize("iterations", iteration_list) + def test_optical_flows(self, run_pipeline, correct_input_cfg, optical_flow_pipeline): + """Test optical flows.""" + configuration = { + **correct_input_cfg, + "pipeline": { + **optical_flow_pipeline, + }, + } + run_pipeline(configuration) + + @pytest.mark.parametrize("iterations", iteration_list) + def test_optical_flows_with_estimation(self, run_pipeline, correct_input_cfg, optical_flow_pipeline): + """Test optical flows with estimation.""" + configuration = { + **correct_input_cfg, + "pipeline": { + "estimation": {"estimation_method": "phase_cross_correlation"}, + **optical_flow_pipeline, + }, + } + run_pipeline(configuration) diff --git a/tests/unit_tests/conftest.py b/tests/unit_tests/conftest.py index 76f0906..18f39d8 100644 --- a/tests/unit_tests/conftest.py +++ b/tests/unit_tests/conftest.py @@ -27,6 +27,7 @@ from skimage.io import imsave from pandora2d import Pandora2DMachine +from pandora2d.img_tools import add_disparity_grid @pytest.fixture() @@ -75,8 +76,8 @@ def correct_input_cfg(left_img_path, right_img_path): "nodata": -9999, }, "right": {"img": right_img_path, "nodata": -9999}, - "col_disparity": [-2, 2], - "row_disparity": 
[-2, 2], + "col_disparity": {"init": 1, "range": 2}, + "row_disparity": {"init": 1, "range": 2}, } } @@ -92,24 +93,8 @@ def false_input_path_image(right_img_path): "right": { "img": right_img_path, }, - "col_disparity": [-2, 2], - "row_disparity": [-2, 2], - } - } - - -@pytest.fixture -def false_input_disp(left_img_path, right_img_path): - return { - "input": { - "left": { - "img": left_img_path, - }, - "right": { - "img": right_img_path, - }, - "col_disparity": [7, 2], - "row_disparity": [-2, 2], + "col_disparity": {"init": 1, "range": 2}, + "row_disparity": {"init": 1, "range": 2}, } } @@ -194,12 +179,10 @@ def left_stereo_object(): "valid_pixels": 0, "no_data_mask": 1, "crs": None, - "col_disparity_source": [0, 1], - "row_disparity_source": [-1, 0], "transform": Affine(1.0, 0.0, 0.0, 0.0, 1.0, 0.0), } - return left + return left.pipe(add_disparity_grid, {"init": 1, "range": 1}, {"init": -1, "range": 1}) @pytest.fixture() @@ -249,8 +232,6 @@ def stereo_object_with_args(): "valid_pixels": 0, "no_data_mask": 1, "crs": None, - "col_disparity_source": [0, 1], - "row_disparity_source": [-1, 0], } data = np.array( @@ -271,4 +252,76 @@ def stereo_object_with_args(): "crs": None, } - return left_arg, right_arg + return ( + left_arg.pipe( + add_disparity_grid, + {"init": 1, "range": 1}, + {"init": -1, "range": 1}, + ), + right_arg, + ) + + +@pytest.fixture +def incorrect_disp_dict(): + """Create an incorrect disparity dictionary""" + return {"init": -460, "range": 3} + + +@pytest.fixture +def out_of_image_grid(left_img_shape, create_disparity_grid_fixture): + """ + Create an initial disparity grid with a point that has its disparity interval outside the image + and save it in tmp + """ + + height, width = left_img_shape + + init_band = np.random.randint(-3, 4, size=(height, width)) + init_band[0, 0] = -455 + + return create_disparity_grid_fixture(init_band, 2, "out_of_image_disparity.tif") + + +@pytest.fixture +def negative_exploration_grid(left_img_shape, create_disparity_grid_fixture): + """ + Create an initial disparity grid with a point that has a negative exploration value + and save it in tmp + """ + + height, width = left_img_shape + + init_band = np.random.randint(-3, 4, size=(height, width)) + + return create_disparity_grid_fixture(init_band, -2, "negative_exploration_disparity.tif") + + +@pytest.fixture +def two_bands_grid(left_img_shape, create_disparity_grid_fixture): + """ + Create an initial disparity grid with two bands and save it in tmp + """ + + height, width = left_img_shape + + first_band = np.random.randint(-3, 4, size=(height, width)) + second_band = np.random.randint(0, 6, size=(height, width)) + + data = np.stack([first_band, second_band], axis=-1) + + return create_disparity_grid_fixture(data, 2, "two_bands_disparity.tif", True) + + +@pytest.fixture +def wrong_size_grid(left_img_shape, create_disparity_grid_fixture): + """ + Create an initial disparity grid of wrong size + and save it in tmp + """ + + height, width = left_img_shape + + init_band = np.random.randint(-3, 4, size=(height - 2, width + 4)) + + return create_disparity_grid_fixture(init_band, 3, "wrong_size_disparity.tif") diff --git a/tests/unit_tests/test_check_configuration.py b/tests/unit_tests/test_check_configuration.py index 02276da..2c627da 100644 --- a/tests/unit_tests/test_check_configuration.py +++ b/tests/unit_tests/test_check_configuration.py @@ -26,7 +26,6 @@ import random import string - import pytest import transitions import numpy as np @@ -34,6 +33,7 @@ from json_checker import 
DictCheckerError, MissKeyCheckerError from skimage.io import imsave +from pandora.img_tools import get_metadata from pandora2d.img_tools import create_datasets_from_inputs, add_disparity_grid from pandora2d import check_configuration @@ -47,8 +47,8 @@ def datasets(self, left_img_path, right_img_path): input_cfg = { "left": {"img": left_img_path, "nodata": -9999}, "right": {"img": right_img_path, "nodata": -9999}, - "col_disparity": [-2, 2], - "row_disparity": [-3, 3], + "col_disparity": {"init": 1, "range": 2}, + "row_disparity": {"init": 1, "range": 3}, } return create_datasets_from_inputs(input_cfg) @@ -61,7 +61,9 @@ def test_nominal(self, datasets): def test_fails_with_wrong_dimension(self): """ - Test with wrong image shapes + Description : Test with wrong image shapes + Data : + Requirement : EX_CONF_11 """ data_left = np.full((3, 3), 2) data_right = np.full((4, 4), 2) @@ -78,13 +80,13 @@ def test_fails_with_wrong_dimension(self): {"im": (["row", "col"], data_left)}, coords={"row": np.arange(data_left.shape[0]), "col": np.arange(data_left.shape[1])}, attrs=attributs, - ).pipe(add_disparity_grid, [0, 1], [-1, 0]) + ).pipe(add_disparity_grid, {"init": -1, "range": 2}, {"init": -1, "range": 3}) dataset_right = xr.Dataset( {"im": (["row", "col"], data_right)}, coords={"row": np.arange(data_right.shape[0]), "col": np.arange(data_right.shape[1])}, attrs=attributs, - ).pipe(add_disparity_grid, [-2, 2], [-3, 3]) + ).pipe(add_disparity_grid, {"init": 1, "range": 2}, {"init": 1, "range": 3}) with pytest.raises(ValueError) as exc_info: check_configuration.check_datasets(dataset_left, dataset_right) @@ -100,7 +102,11 @@ def test_fails_with_wrong_dimension(self): ) def test_fails_without_disparity(self, datasets, col_disparity, row_disparity): """ - Test with missing disparities + Description : Test with missing disparities + Data : + - Left image : cones/monoband/left.png + - Right image : cones/monoband/right.png + Requirement : EX_CONF_08 """ dataset_left, dataset_right = datasets if col_disparity: @@ -126,19 +132,31 @@ def test_check_nominal_case(self, correct_input_cfg) -> None: assert check_configuration.check_input_section(correct_input_cfg) def test_fails_if_input_section_is_missing(self): + """ + Description : Test if input section is missing in the configuration file + Data : + Requirement : EX_CONF_01 + """ with pytest.raises(KeyError, match="input key is missing"): check_configuration.check_input_section({}) - def test_false_input_disp_should_exit(self, false_input_disp): - with pytest.raises(ValueError, match="disp_max must be bigger than disp_min"): - check_configuration.check_input_section(false_input_disp) - def test_false_input_path_image_should_raise_error(self, false_input_path_image): + """ + Description : Test raises an error if the image path isn't correct + Data : cones/monoband/right.png + Requirement : EX_CONF_09 + """ with pytest.raises(DictCheckerError): check_configuration.check_input_section(false_input_path_image) def test_fails_with_images_of_different_sizes(self, correct_input_cfg, make_empty_image): - """Images must have the same shape and size.""" + """ + Description : Images must have the same shape and size. 
+ Data : + - Left image : cones/monoband/left.png + - Right image : cones/monoband/right.png + Requirement : EX_CONF_11 + """ correct_input_cfg["input"]["left"]["img"] = str(make_empty_image("left.tiff")) correct_input_cfg["input"]["right"]["img"] = str(make_empty_image("right.tiff", shape=(50, 50))) @@ -146,7 +164,13 @@ def test_fails_with_images_of_different_sizes(self, correct_input_cfg, make_empt check_configuration.check_input_section(correct_input_cfg) def test_default_nodata(self, correct_input_cfg): - """Default nodata value shoud be -9999.""" + """ + Description : Default nodata value should be -9999. + Data : + - Left image : cones/monoband/left.png + - Right image : cones/monoband/right.png + Requirement : EX_CONF_04 + """ del correct_input_cfg["input"]["left"]["nodata"] result = check_configuration.check_input_section(correct_input_cfg) @@ -178,12 +202,19 @@ class TestCheckPipelineSection: """Test check_pipeline_section.""" def test_fails_if_pipeline_section_is_missing(self, pandora2d_machine) -> None: + """ + Description : Test if the pipeline section is missing in the configuration file + Data : + Requirement : EX_CONF_02 + """ with pytest.raises(KeyError, match="pipeline key is missing"): assert check_configuration.check_pipeline_section({}, pandora2d_machine) def test_nominal_case(self, pandora2d_machine, correct_pipeline) -> None: """ - Test function for checking user pipeline section + Description : Test function for checking user pipeline section + Data : + Requirement : EX_REF_00 + """ assert check_configuration.check_pipeline_section(correct_pipeline, pandora2d_machine) @@ -204,7 +235,11 @@ def test_false_disp_dict_should_raise_error(self, pandora2d_machine, false_pipel ], ) def test_wrong_order_should_raise_error(self, pandora2d_machine, step_order): - """Pipeline section order is important.
+ Data : + Requirement : EX_CONF_07 + """ steps = { "estimation": {"estimated_shifts": [-0.5, 1.3], "error": [1.0], "phase_diff": [1.0]}, "matching_cost": {"matching_cost_method": "zncc", "window_size": 5}, @@ -217,7 +252,11 @@ def test_wrong_order_should_raise_error(self, pandora2d_machine, step_order): def test_multiband_pipeline(self, pandora2d_machine, left_rgb_path, right_rgb_path): """ - Test the method check_conf for multiband images + Description : Test the method check_conf for multiband images + Data : + - Left image : cones/multibands/left.tif + - Right image : cones/multibands/right.tif + Requirement : EX_CONF_12 """ input_multiband_cfg = { "left": { @@ -226,8 +265,8 @@ def test_multiband_pipeline(self, pandora2d_machine, left_rgb_path, right_rgb_pa "right": { "img": right_rgb_path, }, - "col_disparity": [-60, 0], - "row_disparity": [-60, 0], + "col_disparity": {"init": -30, "range": 30}, + "row_disparity": {"init": -30, "range": 30}, } cfg = { "input": input_multiband_cfg, @@ -240,46 +279,35 @@ def test_multiband_pipeline(self, pandora2d_machine, left_rgb_path, right_rgb_pa check_configuration.check_conf(cfg, pandora2d_machine) -class TestCheckConf: +class TestCheckConf: # pylint: disable=too-few-public-methods """Test check_conf method.""" def test_passes_with_good_disparity_range_and_interpolation_step( self, correct_input_cfg, correct_pipeline, pandora2d_machine ): """ - Test col_disparity & row_disparity range (=5) with interpolation step in user configuration + Description : Test col_disparity & row_disparity range (=5) with interpolation step in user configuration + Data : + - Left image : cones/monoband/left.png + - Right image : cones/monoband/right.png + Requirement : EX_ROI_05 """ user_cfg = {**correct_input_cfg, **correct_pipeline} check_configuration.check_conf(user_cfg, pandora2d_machine) - @pytest.mark.parametrize( - ["col_disparity", "row_disparity"], - [ - pytest.param([0, 2], [-2, 2], id="col_disparity range too small"), - pytest.param([-2, 2], [1, 4], id="row_disparity range too small"), - pytest.param([0, 2], [1, 4], id="col_disparity & row_disparity range too small"), - ], - ) - def test_fails_with_wrong_disparity_range_and_interpolation_step( - self, correct_input_cfg, correct_pipeline, pandora2d_machine, col_disparity, row_disparity - ): - """ - Test wrong col_disparity & row_disparity range with interpolation step in user configuration - """ - correct_input_cfg["input"]["col_disparity"] = col_disparity - correct_input_cfg["input"]["row_disparity"] = row_disparity - user_cfg = {**correct_input_cfg, **correct_pipeline} - with pytest.raises(ValueError) as err: - check_configuration.check_conf(user_cfg, pandora2d_machine) - assert ( - "disparity range with a size < 5 are not allowed with interpolation refinement method" in err.value.args[0] - ) - class TestCheckRoiSection: - """Test check_roi_section.""" + """ + Description : Test check_roi_section. 
+ Requirement : EX_ROI_04 + """ def test_expect_roi_section(self): + """ + Description : Test if ROI section is missing + Data : + Requirement : EX_ROI_05 + """ with pytest.raises(MissKeyCheckerError, match="ROI"): check_configuration.check_roi_section({"input": {}}) @@ -291,10 +319,20 @@ def test_nominal_case(self, correct_roi_sensor) -> None: assert check_configuration.check_roi_section(correct_roi_sensor) def test_dimension_lt_0_raises_exception(self, false_roi_sensor_negative): + """ + Description : Raises an exception if the ROI dimensions are lower than 0 + Data : + Requirement : EX_CONF_08 + """ with pytest.raises(BaseException): check_configuration.check_roi_section(false_roi_sensor_negative) def test_first_dimension_gt_last_dimension_raises_exception(self, false_roi_sensor_first_superior_to_last): + """ + Description : Test if the first dimension of the ROI is greater than the last one + Data : + Requirement : EX_CONF_08 + """ with pytest.raises(BaseException): check_configuration.check_roi_section(false_roi_sensor_first_superior_to_last) @@ -345,19 +383,30 @@ def test_get_roi_pipeline( class TestCheckRoiCoherence: - """Test check_roi_coherence.""" + """ + Description : Test check_roi_coherence. + Requirement : EX_ROI_04 + """ def test_first_lt_last_is_ok(self, correct_roi_sensor) -> None: check_configuration.check_roi_coherence(correct_roi_sensor["ROI"]["col"]) def test_first_gt_last_raises_error(self, false_roi_sensor_first_superior_to_last): + """ + Description : Test if 'first' is greater than 'last' in ROI + Data : + Requirement : EX_CONF_08 + """ with pytest.raises(ValueError) as exc_info: check_configuration.check_roi_coherence(false_roi_sensor_first_superior_to_last["ROI"]["col"]) assert str(exc_info.value) == 'In ROI "first" should be lower than "last" in sensor ROI' class TestCheckStep: - """Test check_step.""" + """ + Description : Test check_step. + Requirement : EX_STEP_02 + """ def test_nominal_case(self, pipeline_config, pandora2d_machine) -> None: """ @@ -378,7 +427,11 @@ def test_nominal_case(self, pipeline_config, pandora2d_machine) -> None: ], ) def test_fails_with_bad_step_values(self, pipeline_config, pandora2d_machine, step) -> None: - """Test check_pipeline_section fails with bad values of step.""" + """ + Description : Test check_pipeline_section fails with bad values of step. + Data : + Requirement : EX_CONF_08 + """ pipeline_config["pipeline"]["matching_cost"]["step"] = step with pytest.raises(DictCheckerError): check_configuration.check_pipeline_section(pipeline_config, pandora2d_machine) @@ -399,8 +452,8 @@ def configuration(self, right_nodata, matching_cost_method, left_img_path, right "img": right_img_path, "nodata": right_nodata, }, - "col_disparity": [-2, 2], - "row_disparity": [-2, 2], + "col_disparity": {"init": 1, "range": 2}, + "row_disparity": {"init": 1, "range": 2}, }, "pipeline": { "matching_cost": {"matching_cost_method": matching_cost_method, "window_size": 1}, @@ -410,7 +463,13 @@ def configuration(self, right_nodata, matching_cost_method, left_img_path, right @pytest.mark.parametrize("right_nodata", ["NaN", 0.1, "inf", None]) @pytest.mark.parametrize("matching_cost_method", ["sad", "ssd"]) def test_sad_or_ssd_fail_with(self, pandora2d_machine, configuration): - """Right nodata must be an integer with sad or ssd matching_cost_method.""" + """ + Description : Right nodata must be an integer with sad or ssd matching_cost_method. 
+ Data : + - Left image : cones/monoband/left.png + - Right image : cones/monoband/right.png + Requirement : EX_CONF_08 + """ with pytest.raises((ValueError, DictCheckerError)): check_configuration.check_conf(configuration, pandora2d_machine) @@ -429,65 +488,17 @@ def test_zncc_passes_with(self, pandora2d_machine, configuration): @pytest.mark.parametrize("right_nodata", [0.2, None]) @pytest.mark.parametrize("matching_cost_method", ["zncc"]) def test_zncc_fails_with(self, pandora2d_machine, configuration): - """Right nodata must can not be float or nan with zncc matching_cost_method.""" + """ + Description : Right nodata can not be float or nan with zncc matching_cost_method. + Data : + - Left image : cones/monoband/left.png + - Right image : cones/monoband/right.png + Requirement : EX_CONF_08 + """ with pytest.raises((ValueError, DictCheckerError)): check_configuration.check_conf(configuration, pandora2d_machine) -class TestCheckDisparityRangeSize: - """Test check_disparity_range_size method.""" - - @pytest.mark.parametrize( - ["disparity", "title", "string_match"], - [ - pytest.param( - [-1, 1], - "Column", - "Column disparity range with a size < 5 are not allowed with interpolation refinement method", - id="Column disparity range < 5", - ), - pytest.param( - [-3, -1], - "Row", - "Row disparity range with a size < 5 are not allowed with interpolation refinement method", - id="Row disparity range < 5", - ), - ], - ) - def test_fails_with_disparity_ranges_lower_5(self, disparity, title, string_match): - """Disparity range size must be greater than or equal to 5 when interpolation is used as refinement method""" - with pytest.raises(ValueError, match=string_match): - check_configuration.check_disparity_range_size(disparity, title) - - @pytest.mark.parametrize( - ["disparity", "title", "string_match"], - [ - pytest.param( - "disparity_grid_test", - "Column", - "Grid disparities are not yet handled by Pandora2D", - id="Grid disparity", - ), - ], - ) - def test_fails_with_grid_disparity(self, disparity, title, string_match): - """Disparity grid is not handled yet by Pandora2D""" - with pytest.raises(TypeError, match=string_match): - check_configuration.check_disparity_range_size(disparity, title) - - @pytest.mark.parametrize( - ["disparity", "title"], - [ - pytest.param([-2, 2], "Col", id="Column disparity range greater than or equal to 5"), - pytest.param([1, 5], "Row", id="Row disparity range greater than or equal to 5"), - ], - ) - def test_passes_with_disparity_ranges_equal_5(self, disparity, title): - """Disparity range size is correct""" - - check_configuration.check_disparity_range_size(disparity, title) - - class TestDisparityRangeAgainstImageSize: """Test that out of image disparity ranges are not allowed.""" @@ -499,11 +510,11 @@ def image_path(self, tmp_path): @pytest.fixture() def row_disparity(self): - return [-4, 1] + return {"init": -2, "range": 2} @pytest.fixture() def col_disparity(self): - return [-3, 2] + return {"init": -1, "range": 2} @pytest.fixture() def configuration(self, image_path, row_disparity, col_disparity): @@ -528,34 +539,42 @@ def configuration(self, image_path, row_disparity, col_disparity): @pytest.mark.parametrize( "row_disparity", [ - pytest.param([-460, -451], id="Out on left"), - pytest.param([451, 460], id="Out on right"), + pytest.param({"init": -456, "range": 5}, id="Out on left"), + pytest.param({"init": 456, "range": 5}, id="Out on right"), ], ) def test_row_disparity_totally_out(self, pandora2d_machine, configuration): - """Totally out disparities 
should raise an error.""" + """ + Description : Totally out disparities should raise an error. + Data : tmp_path / "tiff_file.tif" + Requirement : EX_CONF_08 + """ with pytest.raises(ValueError, match="Row disparity range out of image"): check_configuration.check_conf(configuration, pandora2d_machine) @pytest.mark.parametrize( "col_disparity", [ - pytest.param([-460, -451], id="Out on top"), - pytest.param([451, 460], id="Out on bottom"), + pytest.param({"init": -456, "range": 5}, id="Out on top"), + pytest.param({"init": 456, "range": 5}, id="Out on bottom"), ], ) def test_column_disparity_totally_out(self, pandora2d_machine, configuration): - """Totally out disparities should raise an error.""" + """ + Description : Totally out disparities should raise an error. + Data : tmp_path / "tiff_file.tif" + Requirement : EX_CONF_08 + """ with pytest.raises(ValueError, match="Column disparity range out of image"): check_configuration.check_conf(configuration, pandora2d_machine) @pytest.mark.parametrize( ["row_disparity", "col_disparity"], [ - pytest.param([-460, -450], [100, 200], id="Partially out on left"), - pytest.param([450, 460], [100, 200], id="Partially out on right"), - pytest.param([100, 200], [-460, -450], id="Partially out on top"), - pytest.param([100, 200], [450, 460], id="Partially out on bottom"), + pytest.param({"init": -455, "range": 5}, {"init": 150, "range": 50}, id="Partially out on left"), + pytest.param({"init": 455, "range": 5}, {"init": 150, "range": 50}, id="Partially out on right"), + pytest.param({"init": 150, "range": 50}, {"init": -455, "range": 5}, id="Partially out on top"), + pytest.param({"init": 150, "range": 50}, {"init": 455, "range": 5}, id="Partially out on bottom"), ], ) def test_disparity_partially_out(self, pandora2d_machine, configuration): @@ -563,6 +582,93 @@ def test_disparity_partially_out(self, pandora2d_machine, configuration): check_configuration.check_conf(configuration, pandora2d_machine) +class TestCheckDisparity: + """ + Test check_disparity method + """ + + @pytest.mark.parametrize( + ["make_input_cfg"], + [ + pytest.param( + {"row_disparity": "correct_grid", "col_disparity": "second_correct_grid"}, + id="Correct disparity with variable initial value", + ), + pytest.param( + {"row_disparity": "constant_initial_disparity", "col_disparity": "second_constant_initial_disparity"}, + id="Correct disparity with constant initial value", + ), + ], + indirect=["make_input_cfg"], + ) + def test_passes_check_disparity(self, left_img_path, make_input_cfg): + """ + Test check_disparity method with correct input disparities + """ + + image_metadata = get_metadata(left_img_path) + + check_configuration.check_disparity(image_metadata, make_input_cfg) + + @pytest.mark.parametrize( + ["make_input_cfg", "error_type", "error_message"], + [ + pytest.param( + {"row_disparity": "correct_grid", "col_disparity": "left_img_shape"}, + AttributeError, + "The disparities in rows and columns must be given as 2 dictionaries", + id="Col disparity is not a dictionary", + ), + pytest.param( + {"row_disparity": "left_img_shape", "col_disparity": "correct_grid"}, + AttributeError, + "The disparities in rows and columns must be given as 2 dictionaries", + id="Row disparity is not a dictionary", + ), + pytest.param( + {"row_disparity": "constant_initial_disparity", "col_disparity": "correct_grid"}, + ValueError, + "Initial columns and row disparity values must be two strings or two integers", + id="Initial value is different for columns and rows disparity", + ), + pytest.param( + 
{"row_disparity": "out_of_image_grid", "col_disparity": "second_correct_grid"}, + ValueError, + "Row disparity range out of image", + id="Row disparity grid out of image for one point", + ), + pytest.param( + {"row_disparity": "constant_initial_disparity", "col_disparity": "incorrect_disp_dict"}, + ValueError, + "Column disparity range out of image", + id="Column disparity dict out of image for one point", + ), + pytest.param( + {"row_disparity": "two_bands_grid", "col_disparity": "correct_grid"}, + AttributeError, + "Initial disparity grid must be a 1-channel grid", + id="Row disparity grid has two band", + ), + pytest.param( + {"row_disparity": "correct_grid", "col_disparity": "wrong_size_grid"}, + AttributeError, + "Initial disparity grids and image must have the same size", + id="Column disparity grid size is different from image size", + ), + ], + indirect=["make_input_cfg"], + ) + def test_fails_check_disparity(self, left_img_path, make_input_cfg, error_type, error_message): + """ + Test check_disparity method with incorrect input disparities + """ + + image_metadata = get_metadata(left_img_path) + + with pytest.raises(error_type, match=error_message): + check_configuration.check_disparity(image_metadata, make_input_cfg) + + @pytest.mark.parametrize( "extra_section_name", [ @@ -571,7 +677,49 @@ def test_disparity_partially_out(self, pandora2d_machine, configuration): ], ) def test_extra_section_is_allowed(correct_input_cfg, correct_pipeline, pandora2d_machine, extra_section_name): - """Should not raise an error if an extra section is added.""" + """ + Description : Should not raise an error if an extra section is added. + Data : + - Left image : cones/monoband/left.png + - Right image : cones/monoband/right.png + Requirement : EX_CONF_05 + """ configuration = {**correct_input_cfg, **correct_pipeline, extra_section_name: {}} check_configuration.check_conf(configuration, pandora2d_machine) + + +class TestExpertModeSection: + """ + Description : Test expert_mode_section. 
+ """ + + def test_expert_mode_section_missing_profile_parameter(self): + """ + Description : Test if profiling section is missing + Data : + Requirement : + """ + + with pytest.raises(MissKeyCheckerError) as exc_info: + check_configuration.check_expert_mode_section({"expert_mode": {}}) + assert str(exc_info.value) == "Please be sure to set the profiling dictionary" + + @pytest.mark.parametrize( + ["parameter", "wrong_value_parameter"], + [ + pytest.param("folder_name", 12, id="error folder name with an int"), + pytest.param("folder_name", ["folder_name"], id="error folder name with a list"), + pytest.param("folder_name", {"folder_name": "expert_mode"}, id="error folder name with a dict"), + pytest.param("folder_name", 12.0, id="error folder name with a float"), + ], + ) + def test_configuration_expert_mode(self, parameter, wrong_value_parameter): + """ + Description : Test if wrong parameters are detected + Data : + Requirement : + """ + with pytest.raises(DictCheckerError) as err: + check_configuration.check_expert_mode_section({"profiling": {parameter: wrong_value_parameter}}) + assert "folder_name" in err.value.args[0] diff --git a/tests/unit_tests/test_common.py b/tests/unit_tests/test_common.py index 0f3f5d9..9acab5d 100644 --- a/tests/unit_tests/test_common.py +++ b/tests/unit_tests/test_common.py @@ -39,6 +39,7 @@ from pandora2d.img_tools import create_datasets_from_inputs from pandora2d import matching_cost, disparity, refinement from pandora2d.state_machine import Pandora2DMachine +from pandora2d.constants import Criteria class TestSaveDataset: @@ -278,8 +279,8 @@ def test_dataset_disp_maps_with_pipeline_computation(self, roi, step, left_image "img": right_image, "nodata": -9999, }, - "col_disparity": [0, 4], - "row_disparity": [-2, 2], + "col_disparity": {"init": 2, "range": 2}, + "row_disparity": {"init": 1, "range": 2}, } img_left, img_right = create_datasets_from_inputs(input_cfg, roi=roi) @@ -301,20 +302,11 @@ def test_dataset_disp_maps_with_pipeline_computation(self, roi, step, left_image matching_cost_matcher.allocate_cost_volume_pandora( img_left=img_left, img_right=img_right, - grid_min_col=np.full((3, 3), 0), - grid_max_col=np.full((3, 3), 4), cfg=cfg, ) # compute cost volumes - cvs = matching_cost_matcher.compute_cost_volumes( - img_left=img_left, - img_right=img_right, - grid_min_col=np.full((3, 3), 0), - grid_max_col=np.full((3, 3), 4), - grid_min_row=np.full((3, 3), -2), - grid_max_row=np.full((3, 3), 2), - ) + cvs = matching_cost_matcher.compute_cost_volumes(img_left=img_left, img_right=img_right) cfg_disp = {"disparity_method": "wta", "invalid_disparity": -9999} disparity_matcher = disparity.Disparity(cfg_disp) @@ -347,7 +339,6 @@ def test_disparity_map_output_georef(correct_pipeline, correct_input_cfg): """ Test outputs georef with crs and transform """ - img_left, img_right = create_datasets_from_inputs(input_config=correct_input_cfg["input"]) # Stock crs and transform information from input @@ -366,3 +357,258 @@ def test_disparity_map_output_georef(correct_pipeline, correct_input_cfg): assert "EPSG:32632" == dataset.attrs["crs"] assert Affine(25.94, 0.00, -5278429.43, 0.00, -25.94, 14278941.03) == dataset.attrs["transform"] + + +class TestSetOutOfDisparity: + """Test effect of disparity grids.""" + + @pytest.fixture() + def disp_coords(self): + return "disp_row" + + @pytest.fixture() + def init_value(self): + return 0.0 + + @pytest.fixture() + def range_col(self): + return np.arange(4) + + @pytest.fixture() + def range_row(self): + return np.arange(5) + + 
@pytest.fixture() + def disp_range_col(self): + return np.arange(2, 2 + 7) + + @pytest.fixture() + def disp_range_row(self): + return np.arange(-5, -5 + 6) + + @pytest.fixture() + def dataset(self, range_row, range_col, disp_range_col, disp_range_row, init_value, disp_coords): + """make a xarray dataset and disparity grids""" + xarray = xr.DataArray( + np.full((5, 4, 7, 6), init_value), + coords={ + "row": range_row, + "col": range_col, + "disp_col": disp_range_col, + "disp_row": disp_range_row, + }, + dims=["row", "col", "disp_col", "disp_row"], + ) + + xarray.attrs = {"col_disparity_source": [2, 8], "row_disparity_source": [-5, 0]} + min_disp_grid = np.full((xarray.sizes["row"], xarray.sizes["col"]), xarray.coords[disp_coords].data[0]) + max_disp_grid = np.full((xarray.sizes["row"], xarray.sizes["col"]), xarray.coords[disp_coords].data[-1]) + return xarray, min_disp_grid, max_disp_grid + + @pytest.mark.parametrize( + ["init_value", "value"], + [ + [0.0, np.nan], + [0.0, 1], + [0.0, -1], + [0.0, np.inf], + [Criteria.VALID, Criteria.PANDORA2D_MSK_PIXEL_DISPARITY_UNPROCESSED], + ], + ) + def test_homogeneous_row_grids(self, dataset, value): + """With grids set to extreme disparities, cost_volumes should be left untouched.""" + # As set_out_of_row_disparity_range_to_other_value modify cost_volumes in place we do a copy to be able + # to make the comparison later. + array, min_disp_grid, max_disp_grid = dataset + make_array_copy = array.copy(deep=True) + common.set_out_of_row_disparity_range_to_other_value(array, min_disp_grid, max_disp_grid, value) + + xr.testing.assert_equal(array, make_array_copy) + + @pytest.mark.parametrize( + ["init_value", "value"], + [ + [0.0, np.nan], + [0.0, 10], + [0.0, -10], + [0.0, np.inf], + [Criteria.VALID, Criteria.PANDORA2D_MSK_PIXEL_DISPARITY_UNPROCESSED], + ], + ) + @pytest.mark.parametrize("disp_coords", ["disp_col"]) + def test_homogeneous_col_grids(self, dataset, value): + """With grids set to extreme disparities, cost_volumes should be left untouched.""" + # As set_out_of_col_disparity_range_to_other_value modify cost_volumes in place we do a copy to be able + # to make the comparison later. + array, min_disp_grid, max_disp_grid = dataset + make_array_copy = array.copy(deep=True) + common.set_out_of_col_disparity_range_to_other_value(array, min_disp_grid, max_disp_grid, value) + + xr.testing.assert_equal(array, make_array_copy) + + @pytest.mark.parametrize( + ["init_value", "value"], + [ + [0.0, 0.0], + [0.0, -1], + [0.0, np.inf], + [0.0, -np.inf], + [Criteria.VALID, Criteria.PANDORA2D_MSK_PIXEL_DISPARITY_UNPROCESSED], + ], + ) + def test_variable_min_row(self, dataset, value, disp_coords, init_value): + """Check special value below min disparities.""" + array, min_disp_grid, max_disp_grid = dataset + min_disp_index = 1 + min_disp_grid[::2] = array.coords[disp_coords].data[min_disp_index] + + common.set_out_of_row_disparity_range_to_other_value(array, min_disp_grid, max_disp_grid, value) + + expected_value = array.data[::2, ..., :min_disp_index] + expected_zeros_on_odd_lines = array.data[1::2, ...] 
+ expected_zeros_on_even_lines = array.data[::2, ..., min_disp_index:] + + assert np.all(expected_value == value) + assert np.all(expected_zeros_on_odd_lines == init_value) + assert np.all(expected_zeros_on_even_lines == init_value) + + @pytest.mark.parametrize( + ["init_value", "value"], + [ + [0.0, 0.0], + [0.0, -1], + [0.0, np.inf], + [0.0, -np.inf], + [Criteria.VALID, Criteria.PANDORA2D_MSK_PIXEL_DISPARITY_UNPROCESSED], + ], + ) + @pytest.mark.parametrize("disp_coords", ["disp_col"]) + def test_variable_min_col(self, dataset, value, disp_coords, init_value): + """Check special value below min disparities.""" + array, min_disp_grid, max_disp_grid = dataset + min_disp_index = 1 + min_disp_grid[:, ::2] = array.coords[disp_coords].data[min_disp_index] + + common.set_out_of_col_disparity_range_to_other_value(array, min_disp_grid, max_disp_grid, value) + + expected_value = array.data[:, ::2, :min_disp_index, ...] + expected_zeros_on_odd_columns = array.data[:, 1::2, ...] + expected_zeros_on_even_columns = array.data[:, ::2, min_disp_index:, ...] + + assert np.all(expected_value == value) + assert np.all(expected_zeros_on_odd_columns == init_value) + assert np.all(expected_zeros_on_even_columns == init_value) + + @pytest.mark.parametrize( + ["init_value", "value"], + [ + [0.0, 0.0], + [0.0, -1], + [0.0, np.inf], + [0.0, -np.inf], + [Criteria.VALID, Criteria.PANDORA2D_MSK_PIXEL_DISPARITY_UNPROCESSED], + ], + ) + def test_variable_max_row(self, dataset, value, disp_coords, init_value): + """Check special value above max disparities.""" + array, min_disp_grid, max_disp_grid = dataset + max_disp_index = 1 + max_disp_grid[::2] = array.coords[disp_coords].data[max_disp_index] + + common.set_out_of_row_disparity_range_to_other_value(array, min_disp_grid, max_disp_grid, value) + + expected_value = array.data[::2, ..., (max_disp_index + 1) :] + expected_zeros_on_odd_lines = array.data[1::2, ...] + expected_zeros_on_even_lines = array.data[::2, ..., : (max_disp_index + 1)] + + assert np.all(expected_value == value) + assert np.all(expected_zeros_on_odd_lines == init_value) + assert np.all(expected_zeros_on_even_lines == init_value) + + @pytest.mark.parametrize( + ["init_value", "value"], + [ + [0.0, 0.0], + [0.0, -1], + [0.0, np.inf], + [0.0, -np.inf], + [Criteria.VALID, Criteria.PANDORA2D_MSK_PIXEL_DISPARITY_UNPROCESSED], + ], + ) + @pytest.mark.parametrize("disp_coords", ["disp_col"]) + def test_variable_max_col(self, dataset, value, disp_coords, init_value): + """Check special value above max disparities.""" + array, min_disp_grid, max_disp_grid = dataset + max_disp_index = 1 + max_disp_grid[:, ::2] = array.coords[disp_coords].data[max_disp_index] + + common.set_out_of_col_disparity_range_to_other_value(array, min_disp_grid, max_disp_grid, value) + + expected_value = array.data[:, ::2, (max_disp_index + 1) :, ...] + expected_zeros_on_odd_columns = array.data[:, 1::2, ...] + expected_zeros_on_even_columns = array.data[:, ::2, : (max_disp_index + 1), ...] 
+ + assert np.all(expected_value == value) + assert np.all(expected_zeros_on_odd_columns == init_value) + assert np.all(expected_zeros_on_even_columns == init_value) + + @pytest.mark.parametrize( + ["init_value", "value"], + [ + [0.0, 0.0], + [0.0, -1], + [0.0, np.inf], + [0.0, -np.inf], + [Criteria.VALID, Criteria.PANDORA2D_MSK_PIXEL_DISPARITY_UNPROCESSED], + ], + ) + def test_variable_min_and_max_row(self, dataset, value, disp_coords, init_value): + """Check special value below min and above max disparities.""" + array, min_disp_grid, max_disp_grid = dataset + min_disp_index = 1 + min_disp_grid[::2] = array.coords[disp_coords].data[min_disp_index] + max_disp_index = 2 + max_disp_grid[::2] = array.coords[disp_coords].data[max_disp_index] + + common.set_out_of_row_disparity_range_to_other_value(array, min_disp_grid, max_disp_grid, value) + + expected_below_min = array.data[::2, ..., :min_disp_index] + expected_above_max = array.data[::2, ..., (max_disp_index + 1) :] + expected_zeros_on_odd_lines = array.data[1::2, ...] + expected_zeros_on_even_lines = array.data[::2, ..., min_disp_index : (max_disp_index + 1)] + + assert np.all(expected_below_min == value) + assert np.all(expected_above_max == value) + assert np.all(expected_zeros_on_odd_lines == init_value) + assert np.all(expected_zeros_on_even_lines == init_value) + + @pytest.mark.parametrize( + ["init_value", "value"], + [ + [0.0, 0.0], + [0.0, -1], + [0.0, np.inf], + [0.0, -np.inf], + [Criteria.VALID, Criteria.PANDORA2D_MSK_PIXEL_DISPARITY_UNPROCESSED], + ], + ) + @pytest.mark.parametrize("disp_coords", ["disp_col"]) + def test_variable_min_and_max_col(self, dataset, value, disp_coords, init_value): + """Check special value below min and above max disparities.""" + array, min_disp_grid, max_disp_grid = dataset + min_disp_index = 1 + min_disp_grid[:, ::2] = array.coords[disp_coords].data[min_disp_index] + max_disp_index = 2 + max_disp_grid[:, ::2] = array.coords[disp_coords].data[max_disp_index] + + common.set_out_of_col_disparity_range_to_other_value(array, min_disp_grid, max_disp_grid, value) + + expected_below_min = array.data[:, ::2, :min_disp_index, ...] + expected_above_max = array.data[:, ::2, (max_disp_index + 1) :, ...] + expected_zeros_on_odd_columns = array.data[:, 1::2, ...] + expected_zeros_on_even_columns = array.data[:, ::2, min_disp_index : (max_disp_index + 1), ...] + + assert np.all(expected_below_min == value) + assert np.all(expected_above_max == value) + assert np.all(expected_zeros_on_odd_columns == init_value) + assert np.all(expected_zeros_on_even_columns == init_value) diff --git a/tests/unit_tests/test_criteria.py b/tests/unit_tests/test_criteria.py new file mode 100644 index 0000000..1892154 --- /dev/null +++ b/tests/unit_tests/test_criteria.py @@ -0,0 +1,1339 @@ +# Copyright (c) 2024 Centre National d'Etudes Spatiales (CNES). +# +# This file is part of PANDORA2D +# +# https://github.com/CNES/Pandora2D +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +""" +Test methods from criteria.py file +""" +# pylint: disable=too-many-lines +# pylint: disable=redefined-outer-name +import copy +import pytest +import numpy as np +import xarray as xr + +from pandora2d import matching_cost, criteria +from pandora2d.constants import Criteria +from pandora2d.img_tools import add_disparity_grid + + +@pytest.fixture() +def img_size(): + row = 10 + col = 13 + return (row, col) + + +@pytest.fixture() +def disparity_cfg(): + return {"init": 1, "range": 2}, {"init": -1, "range": 4} + + +@pytest.fixture() +def no_data_mask(): + return 1 + + +@pytest.fixture() +def valid_pixels(): + return 0 + + +@pytest.fixture() +def image(img_size, disparity_cfg, valid_pixels, no_data_mask): + """Make image""" + row, col = img_size + row_disparity, col_disparity = disparity_cfg + data = np.random.uniform(0, row * col, (row, col)) + + return xr.Dataset( + { + "im": (["row", "col"], data), + "msk": (["row", "col"], np.zeros_like(data)), + }, + coords={"row": np.arange(data.shape[0]), "col": np.arange(data.shape[1])}, + attrs={ + "no_data_img": -9999, + "valid_pixels": valid_pixels, + "no_data_mask": no_data_mask, + "crs": None, + "invalid_disparity": np.nan, + }, + ).pipe(add_disparity_grid, col_disparity, row_disparity) + + +@pytest.fixture() +def mask_image(image, msk): + image["msk"].data = msk + + +@pytest.fixture() +def window_size(): + return 1 + + +@pytest.fixture() +def subpix(): + return 1 + + +@pytest.fixture() +def matching_cost_cfg(window_size, subpix): + return {"matching_cost_method": "ssd", "window_size": window_size, "subpix": subpix} + + +@pytest.fixture() +def cost_volumes(matching_cost_cfg, image): + """Compute a cost_volumes""" + matching_cost_ = matching_cost.MatchingCost(matching_cost_cfg) + + matching_cost_.allocate_cost_volume_pandora(img_left=image, img_right=image, cfg=matching_cost_cfg) + return matching_cost_.compute_cost_volumes(img_left=image, img_right=image) + + +@pytest.fixture() +def criteria_dataarray(img_size): + shape = (*img_size, 9, 5) + return xr.DataArray( + np.full(shape, Criteria.VALID), + coords={ + "row": np.arange(shape[0]), + "col": np.arange(shape[1]), + "disp_col": np.arange(-5, 4), + "disp_row": np.arange(-1, 4), + }, + dims=["row", "col", "disp_col", "disp_row"], + ) + + +class TestAllocateCriteriaDataset: + """Test create a criteria xarray.Dataset.""" + + @pytest.mark.parametrize( + ["value", "data_type"], + [ + [0, None], + [0, np.uint8], + [np.nan, np.float32], + [Criteria.VALID, None], + [Criteria.VALID.value, np.uint16], + ], + ) + def test_nominal_case(self, cost_volumes, value, data_type): + """Test allocate a criteria dataarray with correct cost_volumes, value and data_type.""" + criteria_dataarray = criteria.allocate_criteria_dataarray(cost_volumes, value, data_type) + + assert criteria_dataarray.shape == cost_volumes.cost_volumes.shape + + @pytest.mark.parametrize("value", [0, Criteria.VALID]) + @pytest.mark.parametrize("subpix", [1, 2, 4]) + def test_with_subpix(self, cost_volumes, value, subpix, img_size, disparity_cfg): + """Test allocate a criteria dataarray with correct cost_volumes, value and data_type.""" + criteria_dataarray = criteria.allocate_criteria_dataarray(cost_volumes, value, None) + + row, col = img_size + row_disparity, col_disparity = disparity_cfg + nb_col_disp = 2 * col_disparity["range"] * subpix + 1 + nb_row_disp = 2 * row_disparity["range"] * subpix + 1 + + assert criteria_dataarray.shape == cost_volumes.cost_volumes.shape + assert criteria_dataarray.shape == (row, col, nb_col_disp, 
nb_row_disp) + + +class TestSetUnprocessedDisparity: + """Test create a criteria xarray.Dataset.""" + + @pytest.fixture() + def grid_min_col(self, image): + return image["col_disparity"].sel(band_disp="min") + + @pytest.fixture() + def grid_max_col(self, image): + return image["col_disparity"].sel(band_disp="max") + + @pytest.fixture() + def grid_min_row(self, image): + return image["row_disparity"].sel(band_disp="min") + + @pytest.fixture() + def grid_max_row(self, image): + return image["row_disparity"].sel(band_disp="max") + + def test_homogeneous_grids(self, criteria_dataarray, grid_min_col, grid_max_col, grid_min_row, grid_max_row): + """With uniform grids""" + make_criteria_copy = criteria_dataarray.copy(deep=True) + criteria.set_unprocessed_disp(criteria_dataarray, grid_min_col, grid_max_col, grid_min_row, grid_max_row) + + xr.testing.assert_equal(criteria_dataarray, make_criteria_copy) + + def test_variable_col_disparity( + self, criteria_dataarray, grid_min_col, grid_max_col, grid_min_row, grid_max_row, img_size + ): + """With variable column disparity grids""" + _, col = img_size + nb_col_set = int(col / 2) + grid_min_col[:, :nb_col_set] = criteria_dataarray.coords["disp_col"].data[1] + grid_max_col[:, nb_col_set:] = criteria_dataarray.coords["disp_col"].data[-2] + + criteria.set_unprocessed_disp(criteria_dataarray, grid_min_col, grid_max_col, grid_min_row, grid_max_row) + + assert np.all( + criteria_dataarray.data[:, :nb_col_set, 0, :] == Criteria.PANDORA2D_MSK_PIXEL_DISPARITY_UNPROCESSED + ) + assert np.all(criteria_dataarray.data[:, nb_col_set:, 0, :] == Criteria.VALID) + assert np.all( + criteria_dataarray.data[:, nb_col_set:, -1, :] == Criteria.PANDORA2D_MSK_PIXEL_DISPARITY_UNPROCESSED + ) + assert np.all(criteria_dataarray.data[:, :nb_col_set, -1, :] == Criteria.VALID) + + def test_variable_row_disparity( + self, criteria_dataarray, grid_min_col, grid_max_col, grid_min_row, grid_max_row, img_size + ): + """With variable row disparity grids""" + row, _ = img_size + nb_row_set = int(row / 2) + grid_min_row[:nb_row_set, :] = criteria_dataarray.coords["disp_row"].data[1] + grid_max_row[nb_row_set:, :] = criteria_dataarray.coords["disp_row"].data[-2] + + criteria.set_unprocessed_disp(criteria_dataarray, grid_min_col, grid_max_col, grid_min_row, grid_max_row) + + assert np.all( + criteria_dataarray.data[:nb_row_set, :, :, 0] == Criteria.PANDORA2D_MSK_PIXEL_DISPARITY_UNPROCESSED + ) + assert np.all(criteria_dataarray.data[nb_row_set:, :, :, 0] == Criteria.VALID) + assert np.all( + criteria_dataarray.data[nb_row_set:, :, :, -1] == Criteria.PANDORA2D_MSK_PIXEL_DISPARITY_UNPROCESSED + ) + assert np.all(criteria_dataarray.data[:nb_row_set, :, :, -1] == Criteria.VALID) + + +class TestMaskBorder: + """Test mask_border method.""" + + def test_null_offset(self, criteria_dataarray): + """offset = 0, no raise PANDORA2D_MSK_PIXEL_LEFT_BORDER criteria""" + make_criteria_copy = criteria_dataarray.copy(deep=True) + criteria.mask_border(0, criteria_dataarray) + + # Check criteria_dataarray has not changed + xr.testing.assert_equal(criteria_dataarray, make_criteria_copy) + # Check the PANDORA2D_MSK_PIXEL_LEFT_BORDER criteria does not raise + assert np.all(criteria_dataarray.data[:, :, :, :] != Criteria.PANDORA2D_MSK_PIXEL_LEFT_BORDER) + + @pytest.mark.parametrize("offset", [1, 2, 3]) + def test_variable_offset(self, criteria_dataarray, offset): + """ + With mask_border, the PANDORA2D_MSK_PIXEL_LEFT_BORDER criteria is raised on the border. 
+ + Example : + offset = 1 + + For this image : 1 2 3 4 5 6 7 8 + 1 2 3 4 5 6 7 8 + 1 2 3 4 5 6 7 8 + 1 2 3 4 5 6 7 8 + 1 2 3 4 5 6 7 8 + 1 2 3 4 5 6 7 8 + + and a criteria_dataarray : 0 0 0 0 0 0 0 0 + 0 0 0 0 0 0 0 0 + 0 0 0 0 0 0 0 0 + 0 0 0 0 0 0 0 0 + 0 0 0 0 0 0 0 0 + 0 0 0 0 0 0 0 0 + + the result is : 1 1 1 1 1 1 1 1 + 1 0 0 0 0 0 0 1 + 1 0 0 0 0 0 0 1 + 1 0 0 0 0 0 0 1 + 1 0 0 0 0 0 0 1 + 1 1 1 1 1 1 1 1 + """ + criteria.mask_border(offset, criteria_dataarray) + + assert np.all(criteria_dataarray.data[:offset, :, :, :] == Criteria.PANDORA2D_MSK_PIXEL_LEFT_BORDER) + assert np.all(criteria_dataarray.data[-offset:, :, :, :] == Criteria.PANDORA2D_MSK_PIXEL_LEFT_BORDER) + assert np.all(criteria_dataarray.data[:, :offset, :, :] == Criteria.PANDORA2D_MSK_PIXEL_LEFT_BORDER) + assert np.all(criteria_dataarray.data[:, -offset:, :, :] == Criteria.PANDORA2D_MSK_PIXEL_LEFT_BORDER) + + +class TestMaskDisparityOutsideRightImage: + """Test mask_disparity_outside_right_image method.""" + + @pytest.fixture() + def ground_truth_null_disparity(self, offset, img_size): + """Make ground_truth of criteria dataarray for null disparity""" + data = np.full(img_size, Criteria.VALID) + if offset > 0: + data[:offset, :] = Criteria.PANDORA2D_MSK_PIXEL_RIGHT_DISPARITY_OUTSIDE + data[-offset:, :] = Criteria.PANDORA2D_MSK_PIXEL_RIGHT_DISPARITY_OUTSIDE + data[:, :offset] = Criteria.PANDORA2D_MSK_PIXEL_RIGHT_DISPARITY_OUTSIDE + data[:, -offset:] = Criteria.PANDORA2D_MSK_PIXEL_RIGHT_DISPARITY_OUTSIDE + return data + + @pytest.fixture() + def ground_truth_first_disparity(self, offset, img_size): + """ + Make ground_truth of criteria dataarray for first disparity (disp_col=-5 and disp_row=-1) + + Example for window_size = 3 -> offset = 1, disp_col=-5 & disp_row=-1 & img_size = (10, 13) + data = ([ + [8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8], + [8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8], + [8, 8, 8, 8, 8, 8, 0, 0, 0, 0, 0, 0, 0], + [8, 8, 8, 8, 8, 8, 0, 0, 0, 0, 0, 0, 0], + [8, 8, 8, 8, 8, 8, 0, 0, 0, 0, 0, 0, 0], + [8, 8, 8, 8, 8, 8, 0, 0, 0, 0, 0, 0, 0], + [8, 8, 8, 8, 8, 8, 0, 0, 0, 0, 0, 0, 0], + [8, 8, 8, 8, 8, 8, 0, 0, 0, 0, 0, 0, 0], + [8, 8, 8, 8, 8, 8, 0, 0, 0, 0, 0, 0, 0], + [8, 8, 8, 8, 8, 8, 0, 0, 0, 0, 0, 0, 0] + ]) + + Example for window_size = 5 -> offset = 2, disp_col=-5 & disp_row=-1 & img_size = (10, 13) + data = ([ + [8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8], + [8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8], + [8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8], + [8, 8, 8, 8, 8, 8, 8, 0, 0, 0, 0, 0, 0], + [8, 8, 8, 8, 8, 8, 8, 0, 0, 0, 0, 0, 0], + [8, 8, 8, 8, 8, 8, 8, 0, 0, 0, 0, 0, 0], + [8, 8, 8, 8, 8, 8, 8, 0, 0, 0, 0, 0, 0], + [8, 8, 8, 8, 8, 8, 8, 0, 0, 0, 0, 0, 0], + [8, 8, 8, 8, 8, 8, 8, 0, 0, 0, 0, 0, 0], + [8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8] + ]) + """ + data = np.full(img_size, Criteria.VALID) + # Update row + first_row_disparity = -1 + delta_row_start = offset + abs(first_row_disparity) + delta_row_end = offset + first_row_disparity + data[:delta_row_start, :] = Criteria.PANDORA2D_MSK_PIXEL_RIGHT_DISPARITY_OUTSIDE + if delta_row_end > 0: + data[-delta_row_end:, :] = Criteria.PANDORA2D_MSK_PIXEL_RIGHT_DISPARITY_OUTSIDE + # Update col + first_col_disparity = -5 + delta_col_start = offset + abs(first_col_disparity) + delta_col_end = offset + first_col_disparity + data[:, :delta_col_start] = Criteria.PANDORA2D_MSK_PIXEL_RIGHT_DISPARITY_OUTSIDE + if delta_col_end > 0: + data[:, -delta_col_end:] = Criteria.PANDORA2D_MSK_PIXEL_RIGHT_DISPARITY_OUTSIDE + return data + + @pytest.mark.parametrize( + "offset", + [ +
pytest.param(0), + pytest.param(1), + pytest.param(2), + pytest.param(3), + pytest.param(49, id="offset > dimension"), + ], + ) + def test_nominal(self, offset, criteria_dataarray, ground_truth_null_disparity, ground_truth_first_disparity): + """ + Test mask_disparity_outside_right_image + """ + criteria.mask_disparity_outside_right_image(offset, criteria_dataarray) + + np.testing.assert_array_equal(criteria_dataarray.values[:, :, 5, 1], ground_truth_null_disparity) + np.testing.assert_array_equal(criteria_dataarray.values[:, :, 0, 0], ground_truth_first_disparity) + + +@pytest.mark.parametrize("img_size", [(5, 6)]) +class TestMaskLeftNoData: + """Test mask_left_no_data function.""" + + @pytest.mark.parametrize( + ["no_data_position", "window_size", "row_slice", "col_slice"], + [ + pytest.param((2, 2), 1, 2, 2), + pytest.param((2, 2), 3, np.s_[1:4], np.s_[1:4]), + pytest.param((0, 2), 1, 0, 2), + pytest.param((0, 2), 3, np.s_[:2], np.s_[1:4]), + pytest.param((4, 5), 3, np.s_[-2:], np.s_[-2:]), + ], + ) + def test_add_criteria_to_all_valid( + self, img_size, image, criteria_dataarray, no_data_position, window_size, row_slice, col_slice + ): + """Test add to a previously VALID criteria.""" + no_data_row_position, no_data_col_position = no_data_position + + image["msk"][no_data_row_position, no_data_col_position] = image.attrs["no_data_mask"] + + expected_criteria_data = np.full((*img_size, 9, 5), Criteria.VALID) + expected_criteria_data[row_slice, col_slice, ...] = Criteria.PANDORA2D_MSK_PIXEL_LEFT_NODATA + + criteria.mask_left_no_data(image, window_size, criteria_dataarray) + + np.testing.assert_array_equal(criteria_dataarray.values, expected_criteria_data) + + @pytest.mark.parametrize( + ["no_data_position", "window_size", "row_slice", "col_slice"], + [ + pytest.param((2, 2), 1, 2, 2), + pytest.param((2, 2), 3, np.s_[1:4], np.s_[1:4]), + pytest.param((0, 2), 1, 0, 2), + pytest.param((0, 2), 3, np.s_[:2], np.s_[1:4]), + pytest.param((4, 5), 3, np.s_[-2:], np.s_[-2:]), + ], + ) + def test_add_to_existing( + self, img_size, image, criteria_dataarray, no_data_position, window_size, row_slice, col_slice + ): + """Test we do not override existing criteria but combine it.""" + no_data_row_position, no_data_col_position = no_data_position + + image["msk"][no_data_row_position, no_data_col_position] = image.attrs["no_data_mask"] + + criteria_dataarray.data[no_data_row_position, no_data_col_position, ...] = ( + Criteria.PANDORA2D_MSK_PIXEL_RIGHT_DISPARITY_OUTSIDE + ) + + expected_criteria_data = np.full((*img_size, 9, 5), Criteria.VALID) + expected_criteria_data[row_slice, col_slice, ...] = Criteria.PANDORA2D_MSK_PIXEL_LEFT_NODATA + expected_criteria_data[no_data_row_position, no_data_col_position, ...] 
= ( + Criteria.PANDORA2D_MSK_PIXEL_LEFT_NODATA | Criteria.PANDORA2D_MSK_PIXEL_RIGHT_DISPARITY_OUTSIDE + ) + + criteria.mask_left_no_data(image, window_size, criteria_dataarray) + + np.testing.assert_array_equal(criteria_dataarray.values, expected_criteria_data) + + +@pytest.mark.parametrize("img_size", [(4, 5)]) +class TestMaskRightNoData: + """Test mask_right_no_data function.""" + + @pytest.mark.usefixtures("mask_image") + @pytest.mark.parametrize( + ["no_data_mask", "msk", "disp_row", "disp_col", "expected_criteria"], + [ + # pylint: disable=line-too-long + pytest.param( + 1, + np.array( + [ + [0, 0, 0, 0, 0], + [0, 0, 0, 0, 0], + [0, 0, 0, 0, 0], + [0, 0, 0, 0, 1], + ] + ), + -1, + -1, + np.array( + # fmt: off + [ + [Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID], + [Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID], + [Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID], + [Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID], + ] + # fmt: on + ), + id="Disp -1 -1 - Pos (3,4)", + ), + pytest.param( + 1, + np.array( + [ + [0, 0, 0, 0, 0], + [0, 0, 0, 0, 0], + [0, 0, 0, 0, 0], + [0, 0, 0, 0, 1], + ] + ), + -1, + 1, + np.array( + # fmt: off + [ + [Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID], + [Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID], + [Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID], + [Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID], + ] + # fmt: on + ), + id="Disp -1 1 - Pos (3,4)", + ), + pytest.param( + 1, + np.array( + [ + [0, 0, 0, 0, 0], + [0, 0, 0, 0, 0], + [0, 0, 0, 0, 0], + [0, 0, 0, 0, 1], + ] + ), + 1, + 1, + np.array( + # fmt: off + [ + [Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID], + [Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID], + [Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.PANDORA2D_MSK_PIXEL_RIGHT_NODATA, Criteria.VALID], + [Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID], + ] + # fmt: on + ), + id="Disp 1 1 - Pos (3,4)", + ), + pytest.param( + 1, + np.array( + [ + [0, 0, 0, 0, 0], + [0, 0, 0, 0, 0], + [0, 0, 0, 0, 0], + [0, 0, 0, 0, 1], + ] + ), + 2, + 1, + np.array( + # fmt: off + [ + [Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID], + [Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.PANDORA2D_MSK_PIXEL_RIGHT_NODATA, Criteria.VALID], + [Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID], + [Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID], + ] + # fmt: on + ), + id="Disp 2 1 - Pos (3,4)", + ), + pytest.param( + 2, + np.array( + [ + [0, 0, 0, 0, 0], + [1, 0, 0, 0, 0], + [0, 1, 0, 0, 0], + [0, 0, 0, 0, 2], + ] + ), + 2, + 1, + np.array( + # fmt: off + [ + [Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID], + [Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.PANDORA2D_MSK_PIXEL_RIGHT_NODATA, Criteria.VALID], + [Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID], + [Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID], + ] + # fmt: on + ), + id="Disp 2 1 - other no_data_mask", + ), + ], + # pylint: enable=line-too-long + ) + def test_window_size_1(self, image, criteria_dataarray, disp_row, disp_col, 
expected_criteria): + """Test some disparity couples with a window size of 1.""" + + criteria.mask_right_no_data(image, 1, criteria_dataarray) + + np.testing.assert_array_equal( + criteria_dataarray.sel(disp_row=disp_row, disp_col=disp_col), + expected_criteria, + ) + + @pytest.mark.usefixtures("mask_image") + @pytest.mark.parametrize( + ["no_data_mask", "msk", "disp_row", "disp_col", "expected_criteria"], + # pylint: disable=line-too-long + [ + pytest.param( + 1, + np.array( + [ + [0, 0, 0, 0, 0], + [0, 0, 0, 0, 0], + [0, 0, 0, 1, 0], + [0, 0, 0, 0, 0], + ] + ), + -1, + -1, + np.array( + [ + # fmt: off + [Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID], + [Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID], + [Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.PANDORA2D_MSK_PIXEL_RIGHT_NODATA, Criteria.PANDORA2D_MSK_PIXEL_RIGHT_NODATA], + [Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.PANDORA2D_MSK_PIXEL_RIGHT_NODATA, Criteria.PANDORA2D_MSK_PIXEL_RIGHT_NODATA], + # fmt: on + ] + ), + id="Disp -1 -1 - Pos (2,3)", + ), + pytest.param( + 1, + np.array( + [ + [0, 0, 0, 0, 0], + [0, 0, 0, 0, 0], + [0, 0, 0, 1, 0], + [0, 0, 0, 0, 0], + ] + ), + -1, + 1, + np.array( + [ + # fmt: off + [Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID], + [Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID], + [Criteria.VALID, Criteria.PANDORA2D_MSK_PIXEL_RIGHT_NODATA, Criteria.PANDORA2D_MSK_PIXEL_RIGHT_NODATA, Criteria.PANDORA2D_MSK_PIXEL_RIGHT_NODATA, Criteria.VALID], + [Criteria.VALID, Criteria.PANDORA2D_MSK_PIXEL_RIGHT_NODATA, Criteria.PANDORA2D_MSK_PIXEL_RIGHT_NODATA, Criteria.PANDORA2D_MSK_PIXEL_RIGHT_NODATA, Criteria.VALID], + # fmt: on + ] + ), + id="Disp -1 1 - Pos (2,3)", + ), + pytest.param( + 1, + np.array( + [ + [0, 0, 0, 0, 0], + [0, 0, 0, 0, 0], + [0, 0, 0, 1, 0], + [0, 0, 0, 0, 0], + ] + ), + 1, + 1, + np.array( + [ + # fmt: off + [Criteria.VALID, Criteria.PANDORA2D_MSK_PIXEL_RIGHT_NODATA, Criteria.PANDORA2D_MSK_PIXEL_RIGHT_NODATA, Criteria.PANDORA2D_MSK_PIXEL_RIGHT_NODATA, Criteria.VALID], + [Criteria.VALID, Criteria.PANDORA2D_MSK_PIXEL_RIGHT_NODATA, Criteria.PANDORA2D_MSK_PIXEL_RIGHT_NODATA, Criteria.PANDORA2D_MSK_PIXEL_RIGHT_NODATA, Criteria.VALID], + [Criteria.VALID, Criteria.PANDORA2D_MSK_PIXEL_RIGHT_NODATA, Criteria.PANDORA2D_MSK_PIXEL_RIGHT_NODATA, Criteria.PANDORA2D_MSK_PIXEL_RIGHT_NODATA, Criteria.VALID], + [Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID], + # fmt: on + ] + ), + id="Disp 1 1 - Pos (2,3)", + ), + pytest.param( + 3, + np.array( + [ + [0, 0, 0, 0, 0], + [0, 0, 1, 2, 0], + [0, 0, 1, 3, 0], + [0, 0, 4, 0, 0], + ] + ), + 1, + 1, + np.array( + [ + # fmt: off + [Criteria.VALID, Criteria.PANDORA2D_MSK_PIXEL_RIGHT_NODATA, Criteria.PANDORA2D_MSK_PIXEL_RIGHT_NODATA, Criteria.PANDORA2D_MSK_PIXEL_RIGHT_NODATA, Criteria.VALID], + [Criteria.VALID, Criteria.PANDORA2D_MSK_PIXEL_RIGHT_NODATA, Criteria.PANDORA2D_MSK_PIXEL_RIGHT_NODATA, Criteria.PANDORA2D_MSK_PIXEL_RIGHT_NODATA, Criteria.VALID], + [Criteria.VALID, Criteria.PANDORA2D_MSK_PIXEL_RIGHT_NODATA, Criteria.PANDORA2D_MSK_PIXEL_RIGHT_NODATA, Criteria.PANDORA2D_MSK_PIXEL_RIGHT_NODATA, Criteria.VALID], + [Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID], + # fmt: on + ] + ), + id="Disp 1 1 - Pos (2,3) - other no_data_mask", + ), + pytest.param( + 1, + np.array( + [ + [0, 0, 0, 0, 0], + [0, 0, 0, 0, 0], + [0, 0, 0, 1, 0], + [0, 0, 0, 0, 0], + 
] + ), + 2, + 1, + np.array( + [ + # fmt: off + [Criteria.VALID, Criteria.PANDORA2D_MSK_PIXEL_RIGHT_NODATA, Criteria.PANDORA2D_MSK_PIXEL_RIGHT_NODATA, Criteria.PANDORA2D_MSK_PIXEL_RIGHT_NODATA, Criteria.VALID], + [Criteria.VALID, Criteria.PANDORA2D_MSK_PIXEL_RIGHT_NODATA, Criteria.PANDORA2D_MSK_PIXEL_RIGHT_NODATA, Criteria.PANDORA2D_MSK_PIXEL_RIGHT_NODATA, Criteria.VALID], + [Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID], + [Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID], + # fmt: on + ] + ), + id="Disp 2 1 - Pos (2,3)", + ), + ], + # pylint: enable=line-too-long + ) + def test_window_size_3(self, image, criteria_dataarray, disp_row, disp_col, expected_criteria): + """Test some disparity couples with a window size of 3.""" + + criteria.mask_right_no_data(image, 3, criteria_dataarray) + + np.testing.assert_array_equal( + criteria_dataarray.sel(disp_row=disp_row, disp_col=disp_col), + expected_criteria, + ) + + def test_combination(self, image, criteria_dataarray): + """Test that we combine with existing criteria and do not override them.""" + image["msk"].data = np.array( + [ + [0, 0, 0, 0, 0], + [0, 0, 0, 0, 0], + [0, 0, 0, 0, 0], + [0, 0, 0, 0, 1], + ] + ) + + criteria_dataarray.data[2, 3, ...] = Criteria.PANDORA2D_MSK_PIXEL_RIGHT_DISPARITY_OUTSIDE + + criteria.mask_right_no_data(image, 1, criteria_dataarray) + + assert ( + criteria_dataarray.sel(row=2, col=3, disp_row=1, disp_col=1).data + == Criteria.PANDORA2D_MSK_PIXEL_RIGHT_DISPARITY_OUTSIDE | Criteria.PANDORA2D_MSK_PIXEL_RIGHT_NODATA + ) + + +class TestMaskLeftInvalid: + """Test mask_left_invalid function.""" + + @pytest.mark.parametrize( + ["invalid_position"], + [ + pytest.param((2, 2)), + pytest.param((0, 0)), + pytest.param((0, 2)), + pytest.param((9, 12)), + pytest.param((4, 5)), + ], + ) + def test_mask_left_invalid(self, img_size, image, criteria_dataarray, invalid_position): + """ + Test that mask_invalid_left method raises criteria PANDORA2D_MSK_PIXEL_INVALIDITY_MASK_LEFT + for points whose value is neither valid_pixels or no_data_mask. + """ + invalid_row_position, invalid_col_position = invalid_position + + # We put 2 in img_left msk because it is different from valid_pixels=0 and no_data_mask=1 + image["msk"][invalid_row_position, invalid_col_position] = 2 + + expected_criteria_data = np.full((*img_size, 9, 5), Criteria.VALID) + expected_criteria_data[invalid_row_position, invalid_col_position, ...] = ( + Criteria.PANDORA2D_MSK_PIXEL_INVALIDITY_MASK_LEFT + ) + + criteria.mask_left_invalid(image, criteria_dataarray) + + np.testing.assert_array_equal(criteria_dataarray.values, expected_criteria_data) + + @pytest.mark.parametrize( + ["invalid_position"], + [ + pytest.param((2, 2)), + pytest.param((0, 0)), + pytest.param((0, 2)), + pytest.param((9, 12)), + pytest.param((4, 5)), + ], + ) + def test_add_to_existing(self, img_size, image, criteria_dataarray, invalid_position): + """Test we do not override existing criteria but combine it.""" + invalid_row_position, invalid_col_position = invalid_position + + # We put 2 in img_left msk because it is different from valid_pixels=0 and no_data_mask=1 + image["msk"][invalid_row_position, invalid_col_position] = 2 + + criteria_dataarray.data[invalid_row_position, invalid_col_position, ...] = ( + Criteria.PANDORA2D_MSK_PIXEL_RIGHT_DISPARITY_OUTSIDE + ) + + expected_criteria_data = np.full((*img_size, 9, 5), Criteria.VALID) + expected_criteria_data[invalid_row_position, invalid_col_position, ...] 
= ( + Criteria.PANDORA2D_MSK_PIXEL_INVALIDITY_MASK_LEFT | Criteria.PANDORA2D_MSK_PIXEL_RIGHT_DISPARITY_OUTSIDE + ) + + criteria.mask_left_invalid(image, criteria_dataarray) + + np.testing.assert_array_equal(criteria_dataarray.values, expected_criteria_data) + + +@pytest.mark.parametrize("img_size", [(4, 5)]) +class TestMaskRightInvalid: + """Test mask_right_invalid function.""" + + @pytest.mark.usefixtures("mask_image") + @pytest.mark.parametrize( + ["valid_pixels", "no_data_mask", "msk", "expected_criteria", "disp_col", "disp_row"], + [ + # pylint: disable=line-too-long + pytest.param( + 0, + 1, + np.array( # msk + [ + [0, 0, 0, 0, 0], + [0, 0, 0, 0, 0], + [0, 0, 0, 2, 0], + [0, 0, 0, 0, 0], + ] + ), + np.array( + [ + # fmt: off + [Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID], + [Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID], + [Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID], + [Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.PANDORA2D_MSK_PIXEL_INVALIDITY_MASK_RIGHT], + # fmt: on + ] + ), + -1, # disp_col + -1, # disp_row + id="Invalid point at center of right mask with disp_row=-1 and disp_col=-1", + ), + pytest.param( + 0, + 1, + np.array( # msk + [ + [0, 0, 0, 0, 0], + [0, 0, 0, 0, 0], + [0, 0, 0, 2, 0], + [0, 0, 0, 0, 0], + ] + ), + np.array( + [ + # fmt: off + [Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID], + [Criteria.VALID, Criteria.PANDORA2D_MSK_PIXEL_INVALIDITY_MASK_RIGHT, Criteria.VALID, Criteria.VALID, Criteria.VALID], + [Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID], + [Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID], + # fmt: on + ] + ), + 2, # disp_col + 1, # disp_row + id="Invalid point at center of right mask with disp_row=2 and disp_col=1", + ), + pytest.param( + 0, + 1, + np.array( # msk + [ + [0, 0, 0, 0, 0], + [0, 0, 0, 0, 0], + [0, 0, 0, 0, 0], + [0, 0, 0, 0, 2], + ] + ), + np.array( + [ + # fmt: off + [Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID], + [Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID], + [Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID], + [Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID], + # fmt: on + ] + ), + -1, # disp_col + -1, # disp_row + id="Invalid point at right bottom corner of right mask with disp_row=-1 and disp_col=-1", + ), + pytest.param( + 0, + 1, + np.array( # msk + [ + [0, 0, 0, 0, 0], + [0, 3, 0, 0, 0], + [0, 0, 0, 0, 0], + [0, 0, 0, 0, 0], + ] + ), + np.array( + [ + # fmt: off + [Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID], + [Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID], + [Criteria.VALID, Criteria.VALID, Criteria.PANDORA2D_MSK_PIXEL_INVALIDITY_MASK_RIGHT, Criteria.VALID, Criteria.VALID], + [Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID], + # fmt: on + ] + ), + -1, # disp_col + -1, # disp_row + id="Invalid point at center of right mask with disp_row=-1 and disp_col=-1", + ), + pytest.param( + 0, + 1, + np.array( # msk + [ + [0, 0, 0, 0, 0], + [0, 0, 0, 0, 0], + [0, 0, 4, 0, 0], + [0, 0, 0, 0, 0], + ] + ), + np.array( + [ + # fmt: off + [Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID], + [Criteria.VALID, Criteria.VALID, Criteria.VALID, 
Criteria.VALID, Criteria.VALID], + [Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID], + [Criteria.VALID, Criteria.VALID, Criteria.PANDORA2D_MSK_PIXEL_INVALIDITY_MASK_RIGHT, Criteria.VALID, Criteria.VALID], + # fmt: on + ] + ), + 0, # disp_col + -1, # disp_row + id="Invalid point at center of right mask with disp_row=-1 and disp_col=0", + ), + pytest.param( + 3, + 4, + np.array( # msk + [ + [3, 3, 3, 3, 3], + [3, 3, 0, 3, 4], + [3, 3, 4, 3, 3], + [3, 4, 3, 3, 3], + ] + ), + np.array( + [ + # fmt: off + [Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID], + [Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID], + [Criteria.VALID, Criteria.VALID, Criteria.PANDORA2D_MSK_PIXEL_INVALIDITY_MASK_RIGHT, Criteria.VALID, Criteria.VALID], + [Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID], + # fmt: on + ] + ), + 0, # disp_col + -1, # disp_row + id="Invalid point at center of right mask with no_data_mask=4, valid_pixels=3, disp_row=-1 and disp_col=0", + ), + # pylint: enable=line-too-long + ], + ) + def test_mask_invalid_right(self, image, criteria_dataarray, expected_criteria, disp_col, disp_row): + """ + Test that mask_invalid_right method raises criteria PANDORA2D_MSK_PIXEL_INVALIDITY_MASK_RIGHT + for points whose value is neither valid_pixels or no_data_mask when we shift it by its disparity. + """ + + criteria.mask_right_invalid(image, criteria_dataarray) + + np.testing.assert_array_equal( + criteria_dataarray.sel(disp_row=disp_row, disp_col=disp_col), + expected_criteria, + ) + + @pytest.mark.usefixtures("mask_image") + @pytest.mark.parametrize( + ["msk", "disp_col", "disp_row"], + [ + pytest.param( + np.array( # msk + [ + [0, 0, 0, 0, 0], + [0, 3, 0, 0, 0], + [0, 0, 0, 0, 0], + [0, 0, 0, 0, 0], + ] + ), + -2, # disp_col + -1, # disp_row + id="Invalid point at center of right mask with disp_row=-1 and disp_col=-2", + ), + pytest.param( + np.array( # msk + [ + [0, 0, 0, 0, 0], + [0, 0, 0, 0, 0], + [0, 0, 0, 0, 0], + [0, 0, 0, 0, 2], + ] + ), + 1, # disp_col + 1, # disp_row + id="Invalid point at right bottom corner of right mask with disp_row=1 and disp_col=1", + ), + ], + ) + def test_combination(self, image, criteria_dataarray, disp_col, disp_row): + """ + Test that we combine Criteria.PANDORA2D_MSK_PIXEL_INVALIDITY_MASK_RIGHT + with existing criteria and do not override them. + """ + + criteria_dataarray.data[2, 3, ...] 
= Criteria.PANDORA2D_MSK_PIXEL_RIGHT_DISPARITY_OUTSIDE + + criteria.mask_right_invalid(image, criteria_dataarray) + + assert ( + criteria_dataarray.sel(row=2, col=3, disp_row=disp_row, disp_col=disp_col).data + == Criteria.PANDORA2D_MSK_PIXEL_RIGHT_DISPARITY_OUTSIDE | Criteria.PANDORA2D_MSK_PIXEL_INVALIDITY_MASK_RIGHT + ) + + +@pytest.mark.parametrize("img_size", [(4, 5)]) +class TestGetCriteriaDataarray: + """Test get_criteria_dataarray function.""" + + @pytest.fixture() + def image_variable_disp(self, image, img_size): + """Make image with variable disparity grids""" + + # Make so when we change image_variable_disp mask it + # does not change image mask + img = copy.copy(image) + row, col = img_size + + nb_col_set = int(col / 2) + nb_row_set = int(row / 2) + + # Get variable col disparities + + # Minimal col disparity grid is equal to: + # [[-3, -3, -5, -5, -5] + # [-3, -3, -5, -5, -5] + # [-3, -3, -5, -5, -5] + # [-3, -3, -5, -5, -5]] + img["col_disparity"].sel(band_disp="min")[:, :nb_col_set] = -3 + + # Maximal col disparity grid is equal to: + # [[ 3, 3, 1, 1, 1] + # [ 3, 3, 1, 1, 1] + # [ 3, 3, 1, 1, 1] + # [ 3, 3, 1, 1, 1]] + img["col_disparity"].sel(band_disp="max")[:, nb_col_set:] = 1 + + # Get variable row disparities + + # Minimal row disparity grid is equal to: + # [[ 0, 0, 0, 0, 0] + # [ 0, 0, 0, 0, 0] + # [-1, -1, -1, -1, -1] + # [-1, -1, -1, -1, -1]] + img["row_disparity"].sel(band_disp="min")[:nb_row_set, :] = 0 + + # Maximal row disparity grid is equal to: + # [[ 3, 3, 3, 3, 3] + # [ 3, 3, 3, 3, 3] + # [ 2, 2, 2, 2, 2] + # [ 2, 2, 2, 2, 2]] + + img["row_disparity"].sel(band_disp="max")[nb_row_set:, :] = 2 + + return img + + @pytest.mark.usefixtures("mask_image") + @pytest.mark.parametrize( + ["left_msk", "msk", "disp_col", "disp_row", "window_size", "expected_criteria"], + [ + # pylint: disable=line-too-long + pytest.param( + np.array( # left msk + [ + [0, 0, 0, 0, 0], + [0, 0, 0, 0, 0], + [0, 0, 0, 0, 0], + [0, 0, 0, 0, 0], + ] + ), + np.array( # right msk + [ + [0, 0, 0, 0, 0], + [0, 0, 0, 0, 0], + [0, 0, 0, 0, 0], + [0, 0, 0, 0, 0], + ] + ), + 0, # disp_col + 0, # disp_row + 1, # window_size + np.array( + [ + # fmt: off + [Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID], + [Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID], + [Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID], + [Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID], + # fmt: on + ] + ), + id="Everything is valid", + ), + pytest.param( + np.array( # left msk + [ + [0, 0, 0, 0, 0], + [0, 0, 0, 0, 0], + [0, 0, 0, 0, 0], + [0, 0, 0, 0, 0], + ] + ), + np.array( # right msk + [ + [0, 0, 0, 0, 0], + [0, 0, 0, 0, 0], + [0, 0, 0, 0, 0], + [0, 0, 0, 0, 0], + ] + ), + 2, # disp_col + -1, # disp_row + 1, # window_size + np.array( + [ + # fmt: off + [Criteria.PANDORA2D_MSK_PIXEL_DISPARITY_UNPROCESSED, Criteria.PANDORA2D_MSK_PIXEL_DISPARITY_UNPROCESSED, Criteria.PANDORA2D_MSK_PIXEL_DISPARITY_UNPROCESSED, Criteria.PANDORA2D_MSK_PIXEL_DISPARITY_UNPROCESSED, Criteria.PANDORA2D_MSK_PIXEL_DISPARITY_UNPROCESSED], + [Criteria.PANDORA2D_MSK_PIXEL_DISPARITY_UNPROCESSED, Criteria.PANDORA2D_MSK_PIXEL_DISPARITY_UNPROCESSED, Criteria.PANDORA2D_MSK_PIXEL_DISPARITY_UNPROCESSED, Criteria.PANDORA2D_MSK_PIXEL_DISPARITY_UNPROCESSED, Criteria.PANDORA2D_MSK_PIXEL_DISPARITY_UNPROCESSED], + [Criteria.VALID , Criteria.VALID , Criteria.PANDORA2D_MSK_PIXEL_DISPARITY_UNPROCESSED, 
Criteria.PANDORA2D_MSK_PIXEL_DISPARITY_UNPROCESSED, Criteria.PANDORA2D_MSK_PIXEL_DISPARITY_UNPROCESSED], + [Criteria.VALID , Criteria.VALID , Criteria.PANDORA2D_MSK_PIXEL_DISPARITY_UNPROCESSED, Criteria.PANDORA2D_MSK_PIXEL_DISPARITY_UNPROCESSED, Criteria.PANDORA2D_MSK_PIXEL_DISPARITY_UNPROCESSED], + # fmt: on + ] + ), + id="Criteria.PANDORA2D_MSK_PIXEL_DISPARITY_UNPROCESSED overcome other criteria", + ), + pytest.param( + np.array( # left msk + [ + [0, 0, 0, 0, 0], + [0, 1, 0, 0, 0], + [0, 0, 0, 2, 0], + [0, 0, 0, 0, 0], + ] + ), + np.array( # right msk + [ + [0, 0, 0, 0, 0], + [0, 0, 0, 0, 0], + [0, 2, 0, 1, 0], + [0, 0, 0, 0, 0], + ] + ), + -1, # disp_col + 1, # disp_row + 1, # window_size + np.array( + [ + # fmt: off + [Criteria.PANDORA2D_MSK_PIXEL_RIGHT_DISPARITY_OUTSIDE , Criteria.VALID, Criteria.VALID, Criteria.VALID, Criteria.VALID], + [Criteria.PANDORA2D_MSK_PIXEL_RIGHT_DISPARITY_OUTSIDE , Criteria.PANDORA2D_MSK_PIXEL_LEFT_NODATA, Criteria.PANDORA2D_MSK_PIXEL_INVALIDITY_MASK_RIGHT, Criteria.VALID, Criteria.PANDORA2D_MSK_PIXEL_RIGHT_NODATA], + [Criteria.PANDORA2D_MSK_PIXEL_RIGHT_DISPARITY_OUTSIDE , Criteria.VALID, Criteria.VALID, Criteria.PANDORA2D_MSK_PIXEL_INVALIDITY_MASK_LEFT, Criteria.VALID], + [Criteria.PANDORA2D_MSK_PIXEL_RIGHT_DISPARITY_OUTSIDE , Criteria.PANDORA2D_MSK_PIXEL_RIGHT_DISPARITY_OUTSIDE , Criteria.PANDORA2D_MSK_PIXEL_RIGHT_DISPARITY_OUTSIDE , Criteria.PANDORA2D_MSK_PIXEL_RIGHT_DISPARITY_OUTSIDE , Criteria.PANDORA2D_MSK_PIXEL_RIGHT_DISPARITY_OUTSIDE ], + # fmt: on + ] + ), + id="Mix of criteria with window_size=1", + ), + pytest.param( + np.array( # left msk + [ + [0, 0, 0, 0, 0], + [0, 1, 0, 0, 0], + [0, 0, 0, 2, 0], + [0, 0, 0, 0, 0], + ] + ), + np.array( # right msk + [ + [0, 0, 0, 0, 0], + [0, 0, 0, 0, 0], + [0, 3, 0, 1, 0], + [0, 0, 0, 0, 0], + ] + ), + -1, # disp_col + 1, # disp_row + 3, # window_size + np.array( + [ + # fmt: off + [Criteria.PANDORA2D_MSK_PIXEL_LEFT_BORDER , Criteria.PANDORA2D_MSK_PIXEL_LEFT_BORDER, Criteria.PANDORA2D_MSK_PIXEL_LEFT_BORDER, Criteria.PANDORA2D_MSK_PIXEL_LEFT_BORDER, Criteria.PANDORA2D_MSK_PIXEL_LEFT_BORDER], + [Criteria.PANDORA2D_MSK_PIXEL_LEFT_BORDER , Criteria.PANDORA2D_MSK_PIXEL_LEFT_NODATA | Criteria.PANDORA2D_MSK_PIXEL_RIGHT_DISPARITY_OUTSIDE, Criteria.PANDORA2D_MSK_PIXEL_LEFT_NODATA | Criteria.PANDORA2D_MSK_PIXEL_INVALIDITY_MASK_RIGHT, Criteria.PANDORA2D_MSK_PIXEL_RIGHT_NODATA, Criteria.PANDORA2D_MSK_PIXEL_LEFT_BORDER], + [Criteria.PANDORA2D_MSK_PIXEL_LEFT_BORDER , Criteria.PANDORA2D_MSK_PIXEL_LEFT_NODATA | Criteria.PANDORA2D_MSK_PIXEL_RIGHT_DISPARITY_OUTSIDE, Criteria.PANDORA2D_MSK_PIXEL_LEFT_NODATA | Criteria.PANDORA2D_MSK_PIXEL_RIGHT_DISPARITY_OUTSIDE, Criteria.PANDORA2D_MSK_PIXEL_RIGHT_NODATA | Criteria.PANDORA2D_MSK_PIXEL_RIGHT_DISPARITY_OUTSIDE | Criteria.PANDORA2D_MSK_PIXEL_INVALIDITY_MASK_LEFT, Criteria.PANDORA2D_MSK_PIXEL_LEFT_BORDER], + [Criteria.PANDORA2D_MSK_PIXEL_LEFT_BORDER , Criteria.PANDORA2D_MSK_PIXEL_LEFT_BORDER, Criteria.PANDORA2D_MSK_PIXEL_LEFT_BORDER , Criteria.PANDORA2D_MSK_PIXEL_LEFT_BORDER , Criteria.PANDORA2D_MSK_PIXEL_LEFT_BORDER], + # fmt: on + ] + ), + id="Mix of criteria with window_size=3", + ), + pytest.param( + np.array( # left msk + [ + [0, 0, 0, 0, 0], + [0, 0, 1, 0, 0], + [0, 0, 2, 0, 0], + [0, 0, 0, 0, 0], + ] + ), + np.array( # right msk + [ + [0, 0, 0, 0, 0], + [0, 0, 1, 0, 0], + [0, 0, 2, 0, 0], + [0, 0, 0, 0, 0], + ] + ), + 0, # disp_col + 1, # disp_row + 1, # window_size + np.array( + [ + # fmt: off + [Criteria.VALID, Criteria.VALID, Criteria.PANDORA2D_MSK_PIXEL_RIGHT_NODATA, 
Criteria.VALID, Criteria.VALID], + [Criteria.VALID, Criteria.VALID, Criteria.PANDORA2D_MSK_PIXEL_LEFT_NODATA | Criteria.PANDORA2D_MSK_PIXEL_INVALIDITY_MASK_RIGHT, Criteria.VALID, Criteria.VALID], + [Criteria.VALID, Criteria.VALID, Criteria.PANDORA2D_MSK_PIXEL_INVALIDITY_MASK_LEFT, Criteria.VALID, Criteria.VALID], + [Criteria.PANDORA2D_MSK_PIXEL_RIGHT_DISPARITY_OUTSIDE, Criteria.PANDORA2D_MSK_PIXEL_RIGHT_DISPARITY_OUTSIDE, Criteria.PANDORA2D_MSK_PIXEL_RIGHT_DISPARITY_OUTSIDE, Criteria.PANDORA2D_MSK_PIXEL_RIGHT_DISPARITY_OUTSIDE, Criteria.PANDORA2D_MSK_PIXEL_RIGHT_DISPARITY_OUTSIDE], + # fmt: on + ] + ), + id="Centered invalid and no data in msk with window_size=1", + ), + pytest.param( + np.array( # left msk + [ + [0, 0, 0, 0, 0], + [0, 0, 0, 0, 0], + [0, 0, 0, 0, 0], + [0, 0, 0, 0, 0], + ] + ), + np.array( # right msk + [ + [0, 0, 0, 0, 0], + [0, 0, 0, 0, 0], + [0, 0, 0, 0, 1], + [0, 0, 0, 0, 0], + ] + ), + 1, # disp_col + 1, # disp_row + 3, # window_size + np.array( + [ + # fmt: off + [Criteria.PANDORA2D_MSK_PIXEL_LEFT_BORDER, Criteria.PANDORA2D_MSK_PIXEL_LEFT_BORDER, Criteria.PANDORA2D_MSK_PIXEL_LEFT_BORDER, Criteria.PANDORA2D_MSK_PIXEL_LEFT_BORDER, Criteria.PANDORA2D_MSK_PIXEL_LEFT_BORDER], + [Criteria.PANDORA2D_MSK_PIXEL_LEFT_BORDER, Criteria.VALID, Criteria.PANDORA2D_MSK_PIXEL_RIGHT_NODATA, Criteria.PANDORA2D_MSK_PIXEL_RIGHT_NODATA | Criteria.PANDORA2D_MSK_PIXEL_RIGHT_DISPARITY_OUTSIDE, Criteria.PANDORA2D_MSK_PIXEL_LEFT_BORDER], + [Criteria.PANDORA2D_MSK_PIXEL_LEFT_BORDER, Criteria.PANDORA2D_MSK_PIXEL_RIGHT_DISPARITY_OUTSIDE, Criteria.PANDORA2D_MSK_PIXEL_RIGHT_NODATA | Criteria.PANDORA2D_MSK_PIXEL_RIGHT_DISPARITY_OUTSIDE, Criteria.PANDORA2D_MSK_PIXEL_RIGHT_NODATA | Criteria.PANDORA2D_MSK_PIXEL_RIGHT_DISPARITY_OUTSIDE, Criteria.PANDORA2D_MSK_PIXEL_LEFT_BORDER], + [Criteria.PANDORA2D_MSK_PIXEL_LEFT_BORDER, Criteria.PANDORA2D_MSK_PIXEL_LEFT_BORDER, Criteria.PANDORA2D_MSK_PIXEL_LEFT_BORDER, Criteria.PANDORA2D_MSK_PIXEL_LEFT_BORDER, Criteria.PANDORA2D_MSK_PIXEL_LEFT_BORDER], + # fmt: on + ] + ), + id="Right no data on the border and window_size=3", + ), + pytest.param( + np.array( # left msk + [ + [0, 0, 0, 0, 0], + [0, 1, 0, 0, 0], + [0, 0, 0, 2, 0], + [0, 0, 0, 0, 0], + ] + ), + np.array( # right msk + [ + [0, 0, 0, 0, 0], + [0, 0, 0, 0, 0], + [0, 2, 0, 1, 0], + [0, 0, 0, 0, 0], + ] + ), + -1, # disp_col + 1, # disp_row + 5, # window_size + np.array( + [ + # fmt: off + [Criteria.PANDORA2D_MSK_PIXEL_LEFT_BORDER, Criteria.PANDORA2D_MSK_PIXEL_LEFT_BORDER, Criteria.PANDORA2D_MSK_PIXEL_LEFT_BORDER, Criteria.PANDORA2D_MSK_PIXEL_LEFT_BORDER, Criteria.PANDORA2D_MSK_PIXEL_LEFT_BORDER], + [Criteria.PANDORA2D_MSK_PIXEL_LEFT_BORDER, Criteria.PANDORA2D_MSK_PIXEL_LEFT_BORDER, Criteria.PANDORA2D_MSK_PIXEL_LEFT_BORDER, Criteria.PANDORA2D_MSK_PIXEL_LEFT_BORDER, Criteria.PANDORA2D_MSK_PIXEL_LEFT_BORDER], + [Criteria.PANDORA2D_MSK_PIXEL_LEFT_BORDER, Criteria.PANDORA2D_MSK_PIXEL_LEFT_BORDER, Criteria.PANDORA2D_MSK_PIXEL_LEFT_BORDER, Criteria.PANDORA2D_MSK_PIXEL_LEFT_BORDER, Criteria.PANDORA2D_MSK_PIXEL_LEFT_BORDER], + [Criteria.PANDORA2D_MSK_PIXEL_LEFT_BORDER, Criteria.PANDORA2D_MSK_PIXEL_LEFT_BORDER, Criteria.PANDORA2D_MSK_PIXEL_LEFT_BORDER, Criteria.PANDORA2D_MSK_PIXEL_LEFT_BORDER, Criteria.PANDORA2D_MSK_PIXEL_LEFT_BORDER], + # fmt: on + ] + ), + id="Window_size=5, only Criteria.PANDORA2D_MSK_PIXEL_LEFT_BORDER is raised", + ), + pytest.param( + np.array( # left msk + [ + [0, 0, 0, 0, 0], + [0, 0, 0, 0, 0], + [0, 0, 0, 0, 0], + [0, 0, 0, 0, 0], + ] + ), + np.array( # right msk + [ + [0, 0, 0, 0, 0], + [0, 
0, 0, 0, 0], + [0, 0, 0, 0, 0], + [0, 0, 0, 0, 0], + ] + ), + -5, # disp_col + 0, # disp_row + 1, # window_size + np.array( + [ + # fmt: off + [Criteria.PANDORA2D_MSK_PIXEL_DISPARITY_UNPROCESSED, Criteria.PANDORA2D_MSK_PIXEL_DISPARITY_UNPROCESSED, Criteria.PANDORA2D_MSK_PIXEL_RIGHT_DISPARITY_OUTSIDE, Criteria.PANDORA2D_MSK_PIXEL_RIGHT_DISPARITY_OUTSIDE, Criteria.PANDORA2D_MSK_PIXEL_RIGHT_DISPARITY_OUTSIDE], + [Criteria.PANDORA2D_MSK_PIXEL_DISPARITY_UNPROCESSED, Criteria.PANDORA2D_MSK_PIXEL_DISPARITY_UNPROCESSED, Criteria.PANDORA2D_MSK_PIXEL_RIGHT_DISPARITY_OUTSIDE, Criteria.PANDORA2D_MSK_PIXEL_RIGHT_DISPARITY_OUTSIDE, Criteria.PANDORA2D_MSK_PIXEL_RIGHT_DISPARITY_OUTSIDE], + [Criteria.PANDORA2D_MSK_PIXEL_DISPARITY_UNPROCESSED, Criteria.PANDORA2D_MSK_PIXEL_DISPARITY_UNPROCESSED, Criteria.PANDORA2D_MSK_PIXEL_RIGHT_DISPARITY_OUTSIDE, Criteria.PANDORA2D_MSK_PIXEL_RIGHT_DISPARITY_OUTSIDE, Criteria.PANDORA2D_MSK_PIXEL_RIGHT_DISPARITY_OUTSIDE], + [Criteria.PANDORA2D_MSK_PIXEL_DISPARITY_UNPROCESSED, Criteria.PANDORA2D_MSK_PIXEL_DISPARITY_UNPROCESSED, Criteria.PANDORA2D_MSK_PIXEL_RIGHT_DISPARITY_OUTSIDE, Criteria.PANDORA2D_MSK_PIXEL_RIGHT_DISPARITY_OUTSIDE, Criteria.PANDORA2D_MSK_PIXEL_RIGHT_DISPARITY_OUTSIDE], + # fmt: on + ] + ), + id="Column disparity out of the image or unprocessed for all points", + ), + # pylint: enable=line-too-long + ], + ) + def test_get_criteria_dataarray( + self, image_variable_disp, image, left_msk, cost_volumes, disp_col, disp_row, expected_criteria + ): + """ + Test get_criteria_dataarray method with + different disparities, window sizes and masks + """ + + image_variable_disp["msk"].data = left_msk + + criteria_dataarray = criteria.get_criteria_dataarray( + left_image=image_variable_disp, right_image=image, cv=cost_volumes + ) + + np.testing.assert_array_equal( + criteria_dataarray.sel(disp_row=disp_row, disp_col=disp_col), + expected_criteria, + ) diff --git a/tests/unit_tests/test_disparity.py b/tests/unit_tests/test_disparity.py index 1423549..33b9b10 100644 --- a/tests/unit_tests/test_disparity.py +++ b/tests/unit_tests/test_disparity.py @@ -34,22 +34,34 @@ from pandora.margins import Margins from pandora2d import matching_cost, disparity +from pandora2d.img_tools import add_disparity_grid class TestCheckConf: - """Test check conf.""" + """ + Description : Test check conf. + Requirement : EX_CONF_04 + """ def test_nominal_case(self): """Should not raise error.""" disparity.Disparity({"disparity_method": "wta", "invalid_disparity": -9999}) def test_disparity_method_is_mandatory(self): - """Should raise an error.""" + """ + Description : Should raise an error if disparity method isn't specified . + Data : + Requirement : EX_CONF_08 + """ with pytest.raises(json_checker.core.exceptions.MissKeyCheckerError): disparity.Disparity({"invalid_disparity": "5"}) def test_fails_with_bad_disparity_method_value(self): - """Should raise an error.""" + """ + Description : Should raise an error if the disparity method isn't correct. 
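(Aside on the refactor further down in this file: the disparity tests no longer build grid_min_col/grid_max_col/grid_min_row/grid_max_row arrays; the disparity grids now travel with the image datasets, so the exercised call chain shrinks to the sketch below. It only restates calls visible in this diff and reuses fixture names defined in this file.)

    cfg_mc = {"pipeline": {"matching_cost": {"matching_cost_method": "ssd", "window_size": 1}}}
    matching_cost_matcher = matching_cost.MatchingCost(cfg_mc["pipeline"]["matching_cost"])
    matching_cost_matcher.allocate_cost_volume_pandora(img_left=img_left, img_right=img_right, cfg=cfg_mc, margins=margins)
    cvs = matching_cost_matcher.compute_cost_volumes(img_left, img_right, margins)
    delta_x, delta_y, _ = disparity.Disparity({"disparity_method": "wta", "invalid_disparity": -5}).compute_disp_maps(cvs)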
+ Data : + Requirement : EX_CONF_08 + """ with pytest.raises(json_checker.core.exceptions.DictCheckerError): disparity.Disparity({"disparity_method": "WTN"}) @@ -102,21 +114,14 @@ def test_extrema_split(left_stereo_object, right_stereo_object, extrema_func, ex """ Test the min_split function """ - - left, right = left_stereo_object, right_stereo_object - # create a cost_volume, with SAD measure, window_size 1, dispx_min 0, dispx_max 1, dispy_min -1, dispy_max 0 cfg = {"pipeline": {"matching_cost": {"matching_cost_method": "sad", "window_size": 1}}} matching_cost_test = matching_cost.MatchingCost(cfg["pipeline"]["matching_cost"]) - grid_min_col = np.full((3, 3), 0) - grid_max_col = np.full((3, 3), 1) - grid_min_row = np.full((3, 3), -1) - grid_max_row = np.full((3, 3), 0) - matching_cost_test.allocate_cost_volume_pandora( - img_left=left, img_right=right, grid_min_col=grid_min_col, grid_max_col=grid_max_col, cfg=cfg - ) - cvs = matching_cost_test.compute_cost_volumes(left, right, grid_min_col, grid_max_col, grid_min_row, grid_max_row) + left_stereo_object["col_disparity"][1, :, :] = np.full((3, 3), 1) + left_stereo_object["row_disparity"][0, :, :] = np.full((3, 3), -1) + matching_cost_test.allocate_cost_volume_pandora(img_left=left_stereo_object, img_right=right_stereo_object, cfg=cfg) + cvs = matching_cost_test.compute_cost_volumes(left_stereo_object, right_stereo_object) disparity_test = disparity.Disparity({"disparity_method": "wta", "invalid_disparity": -9999}) # searching along dispy axis @@ -155,20 +160,14 @@ def test_arg_split(stereo_object_with_args, extrema_func, arg_extrema_func, expe matching_cost_test = matching_cost.MatchingCost(cfg["pipeline"]["matching_cost"]) - grid_min_col = np.full((3, 3), 0) - grid_max_col = np.full((3, 3), 1) - grid_min_row = np.full((3, 3), -1) - grid_max_row = np.full((3, 3), 0) + left_arg["col_disparity"][1, :, :] = np.full((5, 5), 1) + left_arg["row_disparity"][0, :, :] = np.full((5, 5), -1) matching_cost_test.allocate_cost_volume_pandora( img_left=left_arg, img_right=right_arg, - grid_min_col=grid_min_col, - grid_max_col=grid_max_col, cfg=cfg, ) - cvs = matching_cost_test.compute_cost_volumes( - left_arg, right_arg, grid_min_col, grid_max_col, grid_min_row, grid_max_row - ) + cvs = matching_cost_test.compute_cost_volumes(left_arg, right_arg) disparity_test = disparity.Disparity({"disparity_method": "wta", "invalid_disparity": -9999}) # searching along dispy axis @@ -178,54 +177,9 @@ def test_arg_split(stereo_object_with_args, extrema_func, arg_extrema_func, expe np.testing.assert_allclose(min_tensor, expected_result, atol=1e-06) -@pytest.mark.parametrize( - "margins", - [ - None, - Margins(0, 0, 0, 0), - Margins(0, 0, 1, 1), - Margins(1, 1, 1, 1), - pytest.param( - Margins(3, 3, 3, 3), - ), - pytest.param( - Margins(1, 2, 3, 4), - ), - ], -) -def test_compute_disparity_map_row(margins): - """ - Test function for disparity computation - """ - data = np.array( - ([[9, 10, 11, 12], [5, 6, 7, 8], [1, 2, 3, 4]]), - dtype=np.float64, - ) - mask = np.array(([0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0]), dtype=np.int16) - left = xr.Dataset( - {"im": (["row", "col"], data), "msk": (["row", "col"], mask)}, - coords={"row": np.arange(data.shape[0]), "col": np.arange(data.shape[1])}, - ) - left.attrs = { - "no_data_img": -9999, - "valid_pixels": 0, - "no_data_mask": 1, - "crs": None, - "transform": Affine(1.0, 0.0, 0.0, 0.0, 1.0, 0.0), - "col_disparity_source": [-2, 2], - "row_disparity_source": [-2, 2], - } - - data = np.array( - [[5, 6, 7, 8], [1, 2, 3, 4], [9, 
10, 11, 12]], - dtype=np.float64, - ) - mask = np.array(([0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0]), dtype=np.int16) - right = xr.Dataset( - {"im": (["row", "col"], data), "msk": (["row", "col"], mask)}, - coords={"row": np.arange(data.shape[0]), "col": np.arange(data.shape[1])}, - ) - right.attrs = { +@pytest.fixture() +def default_attributs(): + return { "no_data_img": -9999, "valid_pixels": 0, "no_data_mask": 1, @@ -233,125 +187,49 @@ def test_compute_disparity_map_row(margins): "transform": Affine(1.0, 0.0, 0.0, 0.0, 1.0, 0.0), } - ground_truth_col = np.array([[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0]]) - - ground_truth_row = np.array([[2, 2, 2, 2], [-1, -1, -1, -1], [-1, -1, -1, -1]]) +@pytest.fixture() +def cfg_mc(): # create matching_cost object with measure = ssd, window_size = 1 - cfg_mc = {"pipeline": {"matching_cost": {"matching_cost_method": "ssd", "window_size": 1}}} + return {"pipeline": {"matching_cost": {"matching_cost_method": "ssd", "window_size": 1}}} - matching_cost_matcher = matching_cost.MatchingCost(cfg_mc["pipeline"]["matching_cost"]) - # create disparity object with WTA method - cfg_disp = {"disparity_method": "wta", "invalid_disparity": -5} - disparity_matcher = disparity.Disparity(cfg_disp) - grid_min_col = np.full((3, 4), -2) - grid_max_col = np.full((3, 4), 2) - grid_min_row = np.full((3, 4), -2) - grid_max_row = np.full((3, 4), 2) - matching_cost_matcher.allocate_cost_volume_pandora( - img_left=left, - img_right=right, - grid_min_col=grid_min_col, - grid_max_col=grid_max_col, - cfg=cfg_mc, - margins=margins, - ) - cvs = matching_cost_matcher.compute_cost_volumes( - left, right, grid_min_col, grid_max_col, grid_min_row, grid_max_row, margins - ) +def matching_cost_obj(cfg): + return matching_cost.MatchingCost(cfg["pipeline"]["matching_cost"]) - delta_x, delta_y, _ = disparity_matcher.compute_disp_maps(cvs) - np.testing.assert_array_equal(ground_truth_col, delta_x) - np.testing.assert_array_equal(ground_truth_row, delta_y) +@pytest.fixture() +def disparity_matcher(): + # create disparity object with WTA method + cfg_disp = {"disparity_method": "wta", "invalid_disparity": -5} + return disparity.Disparity(cfg_disp) -@pytest.mark.parametrize( - "margins", - [ - None, - Margins(0, 0, 0, 0), - Margins(1, 0, 1, 0), - pytest.param( - Margins(3, 3, 3, 3), - ), - pytest.param( - Margins(1, 2, 3, 4), - ), - ], -) -def test_compute_disparity_map_col(margins): +@pytest.fixture() +def img_left(default_attributs, data_left, disparity_cfg): """ - Test function for disparity computation + Creates left image fixture """ - data = np.array( - ([[5, 6, 7, 8], [1, 2, 3, 4], [9, 10, 11, 12]]), - dtype=np.float64, - ) - mask = np.array(([0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0]), dtype=np.int16) left = xr.Dataset( - {"im": (["row", "col"], data), "msk": (["row", "col"], mask)}, - coords={"row": np.arange(data.shape[0]), "col": np.arange(data.shape[1])}, + {"im": (["row", "col"], data_left)}, + coords={"row": np.arange(data_left.shape[0]), "col": np.arange(data_left.shape[1])}, ) - left.attrs = { - "no_data_img": -9999, - "valid_pixels": 0, - "no_data_mask": 1, - "crs": None, - "transform": Affine(1.0, 0.0, 0.0, 0.0, 1.0, 0.0), - "col_disparity_source": [-3, 3], - "row_disparity_source": [-3, 3], - } + left.attrs = default_attributs + left.pipe(add_disparity_grid, disparity_cfg["col_disparity"], disparity_cfg["row_disparity"]) + return left - data = np.array( - [[8, 5, 6, 7], [4, 1, 2, 3], [12, 9, 10, 11]], - dtype=np.float64, - ) - mask = np.array(([0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 
0]), dtype=np.int16) - right = xr.Dataset( - {"im": (["row", "col"], data), "msk": (["row", "col"], mask)}, - coords={"row": np.arange(data.shape[0]), "col": np.arange(data.shape[1])}, - ) - right.attrs = { - "no_data_img": -9999, - "valid_pixels": 0, - "no_data_mask": 1, - "crs": None, - "transform": Affine(1.0, 0.0, 0.0, 0.0, 1.0, 0.0), - } - - ground_truth_row = np.array([[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0]]) - ground_truth_col = np.array([[1, 1, 1, -3], [1, 1, 1, -3], [1, 1, 1, -3]]) - - # create matching_cost object with measure = ssd, window_size = 1 - cfg_mc = {"pipeline": {"matching_cost": {"matching_cost_method": "ssd", "window_size": 1}}} - matching_cost_matcher = matching_cost.MatchingCost(cfg_mc["pipeline"]["matching_cost"]) - # create disparity object with WTA method - cfg_disp = {"disparity_method": "wta", "invalid_disparity": -5} - disparity_matcher = disparity.Disparity(cfg_disp) - - grid_min_col = np.full((3, 4), -3) - grid_max_col = np.full((3, 4), 3) - grid_min_row = np.full((3, 4), -3) - grid_max_row = np.full((3, 4), 3) - matching_cost_matcher.allocate_cost_volume_pandora( - img_left=left, - img_right=right, - grid_min_col=grid_min_col, - grid_max_col=grid_max_col, - cfg=cfg_mc, - margins=margins, - ) - cvs = matching_cost_matcher.compute_cost_volumes( - left, right, grid_min_col, grid_max_col, grid_min_row, grid_max_row, margins +@pytest.fixture() +def img_right(default_attributs, data_right): + """ + Creates right image fixture + """ + right = xr.Dataset( + {"im": (["row", "col"], data_right)}, + coords={"row": np.arange(data_right.shape[0]), "col": np.arange(data_right.shape[1])}, ) - - delta_x, delta_y, _ = disparity_matcher.compute_disp_maps(cvs) - - np.testing.assert_array_equal(ground_truth_col, delta_x) - np.testing.assert_array_equal(ground_truth_row, delta_y) + right.attrs = default_attributs + return right @pytest.mark.parametrize( @@ -360,58 +238,44 @@ def test_compute_disparity_map_col(margins): None, Margins(0, 0, 0, 0), Margins(1, 0, 1, 0), + Margins(1, 1, 1, 1), + Margins(3, 3, 3, 3), + Margins(1, 2, 3, 4), + ], +) +@pytest.mark.parametrize( + ["data_left", "data_right", "ground_truth_row", "ground_truth_col", "disparity_cfg"], + [ + pytest.param( + np.array(([[5, 6, 7, 8], [1, 2, 3, 4], [9, 10, 11, 12]]), dtype=np.float64), + np.array(([[8, 5, 6, 7], [4, 1, 2, 3], [12, 9, 10, 11]]), dtype=np.float64), + np.array([[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0]]), + np.array([[1, 1, 1, -3], [1, 1, 1, -3], [1, 1, 1, -3]]), + {"col_disparity": {"init": 0, "range": 3}, "row_disparity": {"init": 0, "range": 3}}, + id="disparity_map_col", + ), pytest.param( - Margins(3, 3, 3, 3), + np.array(([[9, 10, 11, 12], [5, 6, 7, 8], [1, 2, 3, 4]]), dtype=np.float64), + np.array(([[5, 6, 7, 8], [1, 2, 3, 4], [9, 10, 11, 12]]), dtype=np.float64), + np.array([[2, 2, 2, 2], [-1, -1, -1, -1], [-1, -1, -1, -1]]), + np.array([[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0]]), + {"col_disparity": {"init": 0, "range": 2}, "row_disparity": {"init": 0, "range": 2}}, + id="disparity_map_row", ), pytest.param( - Margins(1, 2, 3, 4), + np.array(([[9, 10, 11, 12], [5, 6, 7, 8], [1, 2, 3, 4]]), dtype=np.float64), + np.array(([[8, 5, 6, 7], [4, 1, 2, 3], [12, 9, 10, 11]]), dtype=np.float64), + np.array([[2, 2, 2, 2], [-1, -1, -1, -1], [-1, -1, -1, -1]]), + np.array([[1, 1, 1, -3], [1, 1, 1, -3], [1, 1, 1, -3]]), + {"col_disparity": {"init": 0, "range": 3}, "row_disparity": {"init": 0, "range": 3}}, + id="disparity_map_col_row", ), ], ) -def test_compute_disparity_map_col_row(margins): +def 
test_compute_disparity_map(margins, img_left, img_right, ground_truth_row, ground_truth_col): """ Test function for disparity computation """ - data = np.array( - ([[9, 10, 11, 12], [5, 6, 7, 8], [1, 2, 3, 4]]), - dtype=np.float64, - ) - mask = np.array(([0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0]), dtype=np.int16) - left = xr.Dataset( - {"im": (["row", "col"], data), "msk": (["row", "col"], mask)}, - coords={"row": np.arange(data.shape[0]), "col": np.arange(data.shape[1])}, - ) - left.attrs = { - "no_data_img": -9999, - "valid_pixels": 0, - "no_data_mask": 1, - "crs": None, - "transform": Affine(1.0, 0.0, 0.0, 0.0, 1.0, 0.0), - "col_disparity_source": [-3, 3], - "row_disparity_source": [-3, 3], - } - - data = np.array( - [[8, 5, 6, 7], [4, 1, 2, 3], [12, 9, 10, 11]], - dtype=np.float64, - ) - mask = np.array(([0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0]), dtype=np.int16) - right = xr.Dataset( - {"im": (["row", "col"], data), "msk": (["row", "col"], mask)}, - coords={"row": np.arange(data.shape[0]), "col": np.arange(data.shape[1])}, - ) - right.attrs = { - "no_data_img": -9999, - "valid_pixels": 0, - "no_data_mask": 1, - "crs": None, - "transform": Affine(1.0, 0.0, 0.0, 0.0, 1.0, 0.0), - } - - ground_truth_row = np.array([[2, 2, 2, 2], [-1, -1, -1, -1], [-1, -1, -1, -1]]) - - ground_truth_col = np.array([[1, 1, 1, -3], [1, 1, 1, -3], [1, 1, 1, -3]]) - # create matching_cost object with measure = ssd, window_size = 1 cfg_mc = {"pipeline": {"matching_cost": {"matching_cost_method": "ssd", "window_size": 1}}} matching_cost_matcher = matching_cost.MatchingCost(cfg_mc["pipeline"]["matching_cost"]) @@ -419,21 +283,13 @@ def test_compute_disparity_map_col_row(margins): cfg_disp = {"disparity_method": "wta", "invalid_disparity": -5} disparity_matcher = disparity.Disparity(cfg_disp) - grid_min_col = np.full((3, 4), -3) - grid_max_col = np.full((3, 4), 3) - grid_min_row = np.full((3, 4), -3) - grid_max_row = np.full((3, 4), 3) matching_cost_matcher.allocate_cost_volume_pandora( - img_left=left, - img_right=right, - grid_min_col=grid_min_col, - grid_max_col=grid_max_col, + img_left=img_left, + img_right=img_right, cfg=cfg_mc, margins=margins, ) - cvs = matching_cost_matcher.compute_cost_volumes( - left, right, grid_min_col, grid_max_col, grid_min_row, grid_max_row, margins - ) + cvs = matching_cost_matcher.compute_cost_volumes(img_left, img_right, margins) delta_x, delta_y, _ = disparity_matcher.compute_disp_maps(cvs) diff --git a/tests/unit_tests/test_estimation.py b/tests/unit_tests/test_estimation.py index 19cb2cd..2c82175 100644 --- a/tests/unit_tests/test_estimation.py +++ b/tests/unit_tests/test_estimation.py @@ -85,7 +85,9 @@ def estimation_class(full_configuration): ) def test_false_check_conf(estimation_method, range_col, range_row, sample_factor, error): """ - test check_conf of estimation with wrongs pipelines + Description : test check_conf of estimation with wrongs pipelines + Data : + Requirement : EX_CONF_08 """ with pytest.raises(error): @@ -122,7 +124,11 @@ def test_check_conf(): ], ) def test_default_parameters_values(full_configuration, parameter, expected_value): - """Test default values are the expected ones.""" + """ + Description : Test default values are the expected ones. 
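(Note on the {"init", "range"} disparity format asserted throughout these estimation tests: a constant dictionary describes the symmetric search interval [init - range, init + range], which replaces the former [min, max] pair. A tiny illustrative helper, not part of the package:)

    def disparity_interval(disparity_cfg):
        # Hypothetical helper: interval covered by a constant {"init", "range"} disparity.
        return [disparity_cfg["init"] - disparity_cfg["range"], disparity_cfg["init"] + disparity_cfg["range"]]

    disparity_interval({"init": 0, "range": 5})   # [-5, 5]
    disparity_interval({"init": -1, "range": 5})  # [-6, 4], the row disparity asserted below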
+ Data : + Requirement : EX_CONF_04 + """ del full_configuration[parameter] result = estimation.AbstractEstimation(full_configuration) # type: ignore[abstract] @@ -136,14 +142,14 @@ def test_update_cfg_with_estimation(estimation_class): """ gt_cfg = { - "input": {"col_disparity": [-2, 2], "row_disparity": [-2, 2]}, + "input": {"col_disparity": {"init": 1, "range": 2}, "row_disparity": {"init": 1, "range": 2}}, "pipeline": {"estimation": {"estimated_shifts": [-0.5, 1.3], "error": [1.0], "phase_diff": [1.0]}}, } cfg = estimation_class.update_cfg_with_estimation( {"input": {}, "pipeline": {"estimation": {}}}, - [-2, 2], - [-2, 2], + {"init": 1, "range": 2}, + {"init": 1, "range": 2}, -np.array([0.5, -1.3]), {"error": np.array([1.0]), "phase_diff": np.array([1.0])}, ) @@ -162,8 +168,8 @@ def test_estimation_computation(left_stereo_object, right_stereo_object, estimat row_disparity, col_disparity, shifts, extra_dict = estimation_.compute_estimation(left, right) - assert col_disparity == [-5, 5] - assert row_disparity == [-6, 4] + assert col_disparity == {"init": 0, "range": 5} + assert row_disparity == {"init": -1, "range": 5} assert np.array_equal(shifts, [-0.8, 0]) assert extra_dict["error"] == 0.9999999999855407 assert extra_dict["phase_diff"] == "1.06382330e-18" diff --git a/tests/unit_tests/test_img_tools/test_create_datasets_from_input.py b/tests/unit_tests/test_img_tools/test_create_datasets_from_input.py index 62e7952..6e758ed 100644 --- a/tests/unit_tests/test_img_tools/test_create_datasets_from_input.py +++ b/tests/unit_tests/test_img_tools/test_create_datasets_from_input.py @@ -33,51 +33,58 @@ from pandora2d import img_tools -def _make_input_section(left_img_path, right_img_path): - """This is not a fixture because we want to use it with different scopes.""" - return { - "left": { - "img": left_img_path, - "nodata": -9999, - }, - "right": { - "img": right_img_path, - "nodata": -9999, - }, - "col_disparity": [-2, 2], - "row_disparity": [-3, 4], - } - - -@pytest.fixture() -def input_section(left_img_path, right_img_path): - return _make_input_section(left_img_path, right_img_path) - - class TestReturnedValue: """Test expected properties of returned value of create_datasets_from_inputs.""" @pytest.fixture() - def result(self, left_img_path, right_img_path): - return img_tools.create_datasets_from_inputs(_make_input_section(left_img_path, right_img_path)) + def result(self, make_input_cfg): + return img_tools.create_datasets_from_inputs(make_input_cfg) - def test_use_function_from_pandora(self, mocker, input_section): + @pytest.mark.parametrize( + ["make_input_cfg"], + [ + pytest.param( + {"row_disparity": "correct_grid", "col_disparity": "correct_grid"}, + id="Correct disparity grids", + ), + pytest.param( + {"row_disparity": "constant_initial_disparity", "col_disparity": "second_constant_initial_disparity"}, + id="Correct disparity dictionaries", + ), + ], + indirect=["make_input_cfg"], + ) + def test_use_function_from_pandora(self, mocker, make_input_cfg): """Test we use `create_dataset_from_inputs` from pandora. We assume this function is well tested in Pandora and that we just need to test that we use it. 
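(For readability: make_input_cfg builds the input section from two flavours of disparity, both exercised by the parametrizations below. The values are illustrative and the grid path is hypothetical; it stands for the rasters produced by the *_grid fixtures.)

    col_disparity = {"init": 0, "range": 2}                         # constant initial disparity, same interval for every pixel
    col_disparity = {"init": "init_col_disparity.tif", "range": 2}  # initial disparity read from a raster, one value per pixel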
""" pandora_function = mocker.patch.object(img_tools.pandora_img_tools, "create_dataset_from_inputs") - img_tools.create_datasets_from_inputs(input_section) + img_tools.create_datasets_from_inputs(make_input_cfg) pandora_function.assert_has_calls( [ - mocker.call(input_section["left"], None), - mocker.call(input_section["right"], None), + mocker.call(make_input_cfg["left"], None), + mocker.call(make_input_cfg["right"], None), ], any_order=True, ) + @pytest.mark.parametrize( + ["make_input_cfg"], + [ + pytest.param( + {"row_disparity": "correct_grid", "col_disparity": "correct_grid"}, + id="Correct disparity grids", + ), + pytest.param( + {"row_disparity": "constant_initial_disparity", "col_disparity": "second_constant_initial_disparity"}, + id="Correct disparity dictionaries", + ), + ], + indirect=["make_input_cfg"], + ) def test_returns_left_and_right_datasets(self, result, left_img_path, right_img_path): """Test left and right datasets are returned as namedtuple.""" assert len(result) == 2 @@ -91,32 +98,318 @@ def test_returns_left_and_right_datasets(self, result, left_img_path, right_img_ pandora.img_tools.rasterio_open(right_img_path).read(1, out_dtype=np.float32), ) + @pytest.mark.parametrize( + ["make_input_cfg"], + [ + pytest.param( + {"row_disparity": "correct_grid", "col_disparity": "correct_grid"}, + id="Correct disparity grids", + ), + pytest.param( + {"row_disparity": "constant_initial_disparity", "col_disparity": "second_constant_initial_disparity"}, + id="Correct disparity dictionaries", + ), + ], + indirect=["make_input_cfg"], + ) def test_disp_band_coordinates(self, result): """Test disp_band coordinates is present.""" np.testing.assert_equal(result.left.coords["band_disp"].data, ["min", "max"]) - def test_disparity_source(self, result): + @pytest.mark.parametrize( + [ + "make_input_cfg", + "left_col_disparity_source", + "left_row_disparity_source", + "right_col_disparity_source", + "right_row_disparity_source", + ], + [ + pytest.param( + {"row_disparity": "correct_grid", "col_disparity": "second_correct_grid"}, + [-26, 10], + [-5, 8], + [-10, 26], + [-8, 5], + id="Correct disparity grids", + ), + pytest.param( + {"row_disparity": "constant_initial_disparity", "col_disparity": "second_constant_initial_disparity"}, + [-2, 2], + [-2, 4], + [-2, 2], + [-4, 2], + id="Correct disparity dictionaries", + ), + ], + indirect=["make_input_cfg"], + ) + def test_disparity_source( + self, + result, + left_col_disparity_source, + left_row_disparity_source, + right_col_disparity_source, + right_row_disparity_source, + ): """Test.""" - assert result.left.attrs["col_disparity_source"] == [-2, 2] - assert result.left.attrs["row_disparity_source"] == [-3, 4] - assert result.right.attrs["col_disparity_source"] == [-2, 2] - assert result.right.attrs["row_disparity_source"] == [-4, 3] - def test_resulting_disparity_grids(self, result): + assert result.left.attrs["col_disparity_source"] == left_col_disparity_source + assert result.left.attrs["row_disparity_source"] == left_row_disparity_source + assert result.right.attrs["col_disparity_source"] == right_col_disparity_source + assert result.right.attrs["row_disparity_source"] == right_row_disparity_source + + @pytest.mark.parametrize( + [ + "make_input_cfg", + "expected_left_col_disparity", + "expected_left_row_disparity", + "expected_right_col_disparity", + "expected_right_row_disparity", + ], + [ + pytest.param( + {"row_disparity": "correct_grid", "col_disparity": "second_correct_grid"}, + # Array of size 2x375x450 with alternating cols of 0, 
-26 and -6 + # for array[0,::] and alternating cols of 10, -16 and 4 for array[1,::] + np.array( + [ + np.tile([[0, -26, -6]], (375, 450 // 3 + 1))[:, :450], + np.tile([[10, -16, 4]], (375, 450 // 3 + 1))[:, :450], + ] + ), + # Array of size 2x375x450 with alternating rows of -3, -5 and -2 + # for array[0,::] and alternating rows of 7, 5 and 8 for array[1,::] + np.array( + [ + np.tile([[-3], [-5], [-2]], (375 // 3 + 1, 450))[:375, :], + np.tile([[7], [5], [8]], (375 // 3 + 1, 450))[:375, :], + ] + ), + # Array of size 2x375x450 with alternating cols of -10, 16 and -4 + # for array[0,::] and alternating cols of 0, 26 and 6 for array[1,::] + np.array( + [ + np.tile([[-10, 16, -4]], (375, 450 // 3 + 1))[:, :450], + np.tile([[0, 26, 6]], (375, 450 // 3 + 1))[:, :450], + ] + ), + # Array of size 2x375x450 with alternating rows of -7, -5 and -8 + # for array[0,::] and alternating rows of 3, 5 and 2 for array[1,::] + np.array( + [ + np.tile([[-7], [-5], [-8]], (375 // 3 + 1, 450))[:375, :], + np.tile([[3], [5], [2]], (375 // 3 + 1, 450))[:375, :], + ] + ), + id="Correct disparity grids", + ), + pytest.param( + {"row_disparity": "constant_initial_disparity", "col_disparity": "second_constant_initial_disparity"}, + np.array([np.full((375, 450), -2), np.full((375, 450), 2)]), + np.array([np.full((375, 450), -2), np.full((375, 450), 4)]), + np.array([np.full((375, 450), -2), np.full((375, 450), 2)]), + np.array([np.full((375, 450), -4), np.full((375, 450), 2)]), + id="Correct disparity dictionaries", + ), + ], + indirect=["make_input_cfg"], + ) + def test_resulting_disparity_grids( + self, + result, + expected_left_col_disparity, + expected_left_row_disparity, + expected_right_col_disparity, + expected_right_row_disparity, + ): """ - Test the method create_dataset_from_inputs with the disparity + Test the method create_dataset_from_inputs with dictionary and grid disparity """ - expected_left_col_disparity = np.array([np.full((375, 450), -2), np.full((375, 450), 2)]) - expected_left_row_disparity = np.array([np.full((375, 450), -3), np.full((375, 450), 4)]) - expected_right_col_disparity = np.array([np.full((375, 450), -2), np.full((375, 450), 2)]) - expected_right_row_disparity = np.array([np.full((375, 450), -4), np.full((375, 450), 3)]) np.testing.assert_array_equal(result.left["col_disparity"], expected_left_col_disparity) np.testing.assert_array_equal(result.left["row_disparity"], expected_left_row_disparity) np.testing.assert_array_equal(result.right["col_disparity"], expected_right_col_disparity) np.testing.assert_array_equal(result.right["row_disparity"], expected_right_row_disparity) + @pytest.mark.parametrize( + [ + "make_input_cfg", + "expected_left_col_disparity", + "expected_left_row_disparity", + "expected_right_col_disparity", + "expected_right_row_disparity", + "roi", + ], + [ + pytest.param( + {"row_disparity": "correct_grid", "col_disparity": "second_correct_grid"}, + # Array of size 2x96x97 with alternating cols of 0, -26 and -6 + # for array[0,::] and alternating cols of 10, -16 and 4 for array[1,::] + np.array( + [ + np.tile([[0, -26, -6]], (375, 450 // 3 + 1))[7:103, 8:105], + np.tile([[10, -16, 4]], (375, 450 // 3 + 1))[7:103, 8:105], + ] + ), + # Array of size 2x96x97 with alternating rows of -3, -5 and -2 + # for array[0,::] and alternating rows of 7, 5 and 8 for array[1,::] + np.array( + [ + np.tile([[-3], [-5], [-2]], (375 // 3 + 1, 450))[7:103, 8:105], + np.tile([[7], [5], [8]], (375 // 3 + 1, 450))[7:103, 8:105], + ] + ), + # Array of size 2x96x97 with alternating cols of 
-10, 16 and -4 + # for array[0,::] and alternating cols of 0, 26 and 6 for array[1,::] + np.array( + [ + np.tile([[-10, 16, -4]], (375, 450 // 3 + 1))[7:103, 8:105], + np.tile([[0, 26, 6]], (375, 450 // 3 + 1))[7:103, 8:105], + ] + ), + # Array of size 2x96x97 with alternating rows of -7, -5 and -8 + # for array[0,::] and alternating rows of 3, 5 and 2 for array[1,::] + np.array( + [ + np.tile([[-7], [-5], [-8]], (375 // 3 + 1, 450))[7:103, 8:105], + np.tile([[3], [5], [2]], (375 // 3 + 1, 450))[7:103, 8:105], + ] + ), + # ROI + {"col": {"first": 10, "last": 100}, "row": {"first": 10, "last": 100}, "margins": (2, 3, 4, 2)}, + id="Disparity grids with centered ROI", + ), + pytest.param( + {"row_disparity": "constant_initial_disparity", "col_disparity": "second_constant_initial_disparity"}, + np.array([np.full((96, 97), -2), np.full((96, 97), 2)]), + np.array([np.full((96, 97), -2), np.full((96, 97), 4)]), + np.array([np.full((96, 97), -2), np.full((96, 97), 2)]), + np.array([np.full((96, 97), -4), np.full((96, 97), 2)]), + {"col": {"first": 10, "last": 100}, "row": {"first": 10, "last": 100}, "margins": (2, 3, 4, 2)}, + id="Disparity dictionaries with centered ROI", + ), + pytest.param( + {"row_disparity": "correct_grid", "col_disparity": "second_correct_grid"}, + # Array of size 2x96x102 with alternating cols of 0, -26 and -6 + # for array[0,::] and alternating cols of 10, -16 and 4 for array[1,::] + np.array( + [ + np.tile([[0, -26, -6]], (375, 450 // 3 + 1))[7:103, 348:450], + np.tile([[10, -16, 4]], (375, 450 // 3 + 1))[7:103, 348:450], + ] + ), + # Array of size 2x96x102 with alternating rows of -3, -5 and -2 + # for array[0,::] and alternating rows of 7, 5 and 8 for array[1,::] + np.array( + [ + np.tile([[-3], [-5], [-2]], (375 // 3 + 1, 450))[7:103, 348:450], + np.tile([[7], [5], [8]], (375 // 3 + 1, 450))[7:103, 348:450], + ] + ), + # Array of size 2x96x102 with alternating cols of -10, 16 and -4 + # for array[0,::] and alternating cols of 0, 26 and 6 for array[1,::] + np.array( + [ + np.tile([[-10, 16, -4]], (375, 450 // 3 + 1))[7:103, 348:450], + np.tile([[0, 26, 6]], (375, 450 // 3 + 1))[7:103, 348:450], + ] + ), + # Array of size 2x96x102 with alternating rows of -7, -5 and -8 + # for array[0,::] and alternating rows of 3, 5 and 2 for array[1,::] + np.array( + [ + np.tile([[-7], [-5], [-8]], (375 // 3 + 1, 450))[7:103, 348:450], + np.tile([[3], [5], [2]], (375 // 3 + 1, 450))[7:103, 348:450], + ] + ), + # ROI + {"col": {"first": 350, "last": 460}, "row": {"first": 10, "last": 100}, "margins": (2, 3, 4, 2)}, + id="Disparity grids with right overlapping ROI", + ), + pytest.param( + {"row_disparity": "constant_initial_disparity", "col_disparity": "second_constant_initial_disparity"}, + np.array([np.full((96, 102), -2), np.full((96, 102), 2)]), + np.array([np.full((96, 102), -2), np.full((96, 102), 4)]), + np.array([np.full((96, 102), -2), np.full((96, 102), 2)]), + np.array([np.full((96, 102), -4), np.full((96, 102), 2)]), + {"col": {"first": 350, "last": 460}, "row": {"first": 10, "last": 100}, "margins": (2, 3, 4, 2)}, + id="Disparity dictionaries with right overlapping ROI", + ), + pytest.param( + {"row_disparity": "correct_grid", "col_disparity": "second_correct_grid"}, + # Array of size 2x103x97 with alternating cols of 0, -26 and -6 + # for array[0,::] and alternating cols of 10, -16 and 4 for array[1,::] + np.array( + [ + np.tile([[0, -26, -6]], (375, 450 // 3 + 1))[0:103, 8:105], + np.tile([[10, -16, 4]], (375, 450 // 3 + 1))[0:103, 8:105], + ] + ), + # Array of size 
2x103x97 with alternating rows of -3, -5 and -2 + # for array[0,::] and alternating rows of 7, 5 and 8 for array[1,::] + np.array( + [ + np.tile([[-3], [-5], [-2]], (375 // 3 + 1, 450))[0:103, 8:105], + np.tile([[7], [5], [8]], (375 // 3 + 1, 450))[0:103, 8:105], + ] + ), + # Array of size 2x103x97 with alternating cols of -10, 16 and -4 + # for array[0,::] and alternating cols of 0, 26 and 6 for array[1,::] + np.array( + [ + np.tile([[-10, 16, -4]], (375, 450 // 3 + 1))[0:103, 8:105], + np.tile([[0, 26, 6]], (375, 450 // 3 + 1))[0:103, 8:105], + ] + ), + # Array of size 2x103x97 with alternating rows of -7, -5 and -8 + # for array[0,::] and alternating rows of 3, 5 and 2 for array[1,::] + np.array( + [ + np.tile([[-7], [-5], [-8]], (375 // 3 + 1, 450))[0:103, 8:105], + np.tile([[3], [5], [2]], (375 // 3 + 1, 450))[0:103, 8:105], + ] + ), + # ROI + {"col": {"first": 10, "last": 100}, "row": {"first": 0, "last": 100}, "margins": (2, 3, 4, 2)}, + id="Disparity grids with top overlapping ROI", + ), + pytest.param( + {"row_disparity": "constant_initial_disparity", "col_disparity": "second_constant_initial_disparity"}, + np.array([np.full((103, 97), -2), np.full((103, 97), 2)]), + np.array([np.full((103, 97), -2), np.full((103, 97), 4)]), + np.array([np.full((103, 97), -2), np.full((103, 97), 2)]), + np.array([np.full((103, 97), -4), np.full((103, 97), 2)]), + {"col": {"first": 10, "last": 100}, "row": {"first": 0, "last": 100}, "margins": (2, 3, 4, 2)}, + id="Disparity dictionaries with top overlapping ROI", + ), + ], + indirect=["make_input_cfg"], + ) + def test_resulting_disparity_grids_with_roi( + self, + make_input_cfg, + expected_left_col_disparity, + expected_left_row_disparity, + expected_right_col_disparity, + expected_right_row_disparity, + roi, + ): + """ + Test the method create_dataset_from_inputs with dictionary and grid disparity with ROI + + """ + + make_input_cfg["ROI"] = roi + + datasets = img_tools.create_datasets_from_inputs(make_input_cfg, roi=roi) + + np.testing.assert_array_equal(datasets.left["col_disparity"], expected_left_col_disparity) + np.testing.assert_array_equal(datasets.left["row_disparity"], expected_left_row_disparity) + np.testing.assert_array_equal(datasets.right["col_disparity"], expected_right_col_disparity) + np.testing.assert_array_equal(datasets.right["row_disparity"], expected_right_row_disparity) + class TestDisparityChecking: """Test checks done on disparities.""" @@ -131,53 +424,141 @@ class TestDisparityChecking: ), ], ) - def test_fails_when_disparity_is_missing(self, input_section, missing, message): - """Test when disparity is not provided.""" + def test_fails_when_disparity_is_missing(self, correct_input_cfg, missing, message): + """ + Description : Test when disparity is not provided. 
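(Summary of the input disparity shapes checked by this class; each rejected form matches one of the error messages asserted below, and the grid path is only illustrative.)

    col_disparity = {"init": 0, "range": 2}                    # accepted: constant initial disparity
    col_disparity = {"init": "init_col_grid.tif", "range": 2}  # accepted: initial disparity grid
    col_disparity = [-2, 2]                        # rejected: "Disparity should be a dictionary"
    col_disparity = {"wrong_init": 2, "range": 2}  # rejected: missing "init"/"range" keys
    col_disparity = {"init": 2.0, "range": 2}      # rejected: init must be an integer or a path to a grid
    col_disparity = {"init": 2, "range": -2}       # rejected: range must be an integer greater or equal to 0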
+ Data : + - Left image : cones/monoband/left.png + - Right image : cones/monoband/right.png + Requirement : EX_CONF_08 + """ for key in missing: - del input_section[key] + del correct_input_cfg["input"][key] with pytest.raises(KeyError) as exc_info: - img_tools.create_datasets_from_inputs(input_section) + img_tools.create_datasets_from_inputs(correct_input_cfg["input"]) assert exc_info.value.args[0] == message - @pytest.mark.parametrize("disparity", [None, 1, 3.14, "grid_path"]) + @pytest.mark.parametrize("disparity", [None, 1, 3.14, [-2, 2]]) @pytest.mark.parametrize("disparity_key", ["col_disparity", "row_disparity"]) - def test_fails_when_disparities_are_not_lists_or_tuples(self, input_section, disparity_key, disparity): - """Test.""" - input_section[disparity_key] = disparity + def test_fails_when_disparities_have_wrong_type(self, correct_input_cfg, disparity_key, disparity): + """ + Description : Test if disparities are not dictionaries or grid in the input section + Data : + - Left image : cones/monoband/left.png + - Right image : cones/monoband/right.png + Requirement : EX_CONF_08 + """ + correct_input_cfg["input"][disparity_key] = disparity with pytest.raises(ValueError) as exc_info: - img_tools.create_datasets_from_inputs(input_section) - assert exc_info.value.args[0] == "Disparity should be iterable of length 2" + img_tools.create_datasets_from_inputs(correct_input_cfg["input"]) + assert exc_info.value.args[0] == "Disparity should be a dictionary" - @pytest.mark.parametrize("disparity", [None, np.nan, np.inf, float("nan"), float("inf")]) + @pytest.mark.parametrize("disparity", [{"wrong_init": 2, "range": 2}]) @pytest.mark.parametrize("disparity_key", ["col_disparity", "row_disparity"]) - def test_fails_with_bad_disparity_values(self, input_section, disparity_key, disparity): - """Test.""" - input_section[disparity_key] = disparity + def test_fails_when_dict_has_wrong_keys(self, correct_input_cfg, disparity_key, disparity): + """ + Description : Test dict with wrong keys + Data : + - Left image : cones/monoband/left.png + - Right image : cones/monoband/right.png + Requirement : EX_CONF_08 + """ + correct_input_cfg["input"][disparity_key] = disparity with pytest.raises(ValueError) as exc_info: - img_tools.create_datasets_from_inputs(input_section) - assert exc_info.value.args[0] == "Disparity should be iterable of length 2" + img_tools.create_datasets_from_inputs(correct_input_cfg["input"]) + + assert exc_info.value.args[0] == "Disparity dictionary should contains keys : init and range" + @pytest.mark.parametrize("disparity", [{"init": 2.0, "range": 2}]) @pytest.mark.parametrize("disparity_key", ["col_disparity", "row_disparity"]) - def test_fails_when_disparity_max_lt_disparity_min(self, input_section, disparity_key): - """Test.""" - input_section[disparity_key] = [8, -10] + def test_fails_when_init_is_a_float(self, correct_input_cfg, disparity_key, disparity): + """ + Description : Test if init is a float + Data : + - Left image : cones/monoband/left.png + - Right image : cones/monoband/right.png + Requirement : EX_CONF_08 + """ + correct_input_cfg["input"][disparity_key] = disparity + with pytest.raises(ValueError) as exc_info: - img_tools.create_datasets_from_inputs(input_section) - assert exc_info.value.args[0] == "Min disparity (8) should be lower than Max disparity (-10)" + img_tools.create_datasets_from_inputs(correct_input_cfg["input"]) + + assert exc_info.value.args[0] == "Disparity init should be an integer or a path to a grid" + + @pytest.mark.parametrize("disparity", 
[{"init": 2, "range": -2}]) + @pytest.mark.parametrize("disparity_key", ["col_disparity", "row_disparity"]) + def test_fails_when_range_is_lt_0(self, correct_input_cfg, disparity_key, disparity): + """ + Description : Test if range is lower than 0 + Data : + - Left image : cones/monoband/left.png + - Right image : cones/monoband/right.png + Requirement : EX_CONF_08 + """ + correct_input_cfg["input"][disparity_key] = disparity + + with pytest.raises(ValueError) as exc_info: + img_tools.create_datasets_from_inputs(correct_input_cfg["input"]) + + assert exc_info.value.args[0] == "Disparity range should be an integer greater or equal to 0" + + @pytest.mark.parametrize("disparity", [None, np.nan, np.inf, float("nan"), float("inf")]) + @pytest.mark.parametrize("disparity_key", ["col_disparity", "row_disparity"]) + def test_fails_with_bad_disparity_values(self, correct_input_cfg, disparity_key, disparity): + """ + Description : Test if the disparity is a dictionary + Data : + - Left image : cones/monoband/left.png + - Right image : cones/monoband/right.png + Requirement : EX_CONF_08 + """ + correct_input_cfg["input"][disparity_key] = disparity + + with pytest.raises(ValueError) as exc_info: + img_tools.create_datasets_from_inputs(correct_input_cfg["input"]) + assert exc_info.value.args[0] == "Disparity should be a dictionary" + + @pytest.mark.parametrize( + ["make_input_cfg"], + [ + pytest.param( + {"row_disparity": "negative_exploration_grid", "col_disparity": "correct_grid"}, + id="Negative exploration grid for row disparity", + ), + pytest.param( + {"row_disparity": "correct_grid", "col_disparity": "negative_exploration_grid"}, + id="Negative exploration grid for col disparity", + ), + ], + indirect=["make_input_cfg"], + ) + def test_fails_when_range_band_is_lt_0(self, make_input_cfg): + """ + Description : Test if range band contains values lower than 0 + Data : + - Left image : cones/monoband/left.png + - Right image : cones/monoband/right.png + """ + + with pytest.raises(ValueError) as exc_info: + img_tools.create_datasets_from_inputs(make_input_cfg) + + assert exc_info.value.args[0] == "Disparity range should be an integer greater or equal to 0" - def test_create_dataset_from_inputs_with_estimation_step(self, input_section): + def test_create_dataset_from_inputs_with_estimation_step(self, correct_input_cfg): """ test dataset_from_inputs with an estimation step and no disparity range """ - configuration_with_estimation = {"input": input_section} + configuration_with_estimation = correct_input_cfg del configuration_with_estimation["input"]["row_disparity"] del configuration_with_estimation["input"]["col_disparity"] configuration_with_estimation["pipeline"] = {"estimation": {"estimation_method": "phase_cross_correlation"}} result = img_tools.create_datasets_from_inputs( - input_section, estimation_cfg=configuration_with_estimation["pipeline"].get("estimation") + correct_input_cfg["input"], estimation_cfg=configuration_with_estimation["pipeline"].get("estimation") ) assert result.left.attrs["col_disparity_source"] == [-9999, -9999] diff --git a/tests/unit_tests/test_img_tools/test_get_roi_processing.py b/tests/unit_tests/test_img_tools/test_get_roi_processing.py index 4bd11e3..c3e2d95 100644 --- a/tests/unit_tests/test_img_tools/test_get_roi_processing.py +++ b/tests/unit_tests/test_img_tools/test_get_roi_processing.py @@ -25,6 +25,7 @@ # pylint: disable=redefined-outer-name import pytest +import numpy as np from pandora2d import img_tools @@ -44,12 +45,30 @@ def default_roi(): 
@pytest.mark.parametrize( ["col_disparity", "row_disparity", "expected"], [ - pytest.param([-60, 0], [0, 2], (60, 2, 2, 2), id="Negative disparity for columns"), - pytest.param([0, 2], [-60, 0], (2, 60, 2, 2), id="Negative disparity for rows"), - pytest.param([-60, 0], [-60, 0], (60, 60, 2, 2), id="Negative disparity for columns and rows"), - pytest.param([0, 60], [0, 60], (2, 2, 60, 60), id="Positive disparity for columns and rows"), - pytest.param([-1, 1], [-1, 1], (2, 2, 2, 2), id="Margins greater than disparities"), - pytest.param([-3, 3], [-3, 3], (3, 3, 3, 3), id="Margins lower than disparities"), + pytest.param( + {"init": -30, "range": 30}, {"init": 1, "range": 1}, (60, 2, 2, 2), id="Negative disparity for columns" + ), + pytest.param( + {"init": 1, "range": 1}, {"init": -30, "range": 30}, (2, 60, 2, 2), id="Negative disparity for rows" + ), + pytest.param( + {"init": -30, "range": 30}, + {"init": -30, "range": 30}, + (60, 60, 2, 2), + id="Negative disparity for columns and rows", + ), + pytest.param( + {"init": 30, "range": 30}, + {"init": 30, "range": 30}, + (2, 2, 60, 60), + id="Positive disparity for columns and rows", + ), + pytest.param( + {"init": 0, "range": 1}, {"init": 0, "range": 1}, (2, 2, 2, 2), id="Margins greater than disparities" + ), + pytest.param( + {"init": 0, "range": 3}, {"init": 0, "range": 3}, (3, 3, 3, 3), id="Margins lower than disparities" + ), ], ) def test_roi_with_negative_and_positive_disparities(default_roi, col_disparity, row_disparity, expected): @@ -60,3 +79,67 @@ def test_roi_with_negative_and_positive_disparities(default_roi, col_disparity, assert test_roi_column["margins"] == expected assert test_roi_column == default_roi + + +@pytest.fixture +def positive_grid(left_img_shape, create_disparity_grid_fixture): + """Create a positive disparity grid and save it in tmp""" + + height, width = left_img_shape + + # Array of size (height, width) with alternating rows of 6 and 8 + init_band = np.tile([[6], [8]], (height // 2 + 1, width))[:height, :] + + return create_disparity_grid_fixture(init_band, 2, "postive_disparity.tif") + + +@pytest.fixture +def negative_grid(left_img_shape, create_disparity_grid_fixture): + """Create a negative disparity grid and save it in tmp""" + + height, width = left_img_shape + + # Array of size (height, width) with alternating rows of -5 and -7 + init_band = np.tile([[-5], [-7]], (height // 2 + 1, width))[:height, :] + + return create_disparity_grid_fixture(init_band, 2, "negative_disparity.tif") + + +@pytest.fixture +def lower_than_margins_grid(left_img_shape, create_disparity_grid_fixture): + """ + Create a disparity grid with disparity lower than default_roi margins + and save it in tmp + """ + + height, width = left_img_shape + + init_band = np.full((height, width), 0) + + return create_disparity_grid_fixture(init_band, 1, "lower_than_margins_disparity.tif") + + +@pytest.mark.parametrize( + ["col_disparity", "row_disparity", "expected"], + [ + pytest.param("second_correct_grid", "correct_grid", (26, 5, 10, 8), id="Negative and positive disparities"), + pytest.param("negative_grid", "positive_grid", (9, 4, 3, 10), id="Negative disparities for columns"), + pytest.param("positive_grid", "negative_grid", (4, 9, 10, 3), id="Negative disparities for rows"), + pytest.param( + "lower_than_margins_grid", + "lower_than_margins_grid", + (2, 2, 2, 2), + id="Margins greater than disparities", + ), + ], +) +def test_roi_with_negative_and_positive_disparities_grids(default_roi, col_disparity, row_disparity, expected, request): + """ 
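(A hedged reading of how the expected margins relate to the disparity extrema, consistent with every parametrization in this file and assuming the default ROI margins of 2; the helper name is ours and the tuple follows the (left, up, right, down) order used above.)

    def expected_margins(col_min, col_max, row_min, row_max, roi_margins=(2, 2, 2, 2)):
        # Each side keeps at least the ROI margin and grows to cover the disparity reaching that side.
        left, up, right, down = roi_margins
        return (max(left, -col_min), max(up, -row_min), max(right, col_max), max(down, row_max))

    expected_margins(-60, 0, 0, 2)    # (60, 2, 2, 2): negative disparity for columns
    expected_margins(-26, 10, -5, 8)  # (26, 5, 10, 8): the "correct grid" case below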
+ Test the get_roi_processing method with grid disparities + """ + test_roi_column = img_tools.get_roi_processing( + default_roi, request.getfixturevalue(col_disparity), request.getfixturevalue(row_disparity) + ) + + assert test_roi_column["margins"] == expected + assert test_roi_column == default_roi diff --git a/tests/unit_tests/test_interpolation_filter/test_bicubic.py b/tests/unit_tests/test_interpolation_filter/test_bicubic.py index 711e360..5f90858 100644 --- a/tests/unit_tests/test_interpolation_filter/test_bicubic.py +++ b/tests/unit_tests/test_interpolation_filter/test_bicubic.py @@ -30,7 +30,7 @@ @pytest.fixture() def filter_instance(): return pandora2d.interpolation_filter.AbstractFilter( # pylint: disable=abstract-class-instantiated - filter_method="bicubic", + cfg={"method": "bicubic"}, ) # type: ignore[abstract] diff --git a/tests/unit_tests/test_interpolation_filter/test_cardinal_sine.py b/tests/unit_tests/test_interpolation_filter/test_cardinal_sine.py new file mode 100644 index 0000000..8020fb3 --- /dev/null +++ b/tests/unit_tests/test_interpolation_filter/test_cardinal_sine.py @@ -0,0 +1,147 @@ +# Copyright (c) 2024 Centre National d'Etudes Spatiales (CNES). +# This file is part of PANDORA2D +# +# https://github.com/CNES/Pandora2D +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +""" +Test Cardinal sine filter. 
+""" + +# Make pylint happy with fixtures: +# pylint: disable=redefined-outer-name, protected-access + +import json_checker +import numpy as np +import pytest + +from pandora.margins import Margins + +import pandora2d.interpolation_filter + + +@pytest.fixture() +def size(): + return 6 + + +@pytest.fixture() +def fractional_shift(): + return 0.25 + + +@pytest.fixture() +def config(size): + return {"method": "sinc", "size": size} + + +@pytest.fixture() +def filter_instance(config, fractional_shift): + return pandora2d.interpolation_filter.AbstractFilter( # pylint: disable=abstract-class-instantiated + cfg=config, + fractional_shift=fractional_shift, + ) # type: ignore[abstract] + + +def test_factory(filter_instance): + assert isinstance(filter_instance, pandora2d.interpolation_filter.cardinal_sine.CardinalSine) + + +class TestCheckConf: + """Test the check_conf method.""" + + def test_method_field(self, config): + """An exception should be raised if `method` is not `sinc`.""" + config["method"] = "invalid_method" + + with pytest.raises(json_checker.core.exceptions.DictCheckerError) as err: + pandora2d.interpolation_filter.cardinal_sine.CardinalSine(config) + assert "invalid_method" in err.value.args[0] + + @pytest.mark.parametrize("size", [5, 22]) + def test_out_of_bound_size_field(self, config): + """An exception should be raised if `size` is not between 6 and 21.""" + with pytest.raises(json_checker.core.exceptions.DictCheckerError) as err: + pandora2d.interpolation_filter.cardinal_sine.CardinalSine(config) + assert "size" in err.value.args[0] + + def test_size_is_optional_and_default_value(self): + """If size is not given into config it should default to 6.""" + config = {"method": "sinc"} + sinc_filter = pandora2d.interpolation_filter.cardinal_sine.CardinalSine(config) + assert sinc_filter._HALF_SIZE == 6 + assert sinc_filter._SIZE == 13 + + +@pytest.mark.parametrize("fractional_shift", [-0.5, 1, 4]) +def test_raise_error_with_invalid_fractional(config, fractional_shift): + """Test an exception is raised if not in range [0,1[.""" + with pytest.raises( + ValueError, + match=f"fractional_shift greater than 0 and lower than 1 expected, got {fractional_shift}", + ): + pandora2d.interpolation_filter.AbstractFilter( # pylint: disable=abstract-class-instantiated + cfg=config, + fractional_shift=fractional_shift, + ) # type: ignore[abstract] + + +@pytest.mark.parametrize("size", [6, 21]) +def test_margins(filter_instance, size): + assert filter_instance.margins == Margins(size, size, size, size) + + +@pytest.fixture() +def reference_implementation(size, subpixel): + """Reference implementation translated from Medicis.""" + sigma = size + nb_of_coeffs_per_precision = 1 + (size * 2) + tab_coeffs = np.zeros([subpixel, nb_of_coeffs_per_precision]) + + aux1 = -2.0 / (sigma * sigma * np.pi) + + for i in range(subpixel): + precision = i / subpixel + + for j in range(nb_of_coeffs_per_precision): + aux = (precision - (j - size)) * np.pi + + if aux == 0: + tab_coeffs[i][j] = 1 + else: + tab_coeffs[i][j] = np.sin(aux) * np.exp(aux1 * aux * aux) / aux + + somme = np.sum(tab_coeffs[i]) + tab_coeffs[i] /= somme + + return tab_coeffs + + +@pytest.mark.parametrize("size", [6, 10, 21]) +@pytest.mark.parametrize("subpixel", [4, 8, 16]) +def test_compute_coefficient_table(reference_implementation, size, subpixel): + """Test values computed against reference implementation.""" + fractional_shifts = np.arange(subpixel) / subpixel + result = pandora2d.interpolation_filter.cardinal_sine.compute_coefficient_table(size, 
fractional_shifts) + # Do to the use of `np.sinc` there is a little difference in the results so we use almost_equal + np.testing.assert_array_almost_equal(result, reference_implementation) + + +def test_get_coeffs(filter_instance, size): + """Test retrieve good coefficients from computed table.""" + fractional_shifts = np.arange(4) / 4 + coeffs = pandora2d.interpolation_filter.cardinal_sine.compute_coefficient_table(size, fractional_shifts) + + np.testing.assert_array_equal(filter_instance.get_coeffs(0.25), coeffs[1]) + np.testing.assert_array_equal(filter_instance.get_coeffs(0.5), coeffs[2]) + np.testing.assert_array_equal(filter_instance.get_coeffs(0.75), coeffs[3]) diff --git a/tests/unit_tests/test_interpolation_filter/test_interpolation_filter.py b/tests/unit_tests/test_interpolation_filter/test_interpolation_filter.py index a2dc5a6..b565003 100644 --- a/tests/unit_tests/test_interpolation_filter/test_interpolation_filter.py +++ b/tests/unit_tests/test_interpolation_filter/test_interpolation_filter.py @@ -227,7 +227,7 @@ def test_interpolate(filter_method, image, positions_col, positions_row, expecte """ assert ( - AbstractFilter(filter_method).interpolate( # type: ignore[abstract] # pylint: disable=abstract-class-instantiated + AbstractFilter({"method": filter_method}).interpolate( # type: ignore[abstract] # pylint: disable=abstract-class-instantiated request.getfixturevalue(image), (positions_col, positions_row) ) == expected_values diff --git a/tests/unit_tests/test_matching_cost/conftest.py b/tests/unit_tests/test_matching_cost/conftest.py index 7886a77..c7cde2c 100644 --- a/tests/unit_tests/test_matching_cost/conftest.py +++ b/tests/unit_tests/test_matching_cost/conftest.py @@ -33,7 +33,7 @@ from pandora import import_plugin from pandora2d import matching_cost -from pandora2d.img_tools import create_datasets_from_inputs +from pandora2d.img_tools import create_datasets_from_inputs, add_disparity_grid @pytest.fixture() @@ -81,8 +81,8 @@ def input_config(left_image, right_image): "img": right_image, "nodata": -9999, }, - "col_disparity": [0, 1], - "row_disparity": [-1, 1], + "col_disparity": {"init": 1, "range": 1}, + "row_disparity": {"init": 1, "range": 2}, } @@ -143,7 +143,7 @@ def create(data): @pytest.fixture() def left_zncc(create_image): - """Left image for Znnc.""" + """Left image for Zncc.""" data = np.array( [ [1, 1, 1, 1, 1], @@ -159,7 +159,7 @@ def left_zncc(create_image): @pytest.fixture() def right_zncc(create_image): - """Right image for Znnc.""" + """Right image for Zncc.""" data = np.array( ( [ @@ -182,12 +182,12 @@ def null_disparity_grid(): @pytest.fixture() def positive_disparity_grid(): - return np.full((3, 3), 1) + return np.full((3, 3), 2) @pytest.fixture() def negative_disparity_grid(): - return np.full((3, 3), -1) + return np.full((3, 3), -2) class DisparityGrids(NamedTuple): @@ -215,6 +215,10 @@ def data_with_null_disparity(left_zncc, right_zncc, null_disparity_grid): """ Coherent Data for test_step. 
""" + col_disparity_cfg = {"init": 0, "range": 0} + row_disparity_cfg = {"init": 0, "range": 0} + left_zncc.pipe(add_disparity_grid, col_disparity_cfg, row_disparity_cfg) + disparity_grids = DisparityGrids( col_min=null_disparity_grid, col_max=null_disparity_grid, @@ -231,8 +235,6 @@ def data_with_null_disparity(left_zncc, right_zncc, null_disparity_grid): ], dtype=np.float32, ) - left_zncc.attrs["col_disparity_source"] = [0, 0] - left_zncc.attrs["row_disparity_source"] = [0, 0] return StepData( left=left_zncc, right=right_zncc, full_matching_cost=full_matching_cost, disparity_grids=disparity_grids ) @@ -241,6 +243,10 @@ def data_with_null_disparity(left_zncc, right_zncc, null_disparity_grid): @pytest.fixture() def data_with_positive_disparity_in_col(left_zncc, right_zncc, null_disparity_grid, positive_disparity_grid): """Coherent Data for test_step.""" + col_disparity_cfg = {"init": 1, "range": 1} + row_disparity_cfg = {"init": 0, "range": 0} + left_zncc.pipe(add_disparity_grid, col_disparity_cfg, row_disparity_cfg) + disparity_grids = DisparityGrids( col_min=null_disparity_grid, col_max=positive_disparity_grid, @@ -250,39 +256,43 @@ def data_with_positive_disparity_in_col(left_zncc, right_zncc, null_disparity_gr full_matching_cost = np.array( [ [ - [[np.nan], [np.nan]], - [[np.nan], [np.nan]], - [[np.nan], [np.nan]], - [[np.nan], [np.nan]], - [[np.nan], [np.nan]], + [[np.nan], [np.nan], [np.nan]], + [[np.nan], [np.nan], [np.nan]], + [[np.nan], [np.nan], [np.nan]], + [[np.nan], [np.nan], [np.nan]], + [[np.nan], [np.nan], [np.nan]], + ], + [ + [[np.nan], [np.nan], [np.nan]], + [[-0.45], [-0.460179], [-0.46513027]], + [[-0.47058824], [-0.4756515], [np.nan]], + [[-0.48076922], [np.nan], [np.nan]], + [[np.nan], [np.nan], [np.nan]], ], [ - [[np.nan], [np.nan]], - [[-0.45], [-0.460179]], - [[-0.47058824], [-0.4756515]], - [[-0.48076922], [np.nan]], - [[np.nan], [np.nan]], + [[np.nan], [np.nan], [np.nan]], + [[-0.45], [-0.460179], [-0.46513027]], + [[-0.47058824], [-0.4756515], [np.nan]], + [[-0.48076922], [np.nan], [np.nan]], + [[np.nan], [np.nan], [np.nan]], ], [ - [[np.nan], [np.nan]], - [[-0.45], [-0.460179]], - [[-0.47058824], [-0.4756515]], - [[-0.48076922], [np.nan]], - [[np.nan], [np.nan]], + [[np.nan], [np.nan], [np.nan]], + [[0.0], [0.0], [0.0]], + [[0.0], [0.0], [np.nan]], + [[0.0], [np.nan], [np.nan]], + [[np.nan], [np.nan], [np.nan]], ], - [[[np.nan], [np.nan]], [[0.0], [0.0]], [[0.0], [0.0]], [[0.0], [np.nan]], [[np.nan], [np.nan]]], [ - [[np.nan], [np.nan]], - [[np.nan], [np.nan]], - [[np.nan], [np.nan]], - [[np.nan], [np.nan]], - [[np.nan], [np.nan]], + [[np.nan], [np.nan], [np.nan]], + [[np.nan], [np.nan], [np.nan]], + [[np.nan], [np.nan], [np.nan]], + [[np.nan], [np.nan], [np.nan]], + [[np.nan], [np.nan], [np.nan]], ], ], dtype=np.float32, ) - left_zncc.attrs["col_disparity_source"] = [0, 1] - left_zncc.attrs["row_disparity_source"] = [0, 0] return StepData( left=left_zncc, right=right_zncc, full_matching_cost=full_matching_cost, disparity_grids=disparity_grids ) @@ -291,6 +301,10 @@ def data_with_positive_disparity_in_col(left_zncc, right_zncc, null_disparity_gr @pytest.fixture() def data_with_positive_disparity_in_row(left_zncc, right_zncc, null_disparity_grid, positive_disparity_grid): """Coherent Data for test_step.""" + col_disparity_cfg = {"init": 0, "range": 0} + row_disparity_cfg = {"init": 1, "range": 1} + left_zncc.pipe(add_disparity_grid, col_disparity_cfg, row_disparity_cfg) + disparity_grids = DisparityGrids( col_min=null_disparity_grid, 
col_max=null_disparity_grid, @@ -299,22 +313,44 @@ def data_with_positive_disparity_in_row(left_zncc, right_zncc, null_disparity_gr ) full_matching_cost = np.array( [ - [[[np.nan, np.nan]], [[np.nan, np.nan]], [[np.nan, np.nan]], [[np.nan, np.nan]], [[np.nan, np.nan]]], [ - [[np.nan, np.nan]], - [[-0.45, -0.45]], - [[-0.47058824, -0.47058824]], - [[-0.48076922, -0.48076922]], - [[np.nan, np.nan]], + [[np.nan, np.nan, np.nan]], + [[np.nan, np.nan, np.nan]], + [[np.nan, np.nan, np.nan]], + [[np.nan, np.nan, np.nan]], + [[np.nan, np.nan, np.nan]], + ], + [ + [[np.nan, np.nan, np.nan]], + [[-0.45, -0.45, 0.0]], + [[-0.47058824, -0.47058824, 0.0]], + [[-0.48076922, -0.48076922, 0.0]], + [[np.nan, np.nan, np.nan]], + ], + [ + [[np.nan, np.nan, np.nan]], + [[-0.45, 0.0, np.nan]], + [[-0.47058824, 0.0, np.nan]], + [[-0.48076922, 0.0, np.nan]], + [[np.nan, np.nan, np.nan]], + ], + [ + [[np.nan, np.nan, np.nan]], + [[0.0, np.nan, np.nan]], + [[0.0, np.nan, np.nan]], + [[0.0, np.nan, np.nan]], + [[np.nan, np.nan, np.nan]], + ], + [ + [[np.nan, np.nan, np.nan]], + [[np.nan, np.nan, np.nan]], + [[np.nan, np.nan, np.nan]], + [[np.nan, np.nan, np.nan]], + [[np.nan, np.nan, np.nan]], ], - [[[np.nan, np.nan]], [[-0.45, 0.0]], [[-0.47058824, 0.0]], [[-0.48076922, 0.0]], [[np.nan, np.nan]]], - [[[np.nan, np.nan]], [[0.0, np.nan]], [[0.0, np.nan]], [[0.0, np.nan]], [[np.nan, np.nan]]], - [[[np.nan, np.nan]], [[np.nan, np.nan]], [[np.nan, np.nan]], [[np.nan, np.nan]], [[np.nan, np.nan]]], ], dtype=np.float32, ) - left_zncc.attrs["col_disparity_source"] = [0, 0] - left_zncc.attrs["row_disparity_source"] = [0, 1] return StepData( left=left_zncc, right=right_zncc, full_matching_cost=full_matching_cost, disparity_grids=disparity_grids ) @@ -323,6 +359,10 @@ def data_with_positive_disparity_in_row(left_zncc, right_zncc, null_disparity_gr @pytest.fixture() def data_with_negative_disparity_in_col(left_zncc, right_zncc, null_disparity_grid, negative_disparity_grid): """Coherent Data for test_step.""" + col_disparity_cfg = {"init": -1, "range": 1} + row_disparity_cfg = {"init": 0, "range": 0} + left_zncc.pipe(add_disparity_grid, col_disparity_cfg, row_disparity_cfg) + disparity_grids = DisparityGrids( col_min=negative_disparity_grid, col_max=null_disparity_grid, @@ -332,39 +372,43 @@ def data_with_negative_disparity_in_col(left_zncc, right_zncc, null_disparity_gr full_matching_cost = np.array( [ [ - [[np.nan], [np.nan]], - [[np.nan], [np.nan]], - [[np.nan], [np.nan]], - [[np.nan], [np.nan]], - [[np.nan], [np.nan]], + [[np.nan], [np.nan], [np.nan]], + [[np.nan], [np.nan], [np.nan]], + [[np.nan], [np.nan], [np.nan]], + [[np.nan], [np.nan], [np.nan]], + [[np.nan], [np.nan], [np.nan]], ], [ - [[np.nan], [np.nan]], - [[np.nan], [-0.45]], - [[-0.460179], [-0.47058824]], - [[-0.4756515], [-0.48076922]], - [[np.nan], [np.nan]], + [[np.nan], [np.nan], [np.nan]], + [[np.nan], [np.nan], [-0.45]], + [[np.nan], [-0.460179], [-0.47058824]], + [[-0.46513027], [-0.4756515], [-0.48076922]], + [[np.nan], [np.nan], [np.nan]], ], [ - [[np.nan], [np.nan]], - [[np.nan], [-0.45]], - [[-0.460179], [-0.47058824]], - [[-0.4756515], [-0.48076922]], - [[np.nan], [np.nan]], + [[np.nan], [np.nan], [np.nan]], + [[np.nan], [np.nan], [-0.45]], + [[np.nan], [-0.460179], [-0.47058824]], + [[-0.46513027], [-0.4756515], [-0.48076922]], + [[np.nan], [np.nan], [np.nan]], ], - [[[np.nan], [np.nan]], [[np.nan], [0.0]], [[0.0], [0.0]], [[0.0], [0.0]], [[np.nan], [np.nan]]], [ - [[np.nan], [np.nan]], - [[np.nan], [np.nan]], - [[np.nan], [np.nan]], - [[np.nan], 
[np.nan]], - [[np.nan], [np.nan]], + [[np.nan], [np.nan], [np.nan]], + [[np.nan], [np.nan], [0.0]], + [[np.nan], [0.0], [0.0]], + [[0.0], [0.0], [0.0]], + [[np.nan], [np.nan], [np.nan]], + ], + [ + [[np.nan], [np.nan], [np.nan]], + [[np.nan], [np.nan], [np.nan]], + [[np.nan], [np.nan], [np.nan]], + [[np.nan], [np.nan], [np.nan]], + [[np.nan], [np.nan], [np.nan]], ], ], dtype=np.float32, ) - left_zncc.attrs["col_disparity_source"] = [-1, 0] - left_zncc.attrs["row_disparity_source"] = [0, 0] return StepData( left=left_zncc, right=right_zncc, full_matching_cost=full_matching_cost, disparity_grids=disparity_grids ) @@ -373,6 +417,10 @@ def data_with_negative_disparity_in_col(left_zncc, right_zncc, null_disparity_gr @pytest.fixture() def data_with_negative_disparity_in_row(left_zncc, right_zncc, null_disparity_grid, negative_disparity_grid): """Coherent Data for test_step.""" + col_disparity_cfg = {"init": 0, "range": 0} + row_disparity_cfg = {"init": -1, "range": 1} + left_zncc.pipe(add_disparity_grid, col_disparity_cfg, row_disparity_cfg) + disparity_grids = DisparityGrids( col_min=null_disparity_grid, col_max=null_disparity_grid, @@ -381,22 +429,44 @@ def data_with_negative_disparity_in_row(left_zncc, right_zncc, null_disparity_gr ) full_matching_cost = np.array( [ - [[[np.nan, np.nan]], [[np.nan, np.nan]], [[np.nan, np.nan]], [[np.nan, np.nan]], [[np.nan, np.nan]]], [ - [[np.nan, np.nan]], - [[np.nan, -0.45]], - [[np.nan, -0.47058824]], - [[np.nan, -0.48076922]], - [[np.nan, np.nan]], + [[np.nan, np.nan, np.nan]], + [[np.nan, np.nan, np.nan]], + [[np.nan, np.nan, np.nan]], + [[np.nan, np.nan, np.nan]], + [[np.nan, np.nan, np.nan]], + ], + [ + [[np.nan, np.nan, np.nan]], + [[np.nan, np.nan, -0.45]], + [[np.nan, np.nan, -0.47058824]], + [[np.nan, np.nan, -0.48076922]], + [[np.nan, np.nan, np.nan]], + ], + [ + [[np.nan, np.nan, np.nan]], + [[np.nan, 1.0, -0.45]], + [[np.nan, 1.0, -0.47058824]], + [[np.nan, 1.0, -0.48076922]], + [[np.nan, np.nan, np.nan]], + ], + [ + [[np.nan, np.nan, np.nan]], + [[-0.45, 1.0, 0.0]], + [[-0.47058824, 1.0, 0.0]], + [[-0.48076922, 1.0, 0.0]], + [[np.nan, np.nan, np.nan]], + ], + [ + [[np.nan, np.nan, np.nan]], + [[np.nan, np.nan, np.nan]], + [[np.nan, np.nan, np.nan]], + [[np.nan, np.nan, np.nan]], + [[np.nan, np.nan, np.nan]], ], - [[[np.nan, np.nan]], [[1.0, -0.45]], [[1.0, -0.47058824]], [[1.0, -0.48076922]], [[np.nan, np.nan]]], - [[[np.nan, np.nan]], [[1.0, 0.0]], [[1.0, 0.0]], [[1.0, 0.0]], [[np.nan, np.nan]]], - [[[np.nan, np.nan]], [[np.nan, np.nan]], [[np.nan, np.nan]], [[np.nan, np.nan]], [[np.nan, np.nan]]], ], dtype=np.float32, ) - left_zncc.attrs["col_disparity_source"] = [0, 0] - left_zncc.attrs["row_disparity_source"] = [-1, 0] return StepData( left=left_zncc, right=right_zncc, full_matching_cost=full_matching_cost, disparity_grids=disparity_grids ) @@ -407,6 +477,10 @@ def data_with_disparity_negative_in_row_and_positive_in_col( left_zncc, right_zncc, null_disparity_grid, positive_disparity_grid, negative_disparity_grid ): """Coherent Data for test_step.""" + col_disparity_cfg = {"init": 1, "range": 1} + row_disparity_cfg = {"init": -1, "range": 1} + left_zncc.pipe(add_disparity_grid, col_disparity_cfg, row_disparity_cfg) + disparity_grids = DisparityGrids( col_min=null_disparity_grid, col_max=positive_disparity_grid, @@ -416,45 +490,43 @@ def data_with_disparity_negative_in_row_and_positive_in_col( full_matching_cost = np.array( [ [ - [[np.nan, np.nan], [np.nan, np.nan]], - [[np.nan, np.nan], [np.nan, np.nan]], - [[np.nan, np.nan], [np.nan, 
np.nan]], - [[np.nan, np.nan], [np.nan, np.nan]], - [[np.nan, np.nan], [np.nan, np.nan]], + [[np.nan, np.nan, np.nan], [np.nan, np.nan, np.nan], [np.nan, np.nan, np.nan]], + [[np.nan, np.nan, np.nan], [np.nan, np.nan, np.nan], [np.nan, np.nan, np.nan]], + [[np.nan, np.nan, np.nan], [np.nan, np.nan, np.nan], [np.nan, np.nan, np.nan]], + [[np.nan, np.nan, np.nan], [np.nan, np.nan, np.nan], [np.nan, np.nan, np.nan]], + [[np.nan, np.nan, np.nan], [np.nan, np.nan, np.nan], [np.nan, np.nan, np.nan]], ], [ - [[np.nan, np.nan], [np.nan, np.nan]], - [[np.nan, -0.45], [np.nan, -0.460179]], - [[np.nan, -0.47058824], [np.nan, -0.4756515]], - [[np.nan, -0.48076922], [np.nan, np.nan]], - [[np.nan, np.nan], [np.nan, np.nan]], + [[np.nan, np.nan, np.nan], [np.nan, np.nan, np.nan], [np.nan, np.nan, np.nan]], + [[np.nan, np.nan, -0.45], [np.nan, np.nan, -0.460179], [np.nan, np.nan, -0.46513027]], + [[np.nan, np.nan, -0.47058824], [np.nan, np.nan, -0.4756515], [np.nan, np.nan, np.nan]], + [[np.nan, np.nan, -0.48076922], [np.nan, np.nan, np.nan], [np.nan, np.nan, np.nan]], + [[np.nan, np.nan, np.nan], [np.nan, np.nan, np.nan], [np.nan, np.nan, np.nan]], ], [ - [[np.nan, np.nan], [np.nan, np.nan]], - [[1.0, -0.45], [0.99705446, -0.460179]], - [[1.0, -0.47058824], [0.99886817, -0.4756515]], - [[1.0, -0.48076922], [np.nan, np.nan]], - [[np.nan, np.nan], [np.nan, np.nan]], + [[np.nan, np.nan, np.nan], [np.nan, np.nan, np.nan], [np.nan, np.nan, np.nan]], + [[np.nan, 1.0, -0.45], [np.nan, 0.99705446, -0.460179], [np.nan, 0.99227786, -0.46513027]], + [[np.nan, 1.0, -0.47058824], [np.nan, 0.99886817, -0.4756515], [np.nan, np.nan, np.nan]], + [[np.nan, 1.0, -0.48076922], [np.nan, np.nan, np.nan], [np.nan, np.nan, np.nan]], + [[np.nan, np.nan, np.nan], [np.nan, np.nan, np.nan], [np.nan, np.nan, np.nan]], ], [ - [[np.nan, np.nan], [np.nan, np.nan]], - [[1.0, 0.0], [0.99705446, 0.0]], - [[1.0, 0.0], [0.99886817, 0.0]], - [[1.0, 0.0], [np.nan, np.nan]], - [[np.nan, np.nan], [np.nan, np.nan]], + [[np.nan, np.nan, np.nan], [np.nan, np.nan, np.nan], [np.nan, np.nan, np.nan]], + [[-0.45, 1.0, 0.0], [-0.460179, 0.99705446, 0.0], [-0.46513027, 0.99227786, 0.0]], + [[-0.47058824, 1.0, 0.0], [-0.4756515, 0.99886817, 0.0], [np.nan, np.nan, np.nan]], + [[-0.48076922, 1.0, 0.0], [np.nan, np.nan, np.nan], [np.nan, np.nan, np.nan]], + [[np.nan, np.nan, np.nan], [np.nan, np.nan, np.nan], [np.nan, np.nan, np.nan]], ], [ - [[np.nan, np.nan], [np.nan, np.nan]], - [[np.nan, np.nan], [np.nan, np.nan]], - [[np.nan, np.nan], [np.nan, np.nan]], - [[np.nan, np.nan], [np.nan, np.nan]], - [[np.nan, np.nan], [np.nan, np.nan]], + [[np.nan, np.nan, np.nan], [np.nan, np.nan, np.nan], [np.nan, np.nan, np.nan]], + [[np.nan, np.nan, np.nan], [np.nan, np.nan, np.nan], [np.nan, np.nan, np.nan]], + [[np.nan, np.nan, np.nan], [np.nan, np.nan, np.nan], [np.nan, np.nan, np.nan]], + [[np.nan, np.nan, np.nan], [np.nan, np.nan, np.nan], [np.nan, np.nan, np.nan]], + [[np.nan, np.nan, np.nan], [np.nan, np.nan, np.nan], [np.nan, np.nan, np.nan]], ], ], dtype=np.float32, ) - left_zncc.attrs["col_disparity_source"] = [0, 1] - left_zncc.attrs["row_disparity_source"] = [-1, 0] return StepData( left=left_zncc, right=right_zncc, full_matching_cost=full_matching_cost, disparity_grids=disparity_grids ) @@ -490,23 +562,10 @@ def cost_volumes(input_config, matching_cost_matcher, configuration): """Create cost_volumes.""" img_left, img_right = create_datasets_from_inputs(input_config, roi=None) - matching_cost_matcher.allocate_cost_volume_pandora( - img_left=img_left, - 
img_right=img_right, - grid_min_col=np.full((5, 5), 0), - grid_max_col=np.full((5, 5), 1), - cfg=configuration, - ) + matching_cost_matcher.allocate_cost_volume_pandora(img_left=img_left, img_right=img_right, cfg=configuration) # compute cost volumes - return matching_cost_matcher.compute_cost_volumes( - img_left=img_left, - img_right=img_right, - grid_min_col=np.full((5, 5), 0), - grid_max_col=np.full((5, 5), 1), - grid_min_row=np.full((5, 5), -1), - grid_max_row=np.full((5, 5), 0), - ) + return matching_cost_matcher.compute_cost_volumes(img_left=img_left, img_right=img_right) @pytest.fixture() diff --git a/tests/unit_tests/test_matching_cost/test_matching_cost_allocate.py b/tests/unit_tests/test_matching_cost/test_matching_cost_allocate.py index 70a70a4..eb04233 100644 --- a/tests/unit_tests/test_matching_cost/test_matching_cost_allocate.py +++ b/tests/unit_tests/test_matching_cost/test_matching_cost_allocate.py @@ -36,7 +36,7 @@ def test_allocate_cost_volume(left_stereo_object, right_stereo_object): """ # generated data for the test - np_data = np.empty((3, 3, 2, 2)) + np_data = np.empty((3, 3, 3, 3)) np_data.fill(np.nan) c_row = [0, 1, 2] @@ -46,8 +46,8 @@ def test_allocate_cost_volume(left_stereo_object, right_stereo_object): row = np.arange(c_row[0], c_row[-1] + 1) col = np.arange(c_col[0], c_col[-1] + 1) - disparity_range_col = np.arange(-1, 0 + 1) - disparity_range_row = np.arange(-1, 0 + 1) + disparity_range_col = np.arange(0, 2 + 1) + disparity_range_row = np.arange(-2, 0 + 1) # Create the cost volume if np_data is None: @@ -67,8 +67,8 @@ def test_allocate_cost_volume(left_stereo_object, right_stereo_object): cost_volumes_test.attrs["crs"] = None cost_volumes_test.attrs["transform"] = Affine(1.0, 0.0, 0.0, 0.0, 1.0, 0.0) cost_volumes_test.attrs["band_correl"] = None - cost_volumes_test.attrs["col_disparity_source"] = [0, 1] - cost_volumes_test.attrs["row_disparity_source"] = [-1, 0] + cost_volumes_test.attrs["col_disparity_source"] = [0, 2] + cost_volumes_test.attrs["row_disparity_source"] = [-2, 0] cost_volumes_test.attrs["no_data_img"] = -9999 cost_volumes_test.attrs["no_data_mask"] = 1 cost_volumes_test.attrs["valid_pixels"] = 0 @@ -80,19 +80,10 @@ def test_allocate_cost_volume(left_stereo_object, right_stereo_object): matching_cost_matcher = matching_cost.MatchingCost(cfg["pipeline"]["matching_cost"]) matching_cost_matcher.allocate_cost_volume_pandora( - img_left=left_stereo_object, - img_right=right_stereo_object, - grid_min_col=np.full((3, 3), -1), - grid_max_col=np.full((3, 3), 0), - cfg=cfg, + img_left=left_stereo_object, img_right=right_stereo_object, cfg=cfg ) cost_volumes_fun = matching_cost_matcher.compute_cost_volumes( - img_left=left_stereo_object, - img_right=right_stereo_object, - grid_min_col=np.full((3, 3), -1), - grid_max_col=np.full((3, 3), 0), - grid_min_row=np.full((3, 3), -1), - grid_max_row=np.full((3, 3), 0), + img_left=left_stereo_object, img_right=right_stereo_object ) # check that the generated xarray dataset is equal to the ground truth diff --git a/tests/unit_tests/test_matching_cost/test_matching_cost_check_conf.py b/tests/unit_tests/test_matching_cost/test_matching_cost_check_conf.py index ed3f074..3851027 100644 --- a/tests/unit_tests/test_matching_cost/test_matching_cost_check_conf.py +++ b/tests/unit_tests/test_matching_cost/test_matching_cost_check_conf.py @@ -35,19 +35,28 @@ def test_check_conf(): """ - test check_conf of matching cost pipeline + Description : test check_conf of matching cost pipeline + Data : + Requirement : EX_MC_ZNCC_00 """ 
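The hunks above show the main API change exercised by these tests: `allocate_cost_volume_pandora` and `compute_cost_volumes` no longer receive explicit `grid_min_*`/`grid_max_*` arrays, because the disparity search interval now travels with the image dataset. A minimal sketch of the resulting call pattern, mirroring the `make_image` fixture of this patch (image contents, attribute values and the configuration are illustrative only, not taken from the test suite):

```python
import numpy as np
import xarray as xr

from pandora2d import matching_cost
from pandora2d.img_tools import add_disparity_grid


def make_image(data, disp_col, disp_row):
    """Small helper mirroring the make_image fixture introduced in this patch."""
    img = xr.Dataset(
        {"im": (["row", "col"], data)},
        coords={"row": np.arange(data.shape[0]), "col": np.arange(data.shape[1])},
    )
    # The disparity search interval is carried by the dataset itself.
    img.pipe(add_disparity_grid, disp_col, disp_row)
    img.attrs.update({"no_data_img": -9999, "valid_pixels": 0, "no_data_mask": 1, "crs": None})
    return img


left = make_image(np.full((5, 5), 1.0), {"init": 0, "range": 2}, {"init": 1, "range": 1})
right = make_image(np.full((5, 5), 1.0), {"init": 0, "range": 2}, {"init": 1, "range": 1})

cfg = {"pipeline": {"matching_cost": {"matching_cost_method": "ssd", "window_size": 1}}}
matcher = matching_cost.MatchingCost(cfg["pipeline"]["matching_cost"])

# No more grid_min_col/grid_max_col/grid_min_row/grid_max_row arguments:
matcher.allocate_cost_volume_pandora(img_left=left, img_right=right, cfg=cfg)
cost_volumes = matcher.compute_cost_volumes(img_left=left, img_right=right)
```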
matching_cost.MatchingCost({"matching_cost_method": "zncc", "window_size": 5}) def test_invalid_method(): - """census is not expected to be used with pandora2d.""" + """ + Description : census is not expected to be used with pandora2d. + Data : + Requirement : EX_CONF_08 + """ with pytest.raises(json_checker.core.exceptions.DictCheckerError): matching_cost.MatchingCost({"matching_cost_method": "census", "window_size": 5}) class TestWindowSize: - """Test window_size parameter values.""" + """ + Description : Test window_size parameter values. + Requirement : EX_CONF_04, EX_MC_00 + """ @pytest.mark.parametrize("method", ["zncc", "sad", "ssd"]) def test_default_window_size(self, method): @@ -57,6 +66,11 @@ def test_default_window_size(self, method): @pytest.mark.parametrize("method", ["zncc", "sad", "ssd"]) def test_fails_with_invalid_window_size(self, method): + """ + Description : Test the validity of the window_size parameter + Data : + Requirement : EX_CONF_08 + """ with pytest.raises(json_checker.core.exceptions.DictCheckerError) as err: matching_cost.MatchingCost({"matching_cost_method": method, "window_size": -1}) assert "window_size" in err.value.args[0] @@ -66,7 +80,10 @@ def test_fails_with_invalid_window_size(self, method): @pytest.mark.plugin_tests @pytest.mark.skipif(importlib.util.find_spec("mc_cnn") is None, reason="MCCNN plugin not installed") class TestMCCNNConf: - """Test window_size with MCCNN plugin.""" + """ + Description : Test window_size with MCCNN plugin. + Requirement : EX_CONF_04, EX_MC_00 + """ def test_default_window_size(self): result = matching_cost.MatchingCost({"matching_cost_method": "mc_cnn", "step": [1, 1]}) @@ -80,7 +97,8 @@ def test_fails_with_invalid_window_size(self): class TestStep: """ - Test step in matching_cost configuration + Description : Test step in matching_cost configuration + Requirement : EX_CONF_04, EX_STEP_02, EX_MC_01 """ def test_nominal_case(self): @@ -92,18 +110,38 @@ def test_default_step(self): assert result.cfg["step"] == [1, 1] def test_fails_with_negative_step(self): + """ + Description : Test if the step is negative + Data : + Requirement : EX_CONF_08 + """ with pytest.raises(json_checker.core.exceptions.DictCheckerError): matching_cost.MatchingCost({"matching_cost_method": "zncc", "window_size": 5, "step": [-2, 3]}) def test_fails_with_one_element_list(self): + """ + Description : Test fails if the step is a list of one element + Data : + Requirement : EX_CONF_08 + """ with pytest.raises(json_checker.core.exceptions.DictCheckerError): matching_cost.MatchingCost({"matching_cost_method": "zncc", "window_size": 5, "step": [2]}) def test_fails_with_more_than_two_element_list(self): + """ + Description : Test fails if the step is a list of more than 2 elements + Data : + Requirement : EX_CONF_08 + """ with pytest.raises(json_checker.core.exceptions.DictCheckerError): matching_cost.MatchingCost({"matching_cost_method": "zncc", "window_size": 5, "step": [2, 3, 4]}) def test_fails_with_string_element(self): + """ + Description : Test fails if the step list contains a string element + Data : + Requirement : EX_CONF_08 + """ with pytest.raises(json_checker.core.exceptions.DictCheckerError): matching_cost.MatchingCost({"matching_cost_method": "zncc", "window_size": 5, "step": ["2", 3]}) diff --git a/tests/unit_tests/test_matching_cost/test_matching_cost_compute.py b/tests/unit_tests/test_matching_cost/test_matching_cost_compute.py index 18d177f..ddaac2b 100644 --- a/tests/unit_tests/test_matching_cost/test_matching_cost_compute.py +++ 
b/tests/unit_tests/test_matching_cost/test_matching_cost_compute.py @@ -21,17 +21,19 @@ """ Test compute_cost_volumes method from Matching cost """ + import importlib.util # pylint: disable=redefined-outer-name # pylint: disable=too-many-lines import numpy as np import xarray as xr +from pytest_mock import MockerFixture from rasterio import Affine import pytest from pandora.margins import Margins -from pandora2d import matching_cost +from pandora2d import matching_cost, disparity from pandora2d.img_tools import create_datasets_from_inputs, add_disparity_grid @@ -49,7 +51,17 @@ @pytest.mark.parametrize("col_step", [1, 2, pytest.param(5, id="Step gt image")]) @pytest.mark.parametrize("row_step", [1, 2, pytest.param(5, id="Step gt image")]) def test_steps(request, data_fixture_name, col_step, row_step): - """We expect step to work.""" + """ + Description : We expect step to work. + Data : + - data_with_null_disparity, + - data_with_positive_disparity_in_col, + - data_with_positive_disparity_in_row, + - data_with_negative_disparity_in_col, + - data_with_negative_disparity_in_row, + - data_with_disparity_negative_in_row_and_positive_in_col + Requirement : EX_STEP_00, EX_STEP_01 + """ data = request.getfixturevalue(data_fixture_name) # sum of squared difference images self.left, self.right, window_size=3 @@ -58,22 +70,9 @@ def test_steps(request, data_fixture_name, col_step, row_step): } # initialise matching cost matching_cost_matcher = matching_cost.MatchingCost(cfg["pipeline"]["matching_cost"]) - matching_cost_matcher.allocate_cost_volume_pandora( - img_left=data.left, - img_right=data.right, - grid_min_col=data.disparity_grids.col_min, - grid_max_col=data.disparity_grids.col_max, - cfg=cfg, - ) + matching_cost_matcher.allocate_cost_volume_pandora(img_left=data.left, img_right=data.right, cfg=cfg) # compute cost volumes - zncc = matching_cost_matcher.compute_cost_volumes( - img_left=data.left, - img_right=data.right, - grid_min_col=data.disparity_grids.col_min, - grid_max_col=data.disparity_grids.col_max, - grid_min_row=data.disparity_grids.row_min, - grid_max_row=data.disparity_grids.row_max, - ) + zncc = matching_cost_matcher.compute_cost_volumes(img_left=data.left, img_right=data.right) # indexes are : row, col, disp_x, disp_y np.testing.assert_equal(zncc["cost_volumes"].data, data.full_matching_cost[::row_step, ::col_step, :, :]) @@ -83,6 +82,10 @@ def test_compute_cv_ssd(left_stereo_object, right_stereo_object): """ Test the cost volume product by ssd """ + # update disparity + left_stereo_object["col_disparity"][0, :, :] = np.full((3, 3), -1) + left_stereo_object["col_disparity"][1, :, :] = np.full((3, 3), 0) + left_stereo_object["row_disparity"][0, :, :] = np.full((3, 3), -1) # sum of squared difference images left, right, window_size=1 cfg = {"pipeline": {"matching_cost": {"matching_cost_method": "ssd", "window_size": 1}}} # sum of squared difference ground truth for the images left, right, window_size=1 @@ -112,22 +115,11 @@ def test_compute_cv_ssd(left_stereo_object, right_stereo_object): matching_cost_matcher = matching_cost.MatchingCost(cfg["pipeline"]["matching_cost"]) matching_cost_matcher.allocate_cost_volume_pandora( - img_left=left_stereo_object, - img_right=right_stereo_object, - grid_min_col=np.full((3, 3), -1), - grid_max_col=np.full((3, 3), 0), - cfg=cfg, + img_left=left_stereo_object, img_right=right_stereo_object, cfg=cfg ) # compute cost volumes - ssd = matching_cost_matcher.compute_cost_volumes( - img_left=left_stereo_object, - img_right=right_stereo_object, - 
grid_min_col=np.full((3, 3), -1), - grid_max_col=np.full((3, 3), 0), - grid_min_row=np.full((3, 3), -1), - grid_max_row=np.full((3, 3), 0), - ) + ssd = matching_cost_matcher.compute_cost_volumes(img_left=left_stereo_object, img_right=right_stereo_object) # check that the generated cost_volumes is equal to ground truth np.testing.assert_allclose(ssd["cost_volumes"].data, ad_ground_truth, atol=1e-06) @@ -156,29 +148,15 @@ def test_compute_cv_mc_cnn(): "no_data_mask": 1, "crs": None, "transform": Affine(1.0, 0.0, 0.0, 0.0, 1.0, 0.0), - "col_disparity_source": [-1, 1], - "row_disparity_source": [-1, 1], } + img.pipe(add_disparity_grid, {"init": 0, "range": 1}, {"init": 0, "range": 1}) matching_cost_matcher = matching_cost.MatchingCost(cfg["pipeline"]["matching_cost"]) - matching_cost_matcher.allocate_cost_volume_pandora( - img_left=img, - img_right=img, - grid_min_col=np.full((100, 100), -1), - grid_max_col=np.full((100, 100), 1), - cfg=cfg, - ) + matching_cost_matcher.allocate_cost_volume_pandora(img_left=img, img_right=img, cfg=cfg) # compute cost volumes - mccnn = matching_cost_matcher.compute_cost_volumes( - img_left=img, - img_right=img, - grid_min_col=np.full((100, 100), -1), - grid_max_col=np.full((100, 100), 1), - grid_min_row=np.full((100, 100), -1), - grid_max_row=np.full((100, 100), 1), - ) + mccnn = matching_cost_matcher.compute_cost_volumes(img_left=img, img_right=img) # get cv with disparity = 0 disp = abs(mccnn["cost_volumes"].data[:, :, 1, 1]) @@ -192,7 +170,10 @@ def test_compute_cv_sad(left_stereo_object, right_stereo_object): """ Test the cost volume product by sad """ - + # update disparity + left_stereo_object["col_disparity"][0, :, :] = np.full((3, 3), -1) + left_stereo_object["col_disparity"][1, :, :] = np.full((3, 3), 0) + left_stereo_object["row_disparity"][0, :, :] = np.full((3, 3), -1) # sum of squared difference images left, right, window_size=1 cfg = {"pipeline": {"matching_cost": {"matching_cost_method": "sad", "window_size": 1}}} # sum of absolute difference ground truth for the images left, right, window_size=1 @@ -222,21 +203,10 @@ def test_compute_cv_sad(left_stereo_object, right_stereo_object): # initialise matching cost matching_cost_matcher = matching_cost.MatchingCost(cfg["pipeline"]["matching_cost"]) matching_cost_matcher.allocate_cost_volume_pandora( - img_left=left_stereo_object, - img_right=right_stereo_object, - grid_min_col=np.full((3, 3), -1), - grid_max_col=np.full((3, 3), 0), - cfg=cfg, + img_left=left_stereo_object, img_right=right_stereo_object, cfg=cfg ) # compute cost volumes - sad = matching_cost_matcher.compute_cost_volumes( - img_left=left_stereo_object, - img_right=right_stereo_object, - grid_min_col=np.full((3, 3), -1), - grid_max_col=np.full((3, 3), 0), - grid_min_row=np.full((3, 3), -1), - grid_max_row=np.full((3, 3), 0), - ) + sad = matching_cost_matcher.compute_cost_volumes(img_left=left_stereo_object, img_right=right_stereo_object) # check that the generated cost_volumes is equal to ground truth np.testing.assert_allclose(sad["cost_volumes"].data, ad_ground_truth, atol=1e-06) @@ -262,9 +232,8 @@ def test_compute_cv_zncc(): "no_data_mask": 1, "crs": None, "transform": Affine(1.0, 0.0, 0.0, 0.0, 1.0, 0.0), - "col_disparity_source": [0, 1], - "row_disparity_source": [-1, 0], } + left_zncc.pipe(add_disparity_grid, {"init": 1, "range": 1}, {"init": -1, "range": 1}) data = np.array( ([[1, 1, 1, 1, 1], [3, 4, 5, 6, 7], [1, 1, 1, 1, 1], [1, 1, 1, 1, 1], [1, 1, 1, 1, 1]]), @@ -320,28 +289,15 @@ def test_compute_cv_zncc(): # initialise 
matching cost matching_cost_matcher = matching_cost.MatchingCost(cfg["pipeline"]["matching_cost"]) - matching_cost_matcher.allocate_cost_volume_pandora( - img_left=left_zncc, - img_right=right_zncc, - grid_min_col=np.full((3, 3), 0), - grid_max_col=np.full((3, 3), 1), - cfg=cfg, - ) + matching_cost_matcher.allocate_cost_volume_pandora(img_left=left_zncc, img_right=right_zncc, cfg=cfg) # compute cost volumes - zncc = matching_cost_matcher.compute_cost_volumes( - img_left=left_zncc, - img_right=right_zncc, - grid_min_col=np.full((3, 3), 0), - grid_max_col=np.full((3, 3), 1), - grid_min_row=np.full((3, 3), -1), - grid_max_row=np.full((3, 3), 0), - ) - # check that the generated cost_volumes is equal to ground truth + zncc = matching_cost_matcher.compute_cost_volumes(img_left=left_zncc, img_right=right_zncc) - np.testing.assert_allclose(zncc["cost_volumes"].data[1, 1, 0, 1], ad_ground_truth_1_1_0_0, rtol=1e-06) - np.testing.assert_allclose(zncc["cost_volumes"].data[1, 1, 0, 0], ad_ground_truth_1_1_0_1, rtol=1e-06) - np.testing.assert_allclose(zncc["cost_volumes"].data[2, 2, 0, 1], ad_ground_truth_2_2_0_0, rtol=1e-06) - np.testing.assert_allclose(zncc["cost_volumes"].data[2, 2, 0, 0], ad_ground_truth_2_2_0_1, rtol=1e-06) + # check that the generated cost_volumes is equal to ground truth + np.testing.assert_allclose(zncc["cost_volumes"].data[1, 1, 0, 2], ad_ground_truth_1_1_0_0, rtol=1e-06) + np.testing.assert_allclose(zncc["cost_volumes"].data[1, 1, 0, 1], ad_ground_truth_1_1_0_1, rtol=1e-06) + np.testing.assert_allclose(zncc["cost_volumes"].data[2, 2, 0, 2], ad_ground_truth_2_2_0_0, rtol=1e-06) + np.testing.assert_allclose(zncc["cost_volumes"].data[2, 2, 0, 1], ad_ground_truth_2_2_0_1, rtol=1e-06) @pytest.mark.parametrize( @@ -438,11 +394,29 @@ def test_compute_cv_zncc(): np.array([5]), # Only ROI["row"]["first"]=5 is in the cost_volume rows id="ROI and step_row and step_col greater than the number of columns and rows in the ROI", ), + pytest.param( + {"col": {"first": 1, "last": 7}, "row": {"first": 1, "last": 7}, "margins": [2, 2, 2, 2]}, + [2, 1], + np.arange(10), # Coordinates of user ROI + margins + np.arange(1, 10, 2), # ROI["row"]["first"]=1 then coordinates are [1,3,5,7,9] + id="First point of ROI lower than margin and step_row=2", + ), + pytest.param( + {"col": {"first": 3, "last": 5}, "row": {"first": 1, "last": 5}, "margins": [4, 4, 4, 4]}, + [1, 2], + np.arange(1, 10, 2), # ROI["col"]["first"]=3 then coordinates are [1,3,5,7,9] + np.arange(0, 10), # Coordinates of user ROI + margins + id="First point of ROI lower than margin and step_col=2", + ), ], ) def test_cost_volume_coordinates_with_roi(roi, input_config, matching_cost_config, col_expected, row_expected): """ - Test that we have the correct cost_volumes coordinates with a ROI + Description : Test that we have the correct cost_volumes coordinates with a ROI + Data : + - Left image : tmp_path / "left_img.png" + - Right image : tmp_path / "right_img.png" + Requirement : EX_STEP_00 """ cfg = {"input": input_config, "pipeline": {"matching_cost": matching_cost_config}, "ROI": roi} @@ -451,25 +425,12 @@ def test_cost_volume_coordinates_with_roi(roi, input_config, matching_cost_confi matching_cost_matcher = matching_cost.MatchingCost(cfg["pipeline"]["matching_cost"]) - matching_cost_matcher.allocate_cost_volume_pandora( - img_left=img_left, - img_right=img_right, - grid_min_col=np.full((10, 10), 0), - grid_max_col=np.full((10, 10), 1), - cfg=cfg, - ) + matching_cost_matcher.allocate_cost_volume_pandora(img_left=img_left, 
img_right=img_right, cfg=cfg) np.testing.assert_array_equal(matching_cost_matcher.grid_.attrs["col_to_compute"], col_expected) # compute cost volumes with roi - cost_volumes_with_roi = matching_cost_matcher.compute_cost_volumes( - img_left=img_left, - img_right=img_right, - grid_min_col=np.full((10, 10), 0), - grid_max_col=np.full((10, 10), 1), - grid_min_row=np.full((10, 10), -1), - grid_max_row=np.full((10, 10), 0), - ) + cost_volumes_with_roi = matching_cost_matcher.compute_cost_volumes(img_left=img_left, img_right=img_right) np.testing.assert_array_equal(cost_volumes_with_roi["cost_volumes"].coords["col"], col_expected) np.testing.assert_array_equal(cost_volumes_with_roi["cost_volumes"].coords["row"], row_expected) @@ -487,7 +448,7 @@ def test_cost_volume_coordinates_with_roi(roi, input_config, matching_cost_confi pytest.param( [2, 1], np.arange(10), - np.arange(0, 10, 2), # 1 < step_row < len(cost_volume["cost_volumes"].coords["row"]) + np.arange(0, 10, 2), # 1 < step_row < len(cost_volume["cost_volumes"].coords["row"]) id="No ROI, step_row=2 and step_col=1", ), pytest.param( @@ -498,7 +459,7 @@ def test_cost_volume_coordinates_with_roi(roi, input_config, matching_cost_confi ), pytest.param( [1, 3], - np.arange(0, 10, 3), # 1 < step_col < len(cost_volume["cost_volumes"].coords["col"]) + np.arange(0, 10, 3), # 1 < step_col < len(cost_volume["cost_volumes"].coords["col"]) np.arange(10), id="No ROI, step_row=1 and step_col=3", ), @@ -524,7 +485,11 @@ def test_cost_volume_coordinates_with_roi(roi, input_config, matching_cost_confi ) def test_cost_volume_coordinates_without_roi(input_config, matching_cost_config, col_expected, row_expected): """ - Test that we have the correct cost_volumes coordinates without a ROI + Description : Test that we have the correct cost_volumes coordinates without a ROI + Data : + - Left image : tmp_path / "left_img.png" + - Right image : tmp_path / "right_img.png" + Requirement : EX_STEP_00 """ cfg = { @@ -536,25 +501,12 @@ def test_cost_volume_coordinates_without_roi(input_config, matching_cost_config, matching_cost_matcher = matching_cost.MatchingCost(cfg["pipeline"]["matching_cost"]) - matching_cost_matcher.allocate_cost_volume_pandora( - img_left=img_left, - img_right=img_right, - grid_min_col=np.full((10, 10), 0), - grid_max_col=np.full((10, 10), 1), - cfg=cfg, - ) + matching_cost_matcher.allocate_cost_volume_pandora(img_left=img_left, img_right=img_right, cfg=cfg) np.testing.assert_array_equal(matching_cost_matcher.grid_.attrs["col_to_compute"], col_expected) # compute cost volumes without roi - cost_volumes = matching_cost_matcher.compute_cost_volumes( - img_left=img_left, - img_right=img_right, - grid_min_col=np.full((10, 10), 0), - grid_max_col=np.full((10, 10), 1), - grid_min_row=np.full((10, 10), -1), - grid_max_row=np.full((10, 10), 0), - ) + cost_volumes = matching_cost_matcher.compute_cost_volumes(img_left=img_left, img_right=img_right) np.testing.assert_array_equal(cost_volumes["cost_volumes"].coords["col"], col_expected) np.testing.assert_array_equal(cost_volumes["cost_volumes"].coords["row"], row_expected) @@ -572,8 +524,8 @@ def test_cost_volume_coordinates_without_roi(input_config, matching_cost_config, [ pytest.param( [1, 1], - (5, 5, 2, 2), - (5, 4, 2, 2), + (5, 5, 3, 5), + (5, 4, 3, 5), np.s_[2:4, 2:4, :, :], np.s_[2:4, 1:3, :, :], (5, 5), @@ -581,8 +533,8 @@ def test_cost_volume_coordinates_without_roi(input_config, matching_cost_config, ), pytest.param( [1, 2], - (5, 3, 2, 2), - (5, 2, 2, 2), + (5, 3, 3, 5), + (5, 2, 3, 5), np.s_[2:4, 
2:4:2, :, :], np.s_[2:4, 1:3:2, :, :], (5, 5), @@ -590,8 +542,8 @@ def test_cost_volume_coordinates_without_roi(input_config, matching_cost_config, ), pytest.param( [2, 1], - (3, 5, 2, 2), - (3, 4, 2, 2), + (3, 5, 3, 5), + (3, 4, 3, 5), np.s_[2:4, 2:4, :, :], np.s_[2:4, 1:3, :, :], (5, 5), @@ -617,22 +569,9 @@ def test_roi_inside_and_margins_inside( # pylint: disable=too-many-arguments # crop image with roi img_left, img_right = create_datasets_from_inputs(input_config, roi=roi) - matching_cost_matcher.allocate_cost_volume_pandora( - img_left=img_left, - img_right=img_right, - grid_min_col=np.full((5, 5), 0), - grid_max_col=np.full((5, 5), 1), - cfg=configuration_roi, - ) + matching_cost_matcher.allocate_cost_volume_pandora(img_left=img_left, img_right=img_right, cfg=configuration_roi) # compute cost volumes with roi - cost_volumes_with_roi = matching_cost_matcher.compute_cost_volumes( - img_left=img_left, - img_right=img_right, - grid_min_col=np.full((5, 5), 0), - grid_max_col=np.full((5, 5), 1), - grid_min_row=np.full((5, 5), -1), - grid_max_row=np.full((5, 5), 0), - ) + cost_volumes_with_roi = matching_cost_matcher.compute_cost_volumes(img_left=img_left, img_right=img_right) assert cost_volumes_with_roi["cost_volumes"].data.shape == expected_shape_roi assert cost_volumes["cost_volumes"].data.shape == expected_shape @@ -642,79 +581,224 @@ def test_roi_inside_and_margins_inside( # pylint: disable=too-many-arguments ) -class TestSubpix: - """Test subpix parameter""" - - @pytest.fixture() - def make_image_fixture(self): - """ - Create image dataset - """ +@pytest.fixture() +def make_image_fixture(): + """ + Create image dataset + """ - def make_image(disp_row, disp_col, data): - img = xr.Dataset( - {"im": (["row", "col"], data)}, - coords={"row": np.arange(data.shape[0]), "col": np.arange(data.shape[1])}, - ) + def make_image(disp_row, disp_col, data): + img = xr.Dataset( + {"im": (["row", "col"], data)}, + coords={"row": np.arange(data.shape[0]), "col": np.arange(data.shape[1])}, + ) - add_disparity_grid(img, disp_col, disp_row) + img.pipe(add_disparity_grid, disp_col, disp_row) - img.attrs = { + img.attrs.update( + { "no_data_img": -9999, "valid_pixels": 0, "no_data_mask": 1, "crs": None, - "col_disparity_source": disp_col, - "row_disparity_source": disp_row, } + ) + + return img + + return make_image - return img - return make_image +@pytest.fixture() +def make_cost_volumes(make_image_fixture, request): + """ + Instantiate a matching_cost and compute cost_volumes + """ + + cfg = { + "pipeline": { + "matching_cost": { + "matching_cost_method": "ssd", + "window_size": 1, + "step": request.param["step"], + "subpix": request.param["subpix"], + } + } + } + + disp_row = request.param["disp_row"] + disp_col = request.param["disp_col"] + + img_left = make_image_fixture(disp_row, disp_col, request.param["data_left"]) + img_right = make_image_fixture(disp_row, disp_col, request.param["data_right"]) + + matching_cost_ = matching_cost.MatchingCost(cfg["pipeline"]["matching_cost"]) + + matching_cost_.allocate_cost_volume_pandora(img_left=img_left, img_right=img_right, cfg=cfg) + + cost_volumes = matching_cost_.compute_cost_volumes(img_left=img_left, img_right=img_right) + + return cost_volumes + + +class TestDisparityGrid: + """Test effect of disparity grids.""" + + @pytest.fixture() + def nb_rows(self): + return 5 + + @pytest.fixture() + def nb_cols(self): + return 4 + + @pytest.fixture() + def nb_disp_rows(self): + return 6 + + @pytest.fixture() + def nb_disp_cols(self): + return 7 
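The `TestDisparityGrid` fixtures that follow rely on the per-pixel disparity grids that `add_disparity_grid` attaches to the dataset. A short sketch of that structure, inferred from how these tests index it (a `row_disparity`/`col_disparity` variable with a `band_disp` coordinate holding `min` and `max` bands); the grid values and the altered pixel are illustrative:

```python
import numpy as np
import xarray as xr

from pandora2d.img_tools import add_disparity_grid

image = xr.Dataset(
    {"im": (["row", "col"], np.zeros((5, 4)))},
    coords={"row": np.arange(5), "col": np.arange(4)},
)
# {"init": 0, "range": 2} gives every pixel a search interval around 0.
image.pipe(add_disparity_grid, {"init": 0, "range": 2}, {"init": 0, "range": 2})

# Narrow the row search at a single pixel by raising the "min" band of its grid,
# as the disparity_maps fixture below does to push disparity 0 out of range there.
image["row_disparity"].loc[{"band_disp": "min", "row": 2, "col": 2}] = 1
```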
@pytest.fixture() - def make_cost_volumes(self, make_image_fixture, request): + def cost_volumes(self, nb_rows, nb_cols, nb_disp_rows, nb_disp_cols): + """cost_volumes full of zeros.""" + # only need because allocate_cost_volumes delete it + fake_pandora_attrs = {"col_to_compute": 1, "sampling_interval": 1} + return matching_cost.MatchingCost.allocate_cost_volumes( + fake_pandora_attrs, + row=np.arange(nb_rows), + col=np.arange(nb_cols), + disp_range_col=np.arange(2, 2 + nb_disp_cols), + disp_range_row=np.arange(-5, -5 + nb_disp_rows), + ) + + @pytest.fixture() + def min_disp_grid(self, cost_volumes, nb_rows, nb_cols): + return np.full((nb_rows, nb_cols), cost_volumes.coords["disp_row"].data[0]) + + @pytest.fixture() + def max_disp_grid(self, cost_volumes, nb_rows, nb_cols): + return np.full((nb_rows, nb_cols), cost_volumes.coords["disp_row"].data[-1]) + + @pytest.fixture() + def row_index(self, nb_rows): + return nb_rows // 2 + + @pytest.fixture() + def col_index(self, nb_cols): + return nb_cols // 2 + + @pytest.fixture() + def disparity_to_alter(self): + return "row_disparity" + + @pytest.fixture() + def mock_type(self): + return "used" + + @pytest.fixture() + def mock_set_out_of_disparity_range_to_nan(self, mock_type, mocker: MockerFixture): """ - Instantiate a matching_cost and compute cost_volumes + Used or bypass set_out_of_row_disparity_range_to_other_value. + + :param mock_type: `used` or `not used` + :type mock_type: str + :param mocker: + :type mocker: + :return: Mock if mock_type is `use` + :rtype: Mock or None + :raises: ValueError if mock_type is neither `used` or `not used` """ + if mock_type == "not used": + return mocker.patch( + "pandora2d.matching_cost.matching_cost.set_out_of_row_disparity_range_to_other_value", + side_effect=lambda x, y, z, k, l: x, + ) + if mock_type != "used": + raise ValueError(f"Expected mock_type to be 'used' or 'not used', got {mock_type}.") + + @pytest.fixture() + def disparity_maps( + self, + make_image_fixture, + random_generator, + nb_rows, + nb_cols, + row_index, + col_index, + disparity_to_alter, + mock_set_out_of_disparity_range_to_nan, + ): # pylint: disable=too-many-arguments,unused-argument + """Compute disparity maps and return disp_map_row and disp_map_col.""" + image = make_image_fixture( + disp_col={"init": 0, "range": 2}, + disp_row={"init": 0, "range": 2}, + data=random_generator.integers(0, 255, (nb_rows, nb_cols), endpoint=True), + ) + # Make disparity search out of truth for one point + image[disparity_to_alter].loc[ + { + "band_disp": "min", + "row": row_index, + "col": col_index, + } + ] = 1 cfg = { "pipeline": { "matching_cost": { "matching_cost_method": "ssd", "window_size": 1, - "step": request.param["step"], - "subpix": request.param["subpix"], + "step": [1, 1], + "subpix": 1, } } } + matching_cost_ = matching_cost.MatchingCost(cfg["pipeline"]["matching_cost"]) - disp_row = request.param["disp_row"] - disp_col = request.param["disp_col"] + matching_cost_.allocate_cost_volume_pandora(img_left=image, img_right=image, cfg=cfg) - img_left = make_image_fixture(disp_row, disp_col, request.param["data_left"]) - img_right = make_image_fixture(disp_row, disp_col, request.param["data_right"]) + cost_volumes = matching_cost_.compute_cost_volumes(img_left=image, img_right=image) - matching_cost_ = matching_cost.MatchingCost(cfg["pipeline"]["matching_cost"]) + disparity_matcher = disparity.Disparity({"disparity_method": "wta", "invalid_disparity": -99}) - matching_cost_.allocate_cost_volume_pandora( - img_left=img_left, - 
img_right=img_right, - grid_min_col=np.full((img_left["im"].shape[0], img_left["im"].shape[1]), disp_col[0]), - grid_max_col=np.full((img_left["im"].shape[0], img_left["im"].shape[1]), disp_col[1]), - cfg=cfg, - ) + disp_map_col, disp_map_row, _ = disparity_matcher.compute_disp_maps(cost_volumes) + return {"row_disparity": disp_map_row, "col_disparity": disp_map_col} - cost_volumes = matching_cost_.compute_cost_volumes( - img_left=img_left, - img_right=img_right, - grid_min_col=np.full((img_left["im"].shape[0], img_left["im"].shape[1]), disp_col[0]), - grid_max_col=np.full((img_left["im"].shape[0], img_left["im"].shape[1]), disp_col[1]), - grid_min_row=np.full((img_left["im"].shape[0], img_left["im"].shape[1]), disp_row[0]), - grid_max_row=np.full((img_left["im"].shape[0], img_left["im"].shape[1]), disp_row[1]), - ) + @pytest.mark.parametrize("disparity_to_alter", ["row_disparity", "col_disparity"]) + def test_effect_on_compute_cost_volume(self, disparity_maps, disparity_to_alter, row_index, col_index): + """Check best candidate out of disparity range is not chosen by wta. + + As we use the very same images, WTA should find a 0 disparity everywhere except for the point where we set a + disparity range that did not include 0. + + Note: `col_disparity` is done by Pandora. + """ + result = disparity_maps[disparity_to_alter] + + assert result[row_index, col_index] != 0 + assert np.all(result[:row_index, :] == 0) + assert np.all(result[row_index + 1 :, :] == 0) + assert np.all(result[:, :col_index] == 0) + assert np.all(result[:, col_index + 1 :] == 0) + + @pytest.mark.parametrize("mock_type", ["not used"]) + def test_when_not_taken_into_account( + self, disparity_maps, disparity_to_alter, mock_set_out_of_disparity_range_to_nan + ): # pylint: disable=unused-argument + """Check best candidate out of disparity range is not chosen by wta. + + Note: `col_disparity` is done by Pandora. 
+ """ + result = disparity_maps[disparity_to_alter] + + assert np.all(result == 0) + assert mock_set_out_of_disparity_range_to_nan.called - return cost_volumes + +class TestSubpix: + """Test subpix parameter""" @pytest.mark.parametrize( ["make_cost_volumes", "shape_expected", "row_disparity", "col_disparity"], @@ -723,13 +807,13 @@ def make_cost_volumes(self, make_image_fixture, request): { "step": [1, 1], "subpix": 1, - "disp_row": [0, 3], - "disp_col": [-2, 2], + "disp_row": {"init": 1, "range": 1}, + "disp_col": {"init": 0, "range": 2}, "data_left": np.full((10, 10), 1), "data_right": np.full((10, 10), 1), }, - (10, 10, 5, 4), # (row, col, disp_col, disp_row) - np.arange(4), # [0, 1, 2, 3] + (10, 10, 5, 3), # (row, col, disp_col, disp_row) + np.arange(3), # [0, 1, 2] np.arange(-2, 3), # [-2, -1, 0, 1, 2] id="subpix=1, step_row=1 and step_col=1", ), @@ -737,13 +821,13 @@ def make_cost_volumes(self, make_image_fixture, request): { "step": [1, 1], "subpix": 2, - "disp_row": [0, 3], - "disp_col": [-2, 2], + "disp_row": {"init": 1, "range": 1}, + "disp_col": {"init": 0, "range": 2}, "data_left": np.full((10, 10), 1), "data_right": np.full((10, 10), 1), }, - (10, 10, 9, 7), # (row, col, disp_col, disp_row) - np.arange(0, 3.5, 0.5), # [0, 0.5, 1, 1.5, 2, 2.5, 3] + (10, 10, 9, 5), # (row, col, disp_col, disp_row) + np.arange(0, 2.5, 0.5), # [0, 0.5, 1, 1.5, 2] np.arange(-2, 2.5, 0.5), # [-2, -1.5, -1, -0.5, 0, 0.5, 1, 1.5, 2] id="subpix=2, step_row=1 and step_col=1", ), @@ -751,13 +835,13 @@ def make_cost_volumes(self, make_image_fixture, request): { "step": [2, 3], "subpix": 2, - "disp_row": [0, 3], - "disp_col": [-2, 2], + "disp_row": {"init": 1, "range": 1}, + "disp_col": {"init": 0, "range": 2}, "data_left": np.full((10, 10), 1), "data_right": np.full((10, 10), 1), }, - (5, 4, 9, 7), # (row, col, disp_col, disp_row) - np.arange(0, 3.5, 0.5), # [0, 0.5, 1, 1.5, 2, 2.5, 3] # step has no influence on subpix disparity range + (5, 4, 9, 5), # (row, col, disp_col, disp_row) + np.arange(0, 2.5, 0.5), # [0, 0.5, 1, 1.5, 2] # step has no influence on subpix disparity range np.arange(-2, 2.5, 0.5), # [-2, -1.5, -1, -0.5, 0, 0.5, 1, 1.5, 2] id="subpix=2, step_row=2 and step_col=3", ), @@ -765,13 +849,13 @@ def make_cost_volumes(self, make_image_fixture, request): { "step": [1, 1], "subpix": 4, - "disp_row": [0, 3], - "disp_col": [-2, 2], + "disp_row": {"init": 1, "range": 1}, + "disp_col": {"init": 0, "range": 2}, "data_left": np.full((10, 10), 1), "data_right": np.full((10, 10), 1), }, - (10, 10, 17, 13), # (row, col, disp_col, disp_row) - np.arange(0, 3.25, 0.25), # [0, 0.25, 0.5, 0.75, 1, 1.25, 1.5, 1.75, 2, 2.25, 2.5, 2.75, 3] + (10, 10, 17, 9), # (row, col, disp_col, disp_row) + np.arange(0, 2.25, 0.25), # [0, 0.25, 0.5, 0.75, 1, 1.25, 1.5, 1.75, 2] np.arange( -2, 2.25, 0.25 ), # [-2, -1.75, -1.5, -1.25, -1, -0.75, -0.5, -0.25, 0, 0.25, 0.5, 0.75, 1, 1.25, 1.5, 1.75, 2] @@ -781,15 +865,17 @@ def make_cost_volumes(self, make_image_fixture, request): { "step": [3, 2], "subpix": 4, - "disp_row": [0, 3], - "disp_col": [-2, 2], + "disp_row": {"init": 1, "range": 1}, + "disp_col": {"init": 0, "range": 2}, "data_left": np.full((10, 10), 1), "data_right": np.full((10, 10), 1), }, - (4, 5, 17, 13), # (row, col, disp_col, disp_row) + (4, 5, 17, 9), # (row, col, disp_col, disp_row) np.arange( - 0, 3.25, 0.25 # step has no influence on subpix disparity range - ), # [0, 0.25, 0.5, 0.75, 1, 1.25, 1.5, 1.75, 2, 2.25, 2.5, 2.75, 3] + 0, + 2.25, + 0.25, # step has no influence on subpix disparity range + ), # [0, 
0.25, 0.5, 0.75, 1, 1.25, 1.5, 1.75, 2] np.arange( -2, 2.25, 0.25 ), # [-2, -1.75, -1.5, -1.25, -1, -0.75, -0.5, -0.25, 0, 0.25, 0.5, 0.75, 1, 1.25, 1.5, 1.75, 2] @@ -819,8 +905,8 @@ def test_subpix(self, shape_expected, row_disparity, col_disparity, make_cost_vo { "step": [1, 1], "subpix": 2, - "disp_row": [-2, 2], - "disp_col": [-2, 2], + "disp_row": {"init": 0, "range": 2}, + "disp_col": {"init": 0, "range": 2}, "data_left": np.array( ([1, 1, 1, 1, 1], [2, 2, 2, 2, 2], [3, 3, 3, 3, 3], [4, 4, 4, 4, 4]), dtype=np.float64, @@ -843,8 +929,8 @@ def test_subpix(self, shape_expected, row_disparity, col_disparity, make_cost_vo { "step": [1, 1], "subpix": 4, - "disp_row": [-2, 2], - "disp_col": [-2, 2], + "disp_row": {"init": 0, "range": 2}, + "disp_col": {"init": 0, "range": 2}, "data_left": np.array( ([1, 1, 1, 1, 1], [2, 2, 2, 2, 2], [3, 3, 3, 3, 3], [4, 4, 4, 4, 4]), dtype=np.float64, @@ -867,8 +953,8 @@ def test_subpix(self, shape_expected, row_disparity, col_disparity, make_cost_vo { "step": [1, 1], "subpix": 4, - "disp_row": [-2, 2], - "disp_col": [-2, 2], + "disp_row": {"init": 0, "range": 2}, + "disp_col": {"init": 0, "range": 2}, "data_left": np.array( ([1, 1, 1, 1, 1], [2, 2, 2, 2, 2], [3, 3, 3, 3, 3], [4, 4, 4, 4, 4]), dtype=np.float64, @@ -905,7 +991,6 @@ def test_cost_volumes_values_subpix_positive_row(self, make_cost_volumes, index_ # If the shift is positive, we test all the rows expect the last one for which the shift is equal to 0. for col in range(cost_volumes["cost_volumes"].shape[1]): for row in range(cost_volumes["cost_volumes"].shape[0] - 1): - # index_min = all minimum value indexes index_min = np.where( cost_volumes["cost_volumes"][row, col, index_disp_col_zero, :] @@ -920,8 +1005,8 @@ def test_cost_volumes_values_subpix_positive_row(self, make_cost_volumes, index_ { "step": [1, 1], "subpix": 2, - "disp_row": [-2, 2], - "disp_col": [-2, 2], + "disp_row": {"init": 0, "range": 2}, + "disp_col": {"init": 0, "range": 2}, "data_left": np.array( ([1, 1, 1, 1, 1], [2, 2, 2, 2, 2], [3, 3, 3, 3, 3], [4, 4, 4, 4, 4]), dtype=np.float64, @@ -944,8 +1029,8 @@ def test_cost_volumes_values_subpix_positive_row(self, make_cost_volumes, index_ { "step": [1, 1], "subpix": 4, - "disp_row": [-2, 2], - "disp_col": [-2, 2], + "disp_row": {"init": 0, "range": 2}, + "disp_col": {"init": 0, "range": 2}, "data_left": np.array( ([1, 1, 1, 1, 1], [2, 2, 2, 2, 2], [3, 3, 3, 3, 3], [4, 4, 4, 4, 4]), dtype=np.float64, @@ -968,8 +1053,8 @@ def test_cost_volumes_values_subpix_positive_row(self, make_cost_volumes, index_ { "step": [1, 1], "subpix": 4, - "disp_row": [-2, 2], - "disp_col": [-2, 2], + "disp_row": {"init": 0, "range": 2}, + "disp_col": {"init": 0, "range": 2}, "data_left": np.array( ([1, 1, 1, 1, 1], [2, 2, 2, 2, 2], [3, 3, 3, 3, 3], [4, 4, 4, 4, 4]), dtype=np.float64, @@ -1006,7 +1091,6 @@ def test_cost_volumes_values_subpix_negative_row(self, make_cost_volumes, index_ # If the shift is negative, we test all the rows expect the first one for which the shift is equal to 0. 
for col in range(cost_volumes["cost_volumes"].shape[1]): for row in range(1, cost_volumes["cost_volumes"].shape[0]): - # index_min = all minimum value indexes index_min = np.where( cost_volumes["cost_volumes"][row, col, index_disp_col_zero, :] @@ -1021,8 +1105,8 @@ def test_cost_volumes_values_subpix_negative_row(self, make_cost_volumes, index_ { "step": [1, 1], "subpix": 2, - "disp_row": [-2, 2], - "disp_col": [-2, 2], + "disp_row": {"init": 0, "range": 2}, + "disp_col": {"init": 0, "range": 2}, "data_left": np.array( ([1, 2, 3, 4, 5], [1, 2, 3, 4, 5], [1, 2, 3, 4, 5], [1, 2, 3, 4, 5]), dtype=np.float64, @@ -1045,8 +1129,8 @@ def test_cost_volumes_values_subpix_negative_row(self, make_cost_volumes, index_ { "step": [1, 1], "subpix": 4, - "disp_row": [-2, 2], - "disp_col": [-2, 2], + "disp_row": {"init": 0, "range": 2}, + "disp_col": {"init": 0, "range": 2}, "data_left": np.array( ([1, 2, 3, 4, 5], [1, 2, 3, 4, 5], [1, 2, 3, 4, 5], [1, 2, 3, 4, 5]), dtype=np.float64, @@ -1069,8 +1153,8 @@ def test_cost_volumes_values_subpix_negative_row(self, make_cost_volumes, index_ { "step": [1, 1], "subpix": 4, - "disp_row": [-2, 2], - "disp_col": [-2, 2], + "disp_row": {"init": 0, "range": 2}, + "disp_col": {"init": 0, "range": 2}, "data_left": np.array( ([1, 2, 3, 4, 5], [1, 2, 3, 4, 5], [1, 2, 3, 4, 5], [1, 2, 3, 4, 5]), dtype=np.float64, @@ -1107,7 +1191,6 @@ def test_cost_volumes_values_subpix_positive_col(self, make_cost_volumes, index_ # If the shift is positive, we test all the columns expect the last one for which the shift is equal to 0. for col in range(cost_volumes["cost_volumes"].shape[1] - 1): for row in range(cost_volumes["cost_volumes"].shape[0]): - # index_min = all minimum value indexes index_min = np.where( cost_volumes["cost_volumes"][row, col, :, index_disp_row_zero] @@ -1122,8 +1205,8 @@ def test_cost_volumes_values_subpix_positive_col(self, make_cost_volumes, index_ { "step": [1, 1], "subpix": 2, - "disp_row": [-2, 2], - "disp_col": [-2, 2], + "disp_row": {"init": 0, "range": 2}, + "disp_col": {"init": 0, "range": 2}, "data_left": np.array( ([1, 2, 3, 4, 5], [1, 2, 3, 4, 5], [1, 2, 3, 4, 5], [1, 2, 3, 4, 5]), dtype=np.float64, @@ -1146,8 +1229,8 @@ def test_cost_volumes_values_subpix_positive_col(self, make_cost_volumes, index_ { "step": [1, 1], "subpix": 4, - "disp_row": [-2, 2], - "disp_col": [-2, 2], + "disp_row": {"init": 0, "range": 2}, + "disp_col": {"init": 0, "range": 2}, "data_left": np.array( ([1, 2, 3, 4, 5], [1, 2, 3, 4, 5], [1, 2, 3, 4, 5], [1, 2, 3, 4, 5]), dtype=np.float64, @@ -1170,8 +1253,8 @@ def test_cost_volumes_values_subpix_positive_col(self, make_cost_volumes, index_ { "step": [1, 1], "subpix": 4, - "disp_row": [-2, 2], - "disp_col": [-2, 2], + "disp_row": {"init": 0, "range": 2}, + "disp_col": {"init": 0, "range": 2}, "data_left": np.array( ([1, 2, 3, 4, 5], [1, 2, 3, 4, 5], [1, 2, 3, 4, 5], [1, 2, 3, 4, 5]), dtype=np.float64, @@ -1208,7 +1291,6 @@ def test_cost_volumes_values_subpix_negative_col(self, make_cost_volumes, index_ # If the shift is negative, we test all the columns expect the first one for which the shift is equal to 0. 
for col in range(1, cost_volumes["cost_volumes"].shape[1]): for row in range(cost_volumes["cost_volumes"].shape[0]): - # index_min = all minimum value indexes index_min = np.where( cost_volumes["cost_volumes"][row, col, :, index_disp_row_zero] @@ -1223,8 +1305,8 @@ def test_cost_volumes_values_subpix_negative_col(self, make_cost_volumes, index_ { "step": [1, 1], "subpix": 1, - "disp_row": [-2, 2], - "disp_col": [-2, 2], + "disp_row": {"init": 0, "range": 2}, + "disp_col": {"init": 0, "range": 2}, "data_left": np.array( ([1, 1, 1, 1, 1], [2, 2, 2, 2, 2], [3, 3, 3, 3, 3], [4, 4, 4, 4, 4]), dtype=np.float64, @@ -1275,8 +1357,8 @@ def test_cost_volumes_values_subpix_1_row(self, make_cost_volumes, row, col, dis { "step": [1, 1], "subpix": 1, - "disp_row": [-2, 2], - "disp_col": [-2, 2], + "disp_row": {"init": 0, "range": 2}, + "disp_col": {"init": 0, "range": 2}, "data_left": np.array( ([1, 2, 3, 4, 5], [1, 2, 3, 4, 5], [1, 2, 3, 4, 5], [1, 2, 3, 4, 5]), dtype=np.float64, @@ -1338,16 +1420,16 @@ def create_datasets(self): coords={"row": np.arange(data.shape[0]), "col": np.arange(data.shape[1])}, ) - add_disparity_grid(left, [0, 1], [-1, 0]) + left.pipe(add_disparity_grid, {"init": 1, "range": 1}, {"init": -1, "range": 1}) - left.attrs = { - "no_data_img": -9999, - "valid_pixels": 0, - "no_data_mask": 1, - "crs": None, - "col_disparity_source": [0, 1], - "row_disparity_source": [-1, 0], - } + left.attrs.update( + { + "no_data_img": -9999, + "valid_pixels": 0, + "no_data_mask": 1, + "crs": None, + } + ) data = np.full((5, 5), 1) right = xr.Dataset( @@ -1360,8 +1442,8 @@ def create_datasets(self): "valid_pixels": 0, "no_data_mask": 1, "crs": None, - "col_disparity_source": [0, 1], - "row_disparity_source": [-1, 0], + "disp_row": {"init": 1, "range": 1}, + "disp_col": {"init": -1, "range": 1}, } return left, right @@ -1373,44 +1455,35 @@ def create_datasets(self): pytest.param( None, 1, - (5, 5, 2, 2), # margins=None -> we do not add disparity margins - [0, 1], - [-1, 0], + (5, 5, 3, 3), # margins=None -> we do not add disparity margins + np.arange(0, 2.25, 1), + np.arange(-2, 0.25, 1), id="Margins=None", ), pytest.param( Margins(0, 0, 0, 0), 1, - (5, 5, 2, 2), - [0, 1], - [-1, 0], # margins=(0,0,0,0) -> we do not add disparity margins + (5, 5, 3, 3), + np.arange(0, 2.25, 1), + np.arange(-2, 0.25, 1), + # margins=(0,0,0,0) -> we do not add disparity margins id="Margins(left=0, up=0, right=0, down=0)", ), pytest.param( Margins(3, 3, 3, 3), 1, - ( - 5, - 5, - 8, - 8, - ), - [-3, -2, -1, 0, 1, 2, 3, 4], - [-4, -3, -2, -1, 0, 1, 2, 3], + (5, 5, 9, 9), + np.arange(-3, 5.25, 1), + np.arange(-5, 3.25, 1), # margins=(3,3,3,3) -> we add a margin of 3 on disp_min_col, disp_max_col, disp_min_row, disp_max_row id="Margins(left=3, up=3, right=3, down=3)", ), pytest.param( Margins(0, 1, 2, 3), 1, - ( - 5, - 5, - 4, - 6, - ), - [0, 1, 2, 3], - [-2, -1, 0, 1, 2, 3], + (5, 5, 5, 7), + np.arange(0, 4.25, 1), + np.arange(-3, 3.25, 1), # margins=(0,1,2,3) -> we add a margin of 0 on disp_min_col, 2 on disp_max_col, # 1 on disp_min_row and 3 on disp_max_row id="Margins(left=0, up=1, right=2, down=3)", @@ -1418,14 +1491,9 @@ def create_datasets(self): pytest.param( Margins(4, 2, 4, 2), 1, - ( - 5, - 5, - 10, - 6, - ), - [-4, -3, -2, -1, 0, 1, 2, 3, 4, 5], - [-3, -2, -1, 0, 1, 2], + (5, 5, 11, 7), + np.arange(-4, 6.25, 1), + np.arange(-4, 2.25, 1), # margins=(4,2,4,2) -> we add a margin of 4 on disp_min_col and on disp_max_col # and of 2 on disp_min_row and disp_max_row id="Margins(left=4, up=2, right=4, down=2)", @@ 
-1433,14 +1501,9 @@ def create_datasets(self): pytest.param( Margins(2, 6, 2, 6), 1, - ( - 5, - 5, - 6, - 14, - ), - [-2, -1, 0, 1, 2, 3], - [-7, -6, -5, -4, -3, -2, -1, 0, 1, 2, 3, 4, 5, 6], + (5, 5, 7, 15), + np.arange(-2, 4.25, 1), + np.arange(-8, 6.25, 1), # margins=(2,6,2,6) -> we add a margin of 2 on disp_min_col and on disp_max_col # and of 6 on disp_min_row and disp_max_row id="Margins(left=2, up=6, right=2, down=6)", @@ -1448,14 +1511,9 @@ def create_datasets(self): pytest.param( Margins(6, 2, 6, 2), 1, - ( - 5, - 5, - 14, - 6, - ), - [-6, -5, -4, -3, -2, -1, 0, 1, 2, 3, 4, 5, 6, 7], - [-3, -2, -1, 0, 1, 2], + (5, 5, 15, 7), + np.arange(-6, 8.25, 1), + np.arange(-4, 2.25, 1), # margins=(6,2,6,2) -> we add a margin of 6 on disp_min_col and on disp_max_col # and of 2 on disp_min_row and disp_max_row id="Margins(left=6, up=2, right=6, down=2)", @@ -1463,14 +1521,9 @@ def create_datasets(self): pytest.param( Margins(3, 3, 3, 3), 2, - ( - 5, - 5, - 15, - 15, - ), - [-3, -2.5, -2, -1.5, -1, -0.5, 0, 0.5, 1, 1.5, 2, 2.5, 3, 3.5, 4], - [-4, -3.5, -3, -2.5, -2, -1.5, -1, -0.5, 0, 0.5, 1, 1.5, 2, 2.5, 3], + (5, 5, 17, 17), + np.arange(-3, 5.25, 0.5), + np.arange(-5, 3.25, 0.5), # margins=(3,3,3,3) and subpix=2 -> we add a margin of 3x2 on disp_min_col, disp_max_col, # disp_min_row, disp_max_row id="Margins(left=3, up=3, right=3, down=3), subpix=2", @@ -1478,14 +1531,9 @@ def create_datasets(self): pytest.param( Margins(0, 1, 2, 3), 2, - ( - 5, - 5, - 7, - 11, - ), - [0, 0.5, 1, 1.5, 2, 2.5, 3], - [-2, -1.5, -1, -0.5, 0, 0.5, 1, 1.5, 2, 2.5, 3], + (5, 5, 9, 13), + np.arange(0, 4.25, 0.5), + np.arange(-3, 3.25, 0.5), # margins=(0,1,2,3) -> we add a margin of 0 on disp_min_col, 2x2 on disp_max_col, # 1x2 on disp_min_row and 3x2 on disp_max_row id="Margins(left=0, up=1, right=2, down=3)", @@ -1493,14 +1541,9 @@ def create_datasets(self): pytest.param( Margins(6, 4, 2, 3), 2, - ( - 5, - 5, - 19, - 17, - ), - [-6, -5.5, -5, -4.5, -4, -3.5, -3, -2.5, -2, -1.5, -1, -0.5, 0, 0.5, 1, 1.5, 2, 2.5, 3], - [-5, -4.5, -4, -3.5, -3, -2.5, -2, -1.5, -1, -0.5, 0, 0.5, 1, 1.5, 2, 2.5, 3], + (5, 5, 21, 19), + np.arange(-6, 4.25, 0.5), + np.arange(-6, 3.25, 0.5), # margins=(6,4,2,3) -> we add a margin of 6x2 on disp_min_col, 2x2 on disp_max_col, # 4x2 on disp_min_row and 3x2 on disp_max_row id="Margins(left=6, up=4, right=2, down=3)", @@ -1508,49 +1551,17 @@ def create_datasets(self): pytest.param( Margins(0, 0, 0, 0), 4, - ( - 5, - 5, - 5, - 5, - ), - [0, 0.25, 0.5, 0.75, 1], - [-1, -0.75, -0.5, -0.25, 0], # we do not add disparity margins + (5, 5, 9, 9), + np.arange(0, 2.25, 0.25), + np.arange(-2, 0.25, 0.25), # we do not add disparity margins id="Margins(left=0, up=0, right=0, down=0), subpix=4", ), pytest.param( Margins(0, 1, 2, 3), 4, - ( - 5, - 5, - 13, - 21, - ), - [0, 0.25, 0.5, 0.75, 1, 1.25, 1.5, 1.75, 2, 2.25, 2.5, 2.75, 3], - [ - -2, - -1.75, - -1.5, - -1.25, - -1, - -0.75, - -0.5, - -0.25, - 0, - 0.25, - 0.5, - 0.75, - 1, - 1.25, - 1.5, - 1.75, - 2, - 2.25, - 2.5, - 2.75, - 3, - ], + (5, 5, 17, 25), + np.arange(0, 4.25, 0.25), + np.arange(-3, 3.25, 0.25), # margins=(0,1,2,3) -> we add a margin of 0 on disp_min_col, 2x4 on disp_max_col, # 1x4 on disp_min_row and 3x4 on disp_max_row id="Margins(left=0, up=1, right=2, down=3), subpix=4", @@ -1558,14 +1569,9 @@ def create_datasets(self): pytest.param( Margins(3, 3, 3, 3), 4, - ( - 5, - 5, - 29, - 29, - ), - np.arange(-3, 4.25, 0.25), - np.arange(-4, 3.25, 0.25), + (5, 5, 33, 33), + np.arange(-3, 5.25, 0.25), + np.arange(-5, 3.25, 0.25), # 
margins=(3,3,3,3) and subpix=4 -> we add a margin of 3x4 on disp_min_col, disp_max_col, # disp_min_row, disp_max_row id="Margins(left=3, up=3, right=3, down=3), subpix=4", @@ -1595,25 +1601,14 @@ def test_compute_cost_volume_margins( matching_cost_matcher.allocate_cost_volume_pandora( img_left=left, img_right=right, - grid_min_col=np.full((5, 5), 0), - grid_max_col=np.full((5, 5), 1), cfg=cfg, margins=margins, ) - # minimum and maximum column disparity may have been modified - # after disparity margins were added in allocate_cost_volume_pandora - disp_col_min = matching_cost_matcher.grid_.disp.min() - disp_col_max = matching_cost_matcher.grid_.disp.max() - # compute cost volumes cost_volumes = matching_cost_matcher.compute_cost_volumes( img_left=left, img_right=right, - grid_min_col=np.full((5, 5), disp_col_min), - grid_max_col=np.full((5, 5), disp_col_max), - grid_min_row=np.full((5, 5), -1), - grid_max_row=np.full((5, 5), 0), margins=margins, ) diff --git a/tests/unit_tests/test_pandora2d.py b/tests/unit_tests/test_pandora2d.py index 5e55f3a..e7998d9 100644 --- a/tests/unit_tests/test_pandora2d.py +++ b/tests/unit_tests/test_pandora2d.py @@ -24,6 +24,7 @@ """ Test state_machine """ + import copy import numpy as np @@ -45,7 +46,9 @@ class TestPandora2D: @staticmethod def test_run_pandora(correct_pipeline, false_pipeline_mc, false_pipeline_disp) -> None: """ - Test function for checking user input section + Description : Test function for checking user input section + Data : + Requirement : EX_CONF_08 """ pandora2d_machine = state_machine.Pandora2DMachine() @@ -69,8 +72,8 @@ def test_run_prepare(left_img_path, right_img_path) -> None: input_config = { "left": {"img": left_img_path, "nodata": -9999}, "right": {"img": right_img_path, "nodata": -9999}, - "col_disparity": [-2, 2], - "row_disparity": [-2, 2], + "col_disparity": {"init": 1, "range": 2}, + "row_disparity": {"init": 1, "range": 2}, } img_left, img_right = create_datasets_from_inputs(input_config=input_config) @@ -79,25 +82,13 @@ def test_run_prepare(left_img_path, right_img_path) -> None: assert pandora2d_machine.left_img == img_left assert pandora2d_machine.right_img == img_right assert pandora2d_machine.completed_cfg == input_config - np.testing.assert_array_equal( - pandora2d_machine.disp_min_col, np.full((img_left.sizes["row"], img_left.sizes["col"]), -2) - ) - np.testing.assert_array_equal( - pandora2d_machine.disp_max_col, np.full((img_left.sizes["row"], img_left.sizes["col"]), 2) - ) - np.testing.assert_array_equal( - pandora2d_machine.disp_min_row, np.full((img_left.sizes["row"], img_left.sizes["col"]), -2) - ) - np.testing.assert_array_equal( - pandora2d_machine.disp_max_row, np.full((img_left.sizes["row"], img_left.sizes["col"]), 2) - ) @pytest.mark.parametrize( ["refinement_config", "expected"], [ pytest.param({"refinement_method": "interpolation"}, Margins(3, 3, 3, 3), id="interpolation"), pytest.param( - {"refinement_method": "dichotomy", "iterations": 3, "filter": "bicubic"}, + {"refinement_method": "dichotomy", "iterations": 3, "filter": {"method": "bicubic"}}, Margins(2, 2, 2, 2), id="dichotomy with bicubic filter", ), diff --git a/tests/unit_tests/test_refinement/test_dichotomy.py b/tests/unit_tests/test_refinement/test_dichotomy.py index 0f1b2f8..9c9c65f 100644 --- a/tests/unit_tests/test_refinement/test_dichotomy.py +++ b/tests/unit_tests/test_refinement/test_dichotomy.py @@ -38,6 +38,7 @@ # Make pylint happy with fixtures: # pylint: disable=redefined-outer-name +# pylint: disable=too-many-lines 
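Editor's note on the expected values used in the margin/subpix parametrizations above and in the dichotomy tests that follow (a sketch under stated assumptions, not part of the patch): a disparity given as {"init": i, "range": r} covers [i - r, i + r]; the margin tests widen that interval by the requested margins (left/right for columns, up/down for rows) and sample it with a step of 1/subpix; the dichotomy parametrizations are consistent with a precision of 0.5**iterations, where the first log2(subpix) iterations are already covered by the subpixel sampling. The helper names below are hypothetical and only illustrate that arithmetic.

# Hypothetical helpers (editor's sketch, not pandora2d code) reproducing the arithmetic
# behind the expected np.arange disparity coordinates and the subpix/iterations cases.
import numpy as np

def expected_disparity_coords(init, rng, margin_before, margin_after, subpix):
    # {"init": init, "range": rng} -> [init - rng, init + rng], widened by the margins
    # (left/right margins for columns, up/down margins for rows), sampled at 1 / subpix.
    d_min = init - rng - margin_before
    d_max = init + rng + margin_after
    return np.arange(d_min, d_max + 1 / (2 * subpix), 1 / subpix)

def skipped_dichotomy_iterations(subpix):
    # Subpixel sampling already provides a precision of 1 / subpix,
    # i.e. log2(subpix) dichotomy steps are reached before the loop starts.
    return int(np.log2(subpix))

For instance, expected_disparity_coords(1, 1, 3, 3, 2) yields the np.arange(-3, 5.25, 0.5) column coordinates expected for Margins(left=3, up=3, right=3, down=3) with subpix=2, and skipped_dichotomy_iterations(4) == 2 matches the (subpix=4, nb_of_skipped=2) cases of test_skip_iterations_with_subpixel.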
@pytest.fixture() @@ -75,6 +76,60 @@ def type_measure(): return "max" +@pytest.fixture() +def subpixel(): + return 1 + + +# Once the criteria for identifying extremas at the edge of disparity ranges has been implemented, +# this fixture could possibly be removed. +@pytest.fixture() +def left_img(rows, cols, min_disparity_row, max_disparity_row, min_disparity_col, max_disparity_col): + """ + Creates a left image dataset + """ + + img = xr.Dataset( + {"im": (["row", "col"], np.full((rows.size, cols.size), 0))}, + coords={"row": rows, "col": cols}, + ) + + d_min_col = np.full((rows.size, cols.size), min_disparity_col) + d_max_col = np.full((rows.size, cols.size), max_disparity_col) + d_min_row = np.full((rows.size, cols.size), min_disparity_row) + d_max_row = np.full((rows.size, cols.size), max_disparity_row) + + # Once the variable disparity grids have been introduced into pandora2d, + # it will be possible to call a method such as add_disparity_grid + # to complete img with uniform or non-uniform disparity grids. + + # Here, it is completed by hand because the disparity range is even. + img["col_disparity"] = xr.DataArray( + np.array([d_min_col, d_max_col]), + dims=["band_disp", "row", "col"], + coords={"band_disp": ["min", "max"]}, + ) + + img["row_disparity"] = xr.DataArray( + np.array([d_min_row, d_max_row]), + dims=["band_disp", "row", "col"], + coords={"band_disp": ["min", "max"]}, + ) + + img.attrs.update( + { + "no_data_img": -9999, + "valid_pixels": 0, + "no_data_mask": 1, + "crs": None, + "row_disparity_source": [np.min(d_min_row), np.max(d_max_row)], + "col_disparity_source": [np.min(d_min_col), np.max(d_max_col)], + } + ) + + return img + + @pytest.fixture() def zeros_cost_volumes( rows, @@ -84,10 +139,11 @@ def zeros_cost_volumes( min_disparity_col, max_disparity_col, type_measure, + subpixel, ): """Create a cost_volumes full of zeros.""" - number_of_disparity_col = max_disparity_col - min_disparity_col + 1 - number_of_disparity_row = max_disparity_row - min_disparity_row + 1 + number_of_disparity_col = int((max_disparity_col - min_disparity_col) * subpixel + 1) + number_of_disparity_row = int((max_disparity_row - min_disparity_row) * subpixel + 1) data = np.zeros((rows.size, cols.size, number_of_disparity_col, number_of_disparity_row)) attrs = { @@ -97,14 +153,15 @@ def zeros_cost_volumes( "sampling_interval": 1, "type_measure": type_measure, "step": [1, 1], + "subpixel": subpixel, } return MatchingCost.allocate_cost_volumes( attrs, rows, cols, - np.arange(min_disparity_col, max_disparity_col + 1), - np.arange(min_disparity_row, max_disparity_row + 1), + np.linspace(min_disparity_col, max_disparity_col, number_of_disparity_col), + np.linspace(min_disparity_row, max_disparity_row, number_of_disparity_row), data, ) @@ -144,21 +201,44 @@ def disp_map(invalid_disparity, rows, cols): @pytest.fixture() -def config(): - """Basic configuration expected to be good.""" - return {"refinement_method": "dichotomy", "iterations": 2, "filter": "bicubic"} +def iterations(): + return 2 -def test_factory(config): - """With `refinement_method` equals to `dichotomy`, we should get a Dichotomy object.""" - dichotomy_instance = refinement.AbstractRefinement(config) # type: ignore[abstract] +@pytest.fixture() +def filter_name(): + return "bicubic" + + +@pytest.fixture() +def config(iterations, filter_name): + return { + "refinement_method": "dichotomy", + "iterations": iterations, + "filter": {"method": filter_name}, + } + + +@pytest.fixture() +def dichotomy_instance(config): + return 
refinement.dichotomy.Dichotomy(config) + +def test_factory(dichotomy_instance): + """ + Description : With `refinement_method` equals to `dichotomy`, we should get a Dichotomy object. + Data : + Requirement : EX_REF_DICH_00 + """ assert isinstance(dichotomy_instance, refinement.dichotomy.Dichotomy) assert isinstance(dichotomy_instance, refinement.AbstractRefinement) class TestCheckConf: - """Test the check_conf method.""" + """ + Description : Test the check_conf method. + Requirement : EX_CONF_08, EX_REF_01, EX_REF_DICH_01 + """ def test_method_field(self, config): """An exception should be raised if `refinement_method` is not `dichotomy`.""" @@ -168,21 +248,20 @@ def test_method_field(self, config): refinement.dichotomy.Dichotomy(config) assert "invalid_method" in err.value.args[0] + @pytest.mark.parametrize("iterations", [0]) def test_iterations_below_minimum(self, config): """An exception should be raised.""" - config["iterations"] = 0 - with pytest.raises(json_checker.core.exceptions.DictCheckerError) as err: refinement.dichotomy.Dichotomy(config) assert "Not valid data" in err.value.args[0] assert "iterations" in err.value.args[0] + @pytest.mark.parametrize("iterations", [10]) def test_iterations_above_maximum(self, config, caplog): """Test that when user set an iteration value above defined maximum, we replace it by this maximum and log a warning. """ - config["iterations"] = 10 - + # caplog does not capture logs from fixture, so we can not use dichotomy_instance fixture dichotomy_instance = refinement.dichotomy.Dichotomy(config) assert dichotomy_instance.cfg["iterations"] == 9 @@ -192,26 +271,74 @@ def test_iterations_above_maximum(self, config, caplog): ) @pytest.mark.parametrize("iterations", [1, 9]) - def test_iterations_in_allowed_range(self, config, iterations): + def test_iterations_in_allowed_range(self, iterations, dichotomy_instance): """It should not fail.""" - config["iterations"] = iterations + assert dichotomy_instance.cfg["iterations"] == iterations - dichotomy_instance = refinement.dichotomy.Dichotomy(config) + @pytest.mark.parametrize( + ["config"], + [ + pytest.param( + { + "refinement_method": "dichotomy", + "iterations": 1, + "filter": {"method": "bicubic"}, + }, + id="bicubic", + ), + pytest.param( + { + "refinement_method": "dichotomy", + "iterations": 1, + "filter": {"method": "sinc"}, + }, + id="sinc", + ), + ], + ) + def test_valid_filter_names(self, config, dichotomy_instance): + """ + Description : Test accepted filter names. 
+ Data : + Requirement : + * EX_REF_BCO_00 + * EX_REF_SINC_00 + """ - assert dichotomy_instance.cfg["iterations"] == iterations + assert dichotomy_instance.cfg["filter"] == config["filter"] - @pytest.mark.parametrize("filter_name", ["bicubic"]) - def test_valid_filter_names(self, config, filter_name): + @pytest.mark.parametrize( + ["config"], + [ + pytest.param( + { + "refinement_method": "dichotomy", + "iterations": 1, + "filter": {"method": "sinc", "size": 42}, + }, + id="sinc", + ), + ], + ) + def test_fails_with_bad_filter_configuration(self, config): """Test accepted filter names.""" - config["filter"] = filter_name - - dichotomy_instance = refinement.dichotomy.Dichotomy(config) + with pytest.raises(json_checker.core.exceptions.DictCheckerError) as err: + refinement.dichotomy.Dichotomy(config) + assert "size" in err.value.args[0] - assert dichotomy_instance.cfg["filter"] == filter_name + @pytest.mark.parametrize("filter_name", ["invalid_name"]) + def test_faild_with_invalid_filter_name(self, config): + with pytest.raises(json_checker.core.exceptions.DictCheckerError) as err: + refinement.dichotomy.Dichotomy(config) + assert "filter" in err.value.args[0] @pytest.mark.parametrize("missing", ["refinement_method", "iterations", "filter"]) def test_fails_on_missing_keys(self, config, missing): - """Should raise an error when a mandatory key is missing.""" + """ + Description : Should raise an error when a mandatory key is missing. + Data : + Requirement : EX_CONF_08 + """ del config[missing] with pytest.raises(json_checker.core.exceptions.MissKeyCheckerError) as err: @@ -264,30 +391,19 @@ def cost_volumes(self, zeros_cost_volumes): def iterations(self): return 1 - @pytest.fixture() - def filter_name(self): - return "bicubic" - - @pytest.fixture() - def config(self, iterations, filter_name): - return { - "refinement_method": "dichotomy", - "iterations": iterations, - "filter": filter_name, - } - - @pytest.fixture() - def dichotomy_instance(self, config): - return refinement.dichotomy.Dichotomy(config) - + @pytest.mark.parametrize("subpixel", [1, 2]) @pytest.mark.parametrize(["iterations", "precision"], [[1, 0.5], [2, 0.25], [3, 0.125]]) def test_precision_is_logged( - self, cost_volumes, disp_map, dichotomy_instance, precision, mocker: MockerFixture, caplog + self, cost_volumes, disp_map, dichotomy_instance, precision, left_img, mocker: MockerFixture, caplog ): - """Precision should be logged.""" + """ + Description : Precision should be logged. 
+ Data : + Requirement : EX_REF_DICH_01 + """ with caplog.at_level(logging.INFO): - dichotomy_instance.refinement_method(cost_volumes, disp_map, img_left=mocker.ANY, img_right=mocker.ANY) - assert caplog.record_tuples == [("root", logging.INFO, f"Dichotomy precision reached: {precision}")] + dichotomy_instance.refinement_method(cost_volumes, disp_map, left_img, img_right=mocker.ANY) + assert ("root", logging.INFO, f"Dichotomy precision reached: {precision}") in caplog.record_tuples @pytest.mark.parametrize( ["type_measure", "expected"], @@ -297,7 +413,7 @@ def test_precision_is_logged( ], ) def test_which_cost_selection_method_is_used( - self, dichotomy_instance, cost_volumes, disp_map, type_measure, expected, mocker: MockerFixture + self, dichotomy_instance, cost_volumes, disp_map, left_img, type_measure, expected, mocker: MockerFixture ): """Test cost_volume's type_measure attrs determines which cost_selection_method is used.""" cost_volumes.attrs["type_measure"] = type_measure @@ -307,7 +423,7 @@ def test_which_cost_selection_method_is_used( return_value=(refinement.dichotomy.Point(0, 0), 0, 0, 0), ) - dichotomy_instance.refinement_method(cost_volumes, disp_map, img_left=mocker.ANY, img_right=mocker.ANY) + dichotomy_instance.refinement_method(cost_volumes, disp_map, left_img, img_right=mocker.ANY) mocked_search_new_best_point.assert_called_with( cost_surface=mocker.ANY, @@ -319,13 +435,13 @@ def test_which_cost_selection_method_is_used( cost_selection_method=expected, ) - def test_result_of_one_iteration(self, dichotomy_instance, cost_volumes, disp_map, mocker: MockerFixture): + def test_result_of_one_iteration(self, dichotomy_instance, cost_volumes, disp_map, left_img, mocker: MockerFixture): """Test result of refinement method is as expected.""" copy_disp_map = copy.deepcopy(disp_map) result_disp_col, result_disp_row, _ = dichotomy_instance.refinement_method( - cost_volumes, copy_disp_map, img_left=mocker.ANY, img_right=mocker.ANY + cost_volumes, copy_disp_map, left_img, img_right=mocker.ANY ) assert result_disp_col[0, 2] == disp_map["col_map"][0, 2] + 0.5 @@ -334,13 +450,15 @@ def test_result_of_one_iteration(self, dichotomy_instance, cost_volumes, disp_ma assert result_disp_col[1, 0] == disp_map["col_map"][1, 0] - 0.5 @pytest.mark.parametrize("iterations", [2]) - def test_result_of_two_iterations(self, dichotomy_instance, cost_volumes, disp_map, mocker: MockerFixture): + def test_result_of_two_iterations( + self, dichotomy_instance, cost_volumes, left_img, disp_map, mocker: MockerFixture + ): """Test result of refinement method is as expected.""" copy_disp_map = copy.deepcopy(disp_map) result_disp_col, result_disp_row, _ = dichotomy_instance.refinement_method( - cost_volumes, copy_disp_map, img_left=mocker.ANY, img_right=mocker.ANY + cost_volumes, copy_disp_map, left_img, img_right=mocker.ANY ) # Different results from the spline filter @@ -349,7 +467,7 @@ def test_result_of_two_iterations(self, dichotomy_instance, cost_volumes, disp_m assert result_disp_row[1, 0] == disp_map["row_map"][1, 0] assert result_disp_col[1, 0] == disp_map["col_map"][1, 0] - 0.25 - def test_with_nans_in_disp_map(self, dichotomy_instance, cost_volumes, disp_map, mocker: MockerFixture): + def test_with_nans_in_disp_map(self, dichotomy_instance, cost_volumes, disp_map, left_img, mocker: MockerFixture): """Test that even with NaNs in disparity maps we can extract values from cost_volumes.""" # Convert disp_map to float so that it can store NaNs: disp_map = disp_map.astype(np.float32) @@ -359,7 +477,7 @@ def 
test_with_nans_in_disp_map(self, dichotomy_instance, cost_volumes, disp_map, copy_disp_map = copy.deepcopy(disp_map) result_disp_col, result_disp_row, _ = dichotomy_instance.refinement_method( - cost_volumes, copy_disp_map, img_left=mocker.ANY, img_right=mocker.ANY + cost_volumes, copy_disp_map, left_img, img_right=mocker.ANY ) assert result_disp_row[0, 2] == disp_map["row_map"][0, 2] - 0.5 @@ -369,7 +487,7 @@ def test_with_nans_in_disp_map(self, dichotomy_instance, cost_volumes, disp_map, @pytest.mark.parametrize("invalid_disparity", [-9999]) def test_with_invalid_values_in_disp_map( - self, dichotomy_instance, cost_volumes, disp_map, invalid_disparity, mocker: MockerFixture + self, dichotomy_instance, cost_volumes, disp_map, left_img, invalid_disparity, mocker: MockerFixture ): """Test that even with invalid values in disparity maps we can extract other values from cost_volumes.""" # use indexes for row and col to be independent of coordinates which depend on ROI themselves, @@ -381,7 +499,7 @@ def test_with_invalid_values_in_disp_map( result_disp_col, result_disp_row, _ = dichotomy_instance.refinement_method( cost_volumes, copy_disp_map, - img_left=mocker.ANY, + left_img, img_right=mocker.ANY, ) @@ -397,6 +515,7 @@ def test_disparity_map_is_within_range( # pylint: disable=too-many-arguments dichotomy_instance, cost_volumes, disp_map, + left_img, min_disparity_row, min_disparity_col, max_disparity_row, @@ -413,7 +532,7 @@ def test_disparity_map_is_within_range( # pylint: disable=too-many-arguments result_disp_col, result_disp_row, _ = dichotomy_instance.refinement_method( cost_volumes, disp_map, - img_left=mocker.ANY, + left_img, img_right=mocker.ANY, ) @@ -422,17 +541,84 @@ def test_disparity_map_is_within_range( # pylint: disable=too-many-arguments assert np.nanmax(result_disp_row) <= max_disparity_row assert np.nanmax(result_disp_col) <= max_disparity_col + @pytest.mark.parametrize( + ["subpixel", "iterations", "nb_of_skipped"], + [ + pytest.param(2, 1, 1), + pytest.param(4, 1, 2), + pytest.param(4, 2, 2), + pytest.param(8, 3, 3), + ], + ) + def test_skip_iterations_with_subpixel( # pylint: disable=too-many-arguments + self, + dichotomy_instance, + cost_volumes, + disp_map, + left_img, + subpixel, + nb_of_skipped, + caplog, + mocker: MockerFixture, + ): + """First iterations must be skipped since precision is already reached by subpixel.""" + result_disp_map = copy.deepcopy(disp_map) + with caplog.at_level(logging.INFO): + result_disp_col, result_disp_row, _ = dichotomy_instance.refinement_method( + cost_volumes, + result_disp_map, + img_left=left_img, + img_right=mocker.ANY, + ) -def test_margins(): - """ - Test margins of Dichotomy. - """ + np.testing.assert_array_equal(result_disp_row, disp_map["row_map"]) + np.testing.assert_array_equal(result_disp_col, disp_map["col_map"]) + assert ( + f"With subpixel of `{subpixel}` the `{nb_of_skipped}` first dichotomy iterations will be skipped." 
+ in caplog.messages + ) - config = {"refinement_method": "dichotomy", "iterations": 2, "filter": "bicubic"} - dichotomy_instance = refinement.dichotomy.Dichotomy(config) +@pytest.mark.parametrize( + ["filter_name", "iterations", "expected"], + [ + pytest.param("sinc", 1, [0, 0.5], id="sinc - 1 iteration"), + pytest.param("sinc", 2, [0, 0.25, 0.5, 0.75], id="sinc - 2 iteration"), + ], +) +def test_pre_computed_filter_fractional_shifts(dichotomy_instance, expected): + """Test filter.fractional_shifts is consistent with dichotomy iteration number.""" + np.testing.assert_array_equal(dichotomy_instance.filter.fractional_shifts, expected) + - assert dichotomy_instance.margins == Margins(1, 1, 2, 2) +@pytest.mark.parametrize( + ["config", "expected"], + [ + pytest.param( + { + "refinement_method": "dichotomy", + "iterations": 1, + "filter": {"method": "bicubic"}, + }, + Margins(1, 1, 2, 2), + id="bicubic", + ), + pytest.param( + { + "refinement_method": "dichotomy", + "iterations": 1, + "filter": {"method": "sinc", "size": 7}, + }, + Margins(7, 7, 7, 7), + id="sinc", + ), + ], +) +def test_margins(dichotomy_instance, expected): + """ + Test margins of Dichotomy. + """ + assert dichotomy_instance.margins == expected class TestCostSurfaces: @@ -572,8 +758,29 @@ def test_iteration(self, cost_volumes, disp_map): ) +@pytest.fixture() +def make_cost_surface(cost_surface_data, subpix): + """ + Creates a cost surface data array according to given data and subpix + """ + + cost_surface = xr.DataArray(cost_surface_data) + + cost_surface.attrs["subpixel"] = subpix + + return cost_surface + + @pytest.mark.parametrize( - ["cost_surface", "precision", "initial_disparity", "initial_position", "initial_value", "expected"], + [ + "cost_surface_data", + "subpix", + "precision", + "initial_disparity", + "initial_position", + "initial_value", + "expected", + ], [ pytest.param( np.array( @@ -585,6 +792,7 @@ def test_iteration(self, cost_volumes, disp_map): [0, 0, 0, 0, 0], ] ), + 1, 0.5, (2, 2), (2, 2), @@ -603,6 +811,7 @@ def test_iteration(self, cost_volumes, disp_map): [0, 0, 0, 0, 0], ] ), + 1, 0.5, (2, 2), (2, 2), @@ -620,6 +829,7 @@ def test_iteration(self, cost_volumes, disp_map): [0, 0, 0, 0, 0], ] ), + 1, 0.25, (1.5, 2.5), (1.5, 2.5), @@ -637,6 +847,7 @@ def test_iteration(self, cost_volumes, disp_map): [0, 0, 0, 0, 0], ] ), + 1, 0.5, (2, 2), (2, 2), @@ -654,6 +865,7 @@ def test_iteration(self, cost_volumes, disp_map): [0, 0, 0, 0, 0], ] ), + 1, 0.5, (2, 2), (2, 2), @@ -671,6 +883,7 @@ def test_iteration(self, cost_volumes, disp_map): [0, 0, 0, 0, 0], ] ), + 1, 0.5, (2, 2), (2, 2), @@ -678,17 +891,101 @@ def test_iteration(self, cost_volumes, disp_map): (refinement.dichotomy.Point(2.5, 2.5), np.float32(2.5), np.float32(2.5), np.float32(6.64453125)), id="NaN outside of kernel has no effect", ), + pytest.param( + np.array( + [ + [0, 0, 0, 0, 0], + [0, 0, 0, 0, 0], + [0, 0, 1, 0, 0], + [0, 20, 0, 0, 0], + [0, 0, 0, 0, 0], + ] + ), + 2, + 0.25, + (2, 2), + (2, 2), + 1.0, + (refinement.dichotomy.Point(1.5, 2.5), np.float32(1.75), np.float32(2.25), np.float32(6.64453125)), + id="Bottom left is best and subpix=2", + ), + pytest.param( + np.array( + [ + [0, 0, 0, 0, 0], + [0, 0, 0, 0, 0], + [0, 0, 1, 0, 0], + [0, 0, 0, 20, 0], + [0, 0, 0, 0, 0], + ] + ), + 2, + 0.125, + (2, 2), + (2, 2), + 1.0, + ( + refinement.dichotomy.Point(2.25, 2.25), + np.float32(2.125), + np.float32(2.125), + np.float32(1.77862548828125), + ), + id="Bottom right is best and subpix=2", + ), + pytest.param( + np.array( + [ + [0, 0, 0, 0, 0], + 
[0, 0, 0, 0, 0], + [0, 0, 1, 0, 0], + [0, 20, 0, 0, 0], + [0, 0, 0, 0, 0], + ] + ), + 4, + 0.125, + (2, 2), + (2, 2), + 1.0, + (refinement.dichotomy.Point(1.5, 2.5), np.float32(1.875), np.float32(2.125), np.float32(6.64453125)), + id="Bottom left is best and subpix=4", + ), + pytest.param( + np.array( + [ + [0, 0, 0, 0, 0], + [0, 0, 0, 0, 0], + [0, 0, 1, 0, 0], + [0, 0, 0, 20, 0], + [0, 0, 0, 0, 0], + ] + ), + 4, + 0.0625, + (2, 2), + (2, 2), + 1.0, + ( + refinement.dichotomy.Point(2.25, 2.25), + np.float32(2.0625), + np.float32(2.0625), + np.float32(1.77862548828125), + ), + id="Bottom right is best and subpix=4", + ), ], ) -def test_search_new_best_point(cost_surface, precision, initial_disparity, initial_position, initial_value, expected): +def test_search_new_best_point( + make_cost_surface, precision, initial_disparity, initial_position, initial_value, expected +): """Test we get new coordinates as expected.""" - filter_dicho = Bicubic("bicubic") + filter_dicho = Bicubic({"method": "bicubic"}) cost_selection_method = np.nanargmax result = refinement.dichotomy.search_new_best_point( - cost_surface, + make_cost_surface, precision, initial_disparity, initial_position, @@ -698,3 +995,206 @@ def test_search_new_best_point(cost_surface, precision, initial_disparity, initi ) assert result == expected + + +class TestExtremaOnEdges: + """ + Test that points for which best cost value is on the edge of disparity range + are not processed by dichotomy loop. + """ + + @pytest.fixture() + def left_img_non_uniform_grid(self, left_img): + """ + Creates a left image dataset with non uniform disparity grids + """ + + # We set the minimum rows disparity at 4 for the point [0,1] + left_img["row_disparity"][0, 0, 1] = 4 + # We set the maximum columns disparity at 0 for the point [1,0] + left_img["col_disparity"][1, 1, 0] = 0 + + return left_img + + @pytest.fixture() + def cost_volumes(self, zeros_cost_volumes, min_disparity_row, max_disparity_col): + """Build cost volumes.""" + # use indexes for row and col to be independent of coordinates which depend on ROI themselves, + # but use coordinates for disp_row and disp_col + + # For point [0,2], the best cost value is set for minimal row disparity + # corresponding cost surface is: + + # [ 0., 0., 0., 0., 0., 0.] + # [ 0., 0., 0., 0., 0., 0.] + # [ 10., 8., 0., 0., 0., 0.] + # [ 8., 9., 0., 0., 0., 0.] + # [ 0., 0., 0., 0., 0., 0.] + # [ 0., 0., 0., 0., 0., 0.] + + zeros_cost_volumes["cost_volumes"].isel(row=0, col=2).loc[ + {"disp_col": [0, 1], "disp_row": min_disparity_row + 1} + ] = [8, 9] + zeros_cost_volumes["cost_volumes"].isel(row=0, col=2).loc[ + {"disp_col": [0, 1], "disp_row": min_disparity_row} + ] = [10, 8] + + # For point [0,1], the best cost value is set for row disparity greater than the minimal one + # corresponding cost surface is: + + # [ 0., 0., 0., 0., 0., 0.] + # [ 0., 0., 0., 0., 0., 0.] + # [ 0., 8., 10., 0., 0., 0.] + # [ 0., 9., 8., 0., 0., 0.] + # [ 0., 0., 0., 0., 0., 0.] + # [ 0., 0., 0., 0., 0., 0.] + + zeros_cost_volumes["cost_volumes"].isel(row=0, col=1).loc[{"disp_col": [0, 1], "disp_row": 3}] = [8, 9] + zeros_cost_volumes["cost_volumes"].isel(row=0, col=1).loc[{"disp_col": [0, 1], "disp_row": 4}] = [10, 8] + + # For point [0,0], the best cost value is set for maximal col disparity + # corresponding cost surface is: + + # [0. , 0. , 0. , 0. , 0. , 0. ] + # [0. , 0. , 0. , 0. , 0. , 0. ] + # [0. , 0. , 0. , 0. , 0. , 0. ] + # [0. , 0. , 4.9 , 0. , 0. , 0. ] + # [0. , 0. , 4.99, 0. , 0. , 0. ] + # [0. , 0. , 5. , 0. , 0. , 0. 
] + + zeros_cost_volumes["cost_volumes"].isel(row=0, col=0).loc[ + {"disp_col": [max_disparity_col - 2, max_disparity_col - 1, max_disparity_col], "disp_row": 4} + ] = [ + 4.9, + 4.99, + 5, + ] + + # For point [1,0], the best cost value is set for col disparity lower than the maximal one + # corresponding cost surface is: + + # [0. , 0. , 4.9 , 0. , 0. , 0. ] + # [0. , 0. , 4.99, 0. , 0. , 0. ] + # [0. , 0. , 5. , 0. , 0. , 0. ] + # [0. , 0. , 0. , 0. , 0. , 0. ] + # [0. , 0. , 0. , 0. , 0. , 0. ] + # [0. , 0. , 0. , 0. , 0. , 0. ] + + zeros_cost_volumes["cost_volumes"].isel(row=1, col=0).loc[{"disp_col": [-2, -1, 0], "disp_row": 4}] = [ + 4.9, + 4.99, + 5, + ] + + return zeros_cost_volumes + + @pytest.fixture() + def dataset_disp_maps(self, invalid_disparity, rows, cols, min_disparity_row, max_disparity_col, min_disparity_col): + """Fake disparity maps containing extrema on edges of disparity range.""" + + row = np.full((rows.size, cols.size), 4.0) + row[:, 2] = min_disparity_row + row[1, 1] = min_disparity_row + + # row map is equal to: + # [4., 4., 2.] + # [4., 2., 2.] + + col = np.full((rows.size, cols.size), 0.0) + col[0, 0] = max_disparity_col + col[1, -2:] = min_disparity_col + + # col map is equal to: + # [3., 0., 0.] + # [0., -2., -2.] + + return xr.Dataset( + { + "row_map": (["row", "col"], row.reshape((rows.size, cols.size))), + "col_map": (["row", "col"], col.reshape((rows.size, cols.size))), + }, + coords={ + "row": rows, + "col": cols, + }, + attrs={"invalid_disp": invalid_disparity}, + ) + + def test_uniform_disparity_grid( + self, cost_volumes, dataset_disp_maps, left_img, dichotomy_instance, mocker: MockerFixture + ): + """ + Test that points for which best cost value is on the edge of disparity range + are not processed by dichotomy loop using uniform disparity grids + """ + + copy_disp_map = copy.deepcopy(dataset_disp_maps) + + result_disp_col, result_disp_row, _ = dichotomy_instance.refinement_method( + cost_volumes, copy_disp_map, left_img, img_right=mocker.ANY + ) + + # result_disp_row is equal to: + # [4. 3.75 2. ] + # [4. 2. 2. ] + + # result_disp_col is equal to: + # [3. 0.25 0. ] + # [-0.25. -2. -2. ] + + # Extrema on the edge of row disparity range for point [0,2] --> unchanged row map value after dichotomy loop + assert result_disp_row[0, 2] == dataset_disp_maps["row_map"][0, 2] + # Extrema not on the edge for point [0,1] --> changed row map value after dichotomy loop + assert result_disp_row[0, 1] == dataset_disp_maps["row_map"][0, 1] - 0.25 + + # Extrema on the edge of col disparity range for point [0,0] --> unchanged col map value after dichotomy loop + assert result_disp_col[0, 0] == dataset_disp_maps["col_map"][0, 0] + # Extrema not on the edge for point [1,0] --> changed col map value after dichotomy loop + assert result_disp_col[1, 0] == dataset_disp_maps["col_map"][1, 0] - 0.25 + + def test_non_uniform_disparity_grid( + self, + cost_volumes, + dataset_disp_maps, + left_img_non_uniform_grid, + dichotomy_instance, + max_disparity_row, + min_disparity_col, + mocker: MockerFixture, + ): + """ + Test that points for which best cost value is on the edge of disparity range + are not processed by dichotomy loop using non uniform disparity grids + """ + + copy_disp_map = copy.deepcopy(dataset_disp_maps) + + result_disp_col, result_disp_row, _ = dichotomy_instance.refinement_method( + cost_volumes, copy_disp_map, left_img_non_uniform_grid, img_right=mocker.ANY + ) + + # result_disp_row is equal to: + # [4. 4. 2. ] + # [4. 2. 2. 
] + + # result_disp_col is equal to: + # [3. 0. 0. ] + # [0. -2. -2. ] + + # Extrema on the edge of row disparity range for point [0,2] --> unchanged row map value after dichotomy loop + assert result_disp_row[0, 2] == dataset_disp_maps["row_map"][0, 2] + # Extrema on the edge of row disparity range for point [0,1] --> unchanged row map value after dichotomy loop + assert result_disp_row[0, 1] == dataset_disp_maps["row_map"][0, 1] + + # For point [0,1] row disparity range is not [min_disparity_row, max_disparity_row] but [4, max_disparity_row], + # we check that resulting disparity row is in this range. + assert result_disp_row[0, 1] in range(4, max_disparity_row + 1) + + # Extrema on the edge of col disparity range for point [0,0] --> unchanged col map value after dichotomy loop + assert result_disp_col[0, 0] == dataset_disp_maps["col_map"][0, 0] + # Extrema on the edge of col disparity range for point [1,0] --> unchanged col map value after dichotomy loop + assert result_disp_col[1, 0] == dataset_disp_maps["col_map"][1, 0] + + # For point [1,0] col disparity range is not [min_disparity_col, max_disparity_col] but [min_disparity_col, 0], + # we check that resulting disparity row is in this range. + assert result_disp_col[1, 0] in range(min_disparity_col, 0 + 1) diff --git a/tests/unit_tests/test_refinement/test_interpolation.py b/tests/unit_tests/test_refinement/test_interpolation.py index 6b752b5..0aa688b 100644 --- a/tests/unit_tests/test_refinement/test_interpolation.py +++ b/tests/unit_tests/test_refinement/test_interpolation.py @@ -70,6 +70,11 @@ def cv_dataset(): def test_checkconf_fails_if_iterations_is_given(): + """ + Description : Test fails if iterations is given + Data : + Requirement : EX_CONF_08 + """ with pytest.raises(json_checker.core.exceptions.MissKeyCheckerError): refinement.interpolation.Interpolation({"refinement_method": "interpolation", "iterations": 1}) diff --git a/tests/unit_tests/test_refinement/test_optical_flow.py b/tests/unit_tests/test_refinement/test_optical_flow.py index 76bc16c..3d4a70a 100644 --- a/tests/unit_tests/test_refinement/test_optical_flow.py +++ b/tests/unit_tests/test_refinement/test_optical_flow.py @@ -21,7 +21,9 @@ Test refinement step """ -# pylint: disable=redefined-outer-name, protected-access +from typing import Dict + +# pylint: disable=redefined-outer-name, protected-access, unused-argument # mypy: disable-error-code=attr-defined import numpy as np @@ -29,7 +31,9 @@ import xarray as xr from json_checker.core.exceptions import DictCheckerError from pandora.margins import Margins -from pandora2d import refinement +from pandora2d import refinement, common, matching_cost, disparity +from pandora2d.refinement.optical_flow import OpticalFlow +from pandora2d.img_tools import add_disparity_grid @pytest.fixture() @@ -48,8 +52,8 @@ def dataset_image(): "valid_pixels": 0, "no_data_mask": 1, "crs": None, - "col_disparity_source": [-2, 2], - "row_disparity_source": [-2, 2], + "col_disparity_source": [-1, 3], + "row_disparity_source": [-1, 3], "invalid_disparity": np.nan, } @@ -63,13 +67,18 @@ def optical_flow_cfg(): def test_check_conf_passes(optical_flow_cfg): """ - Test the check_conf function + Description : Test the check_conf function + Data : + Requirement : EX_REF_01, EX_REF_FO_00 """ refinement.AbstractRefinement(optical_flow_cfg) # type: ignore[abstract] class TestIterations: - """Test Iteration parameter.""" + """ + Description : Test Iteration parameter. 
+ Requirement : EX_REF_FO_01 + """ def test_iterations_is_not_mandatory(self): """Should not raise error.""" @@ -84,7 +93,11 @@ def test_iterations_is_not_mandatory(self): ], ) def test_fails_with_invalid_iteration_value(self, value): - """Iteration should be only positive integer.""" + """ + Description : Iteration should be only positive integer. + Data : + Requirement : EX_CONF_08 + """ with pytest.raises((KeyError, DictCheckerError)): refinement.optical_flow.OpticalFlow({"refinement_method": "optical_flow", "iterations": value}) @@ -143,7 +156,11 @@ def test_nominal_case(self, optical_flow_cfg, window_size): ], ) def test_check_conf_fails_with_wrong_window_size(self, optical_flow_cfg, window_size): - """Wrong value of window_size""" + """ + Description : Wrong value of window_size + Data : + Requirement : EX_CONF_08 + """ with pytest.raises(DictCheckerError) as err: refinement.AbstractRefinement(optical_flow_cfg, [1, 1], window_size) # type: ignore[abstract] assert "window_size" in err.value.args[0] @@ -176,7 +193,11 @@ def test_nominal_case(self, optical_flow_cfg, step): ], ) def test_check_conf_fails_with_wrong_step(self, optical_flow_cfg, step): - """Wrong value of step""" + """ + Description : Wrong value of step + Data : + Requirement : EX_CONF_08 + """ with pytest.raises(DictCheckerError) as err: refinement.AbstractRefinement(optical_flow_cfg, step) # type: ignore[abstract] assert "step" in err.value.args[0] @@ -189,8 +210,9 @@ def test_reshape_to_matching_cost_window_left(dataset_image): img = dataset_image - refinement_class = refinement.AbstractRefinement({"refinement_method": "optical_flow"}) # type: ignore[abstract] - refinement_class._window_size = 3 + refinement_class = refinement.AbstractRefinement( + {"refinement_method": "optical_flow"}, [1, 1], 3 + ) # type: ignore[abstract] cv = np.zeros((6, 5, 5, 5)) @@ -205,10 +227,28 @@ def test_reshape_to_matching_cost_window_left(dataset_image): "disp_col": disparity_range_col, "disp_row": disparity_range_row, }, + attrs={"offset_row_col": 1}, ) + # get first and last coordinates for row and col in cost volume dataset + first_col_coordinate = cost_volumes.col.data[0] + cost_volumes.offset_row_col + last_col_coordinate = cost_volumes.col.data[-1] - cost_volumes.offset_row_col + col_extrema_coordinates = [ + OpticalFlow.find_nearest_column(first_col_coordinate, cost_volumes.col.data, "+"), + OpticalFlow.find_nearest_column(last_col_coordinate, cost_volumes.col.data, "-"), + ] + + first_row_coordinate = cost_volumes.row.data[0] + cost_volumes.offset_row_col + last_row_coordinate = cost_volumes.row.data[-1] - cost_volumes.offset_row_col + row_extrema_coordinates = [ + OpticalFlow.find_nearest_column(first_row_coordinate, cost_volumes.row.data, "+"), + OpticalFlow.find_nearest_column(last_row_coordinate, cost_volumes.row.data, "-"), + ] + # for left image - reshaped_left = refinement_class.reshape_to_matching_cost_window(img, cost_volumes) + reshaped_left = refinement_class.reshape_to_matching_cost_window( + img, cost_volumes, (row_extrema_coordinates, col_extrema_coordinates) + ) # test four matching_cost idx_1_1 = [[0, 1, 2], [5, 6, 7], [10, 11, 12]] @@ -228,9 +268,9 @@ def test_reshape_to_matching_cost_window_right(dataset_image): """ img = dataset_image - - refinement_class = refinement.AbstractRefinement({"refinement_method": "optical_flow"}) # type: ignore[abstract] - refinement_class._window_size = 3 + refinement_class = refinement.AbstractRefinement( + {"refinement_method": "optical_flow"}, [1, 1], 3 + ) # type: 
ignore[abstract] # Create disparity maps col_disp_map = [2, 0, 0, 0, 1, 0, 0, 0, 1, -2, 0, 0] @@ -249,10 +289,28 @@ def test_reshape_to_matching_cost_window_right(dataset_image): "disp_col": disparity_range_col, "disp_row": disparity_range_row, }, + attrs={"offset_row_col": 1}, ) + # get first and last coordinates for row and col in cost volume dataset + first_col_coordinate = cost_volumes.col.data[0] + cost_volumes.offset_row_col + last_col_coordinate = cost_volumes.col.data[-1] - cost_volumes.offset_row_col + col_extrema_coordinates = [ + OpticalFlow.find_nearest_column(first_col_coordinate, cost_volumes.col.data, "+"), + OpticalFlow.find_nearest_column(last_col_coordinate, cost_volumes.col.data, "-"), + ] + + first_row_coordinate = cost_volumes.row.data[0] + cost_volumes.offset_row_col + last_row_coordinate = cost_volumes.row.data[-1] - cost_volumes.offset_row_col + row_extrema_coordinates = [ + OpticalFlow.find_nearest_column(first_row_coordinate, cost_volumes.row.data, "+"), + OpticalFlow.find_nearest_column(last_row_coordinate, cost_volumes.row.data, "-"), + ] + # for right image - reshaped_right = refinement_class.reshape_to_matching_cost_window(img, cost_volumes, row_disp_map, col_disp_map) + reshaped_right = refinement_class.reshape_to_matching_cost_window( + img, cost_volumes, (row_extrema_coordinates, col_extrema_coordinates), row_disp_map, col_disp_map + ) # test four matching_cost idx_1_1 = [[12, 13, 14], [17, 18, 19], [22, 23, 24]] @@ -262,35 +320,433 @@ def test_reshape_to_matching_cost_window_right(dataset_image): assert np.array_equal(reshaped_right[:, :, 4], idx_2_2) -def test_warped_image_without_step(): +@pytest.mark.parametrize( + ["window_size", "mc_1", "mc_2", "gt_mc_1", "gt_mc_2"], + [ + pytest.param( + 5, + np.array( + [[0, 1, 2, 3, 4], [6, 7, 8, 9, 10], [12, 13, 14, 15, 16], [18, 19, 20, 21, 22], [24, 25, 26, 27, 28]] + ), + np.array( + [[1, 2, 3, 4, 5], [7, 8, 9, 10, 11], [13, 14, 15, 16, 17], [19, 20, 21, 22, 23], [25, 26, 27, 28, 29]] + ), + np.array( + [ + [19, 20, 21, 22, 22], + [25, 26, 27, 28, 28], + [25, 26, 27, 28, 28], + [19, 20, 21, 22, 22], + [13, 14, 15, 16, 16], + ] + ), + np.array( + [ + [20, 21, 22, 23, 23], + [26, 27, 28, 29, 29], + [26, 27, 28, 29, 29], + [20, 21, 22, 23, 23], + [14, 15, 16, 17, 17], + ] + ), + ), + pytest.param( + 3, + np.array([[0, 1, 2], [3, 4, 5], [6, 7, 8]]), + np.array([[1, 2, 3], [4, 5, 6], [7, 8, 9]]), + np.array([[7, 8, 8], [4, 5, 5], [1, 2, 2]]), + np.array([[8, 9, 9], [5, 6, 6], [2, 3, 3]]), + ), + ], +) +def test_warped_image_without_step(window_size, mc_1, mc_2, gt_mc_1, gt_mc_2): """ - test warped image + test warped image with different window size + no test for window size at 1 because "window_size": And(int, lambda input: input > 1 and (input % 2) != 0) """ - refinement_class = refinement.AbstractRefinement({"refinement_method": "optical_flow"}) # type: ignore[abstract] - - mc_1 = np.array( - [[0, 1, 2, 3, 4], [6, 7, 8, 9, 10], [12, 13, 14, 15, 16], [18, 19, 20, 21, 22], [24, 25, 26, 27, 28]] - ) - mc_2 = np.array( - [[1, 2, 3, 4, 5], [7, 8, 9, 10, 11], [13, 14, 15, 16, 17], [19, 20, 21, 22, 23], [25, 26, 27, 28, 29]] - ) + refinement_class = refinement.AbstractRefinement( + {"refinement_method": "optical_flow"}, None, window_size + ) # type: ignore[abstract] reshaped_right = np.stack((mc_1, mc_2)).transpose((1, 2, 0)) - delta_row = -3 * np.ones(2) delta_col = -np.ones(2) test_img_shift = refinement_class.warped_img(reshaped_right, delta_row, delta_col, [0, 1]) - gt_mc_1 = np.array( - [[19, 20, 21, 22, 22], [25, 
26, 27, 28, 28], [25, 26, 27, 28, 28], [19, 20, 21, 22, 22], [13, 14, 15, 16, 16]] + # check that the generated image is equal to ground truth + assert np.array_equal(gt_mc_1, test_img_shift[:, :, 0]) + assert np.array_equal(gt_mc_2, test_img_shift[:, :, 1]) + + +def test_optical_flow_method(): + """ + test optical flow method with a simple col shift + """ + + # input array creation + array_left = np.array([[0, 1, 2, 3, 4], [0, 1, 2, 3, 4], [0, 1, 2, 3, 4], [0, 1, 2, 3, 4], [0, 1, 2, 3, 4]]) + array_right = np.array( + [ + [0.1, 1.2, 2.3, 3.4, 4.5], + [0.1, 1.2, 2.3, 3.4, 4.5], + [0.1, 1.2, 2.3, 3.4, 4.5], + [0.1, 1.2, 2.3, 3.4, 4.5], + [0.1, 1.2, 2.3, 3.4, 4.5], + ] ) - gt_mc_2 = np.array( - [[20, 21, 22, 23, 23], [26, 27, 28, 29, 29], [26, 27, 28, 29, 29], [20, 21, 22, 23, 23], [14, 15, 16, 17, 17]] + one_dim_size = (array_left.shape[0] - 2) * (array_left.shape[1] - 2) # -2 because of margin + + # patch creation + patches_left = np.lib.stride_tricks.sliding_window_view(array_left, [3, 3]) + patches_left = patches_left.reshape((one_dim_size, 3, 3)).transpose((1, 2, 0)) + patches_right = np.lib.stride_tricks.sliding_window_view(array_right, [3, 3]) + patches_right = patches_right.reshape((one_dim_size, 3, 3)).transpose((1, 2, 0)) + + idx_to_compute = np.arange(patches_left.shape[2]).tolist() + + # class initialisation + refinement_class = refinement.AbstractRefinement( + {"refinement_method": "optical_flow"}, [1, 1], 3 + ) # type: ignore[abstract] + + computed_drow, computed_dcol, idx_to_compute = refinement_class.optical_flow( + patches_left, patches_right, idx_to_compute ) - # check that the generated image is equal to ground truth - assert np.array_equal(gt_mc_1, test_img_shift[:, :, 0]) - assert np.array_equal(gt_mc_2, test_img_shift[:, :, 1]) + truth_drow = [0, 0, 0, 0, 0, 0, 0, 0, 0] + truth_dcol = [0.2, 0.3, 0.4, 0.2, 0.3, 0.4, 0.2, 0.3, 0.4] + + assert np.allclose(computed_dcol, truth_dcol, atol=1e-03) + assert np.allclose(computed_drow, truth_drow, atol=1e-03) + + +def test_lucas_kanade_core_algorithm(): + """ + test lucas kanade algorithm with simple flow in x axis + """ + + left_data = np.array([[1, 2, 3], [1, 2, 3], [1, 2, 3]], dtype=float) + + right_data = np.array([[2.1, 3.1, 4.1], [2.1, 3.1, 4.1], [2.1, 3.1, 4.1]], dtype=float) + + refinement_class = refinement.AbstractRefinement({"refinement_method": "optical_flow"}) # type: ignore[abstract] + motion_y, motion_x = refinement_class.lucas_kanade_core_algorithm(left_data, right_data) + + expected_motion = [1.1, 0.0] + assert np.allclose([motion_x, motion_y], expected_motion, atol=1e-3) + + +@pytest.fixture() +def make_data(row, col): + return np.random.uniform(0, row * col, (row, col)) + + +def make_img_dataset(data, shift=0): + """ + Instantiate an image dataset with specified rows, columns, and row shift. 
+ """ + # data = np.roll(data, shift, axis=0) + data = data * 2.2 + data = np.round(data, 2) + + return xr.Dataset( + {"im": (["row", "col"], data)}, + coords={"row": np.arange(data.shape[0]), "col": np.arange(data.shape[1])}, + attrs={ + "no_data_img": -9999, + "valid_pixels": 0, + "no_data_mask": 1, + "crs": None, + "invalid_disparity": np.nan, + }, + ).pipe(add_disparity_grid, {"init": 0, "range": 2}, {"init": 0, "range": 2}) + + +@pytest.fixture() +def make_left_right_images(make_data): + data = make_data + return make_img_dataset(data, 0), make_img_dataset(data, 2) + + +def make_cv_dataset(dataset_img, dataset_img_shift, cfg_mc): + """ + Instantiate a cost volume dataset + """ + matching_cost_matcher = matching_cost.MatchingCost(cfg_mc["pipeline"]["matching_cost"]) + + matching_cost_matcher.allocate_cost_volume_pandora( + img_left=dataset_img, + img_right=dataset_img_shift, + cfg=cfg_mc, + ) + + dataset_cv = matching_cost_matcher.compute_cost_volumes(dataset_img, dataset_img_shift) + return dataset_cv + + +def make_disparity_dataset(dataset_cv, cfg_disp): + """ + Instantiate a disparity dataset + """ + disparity_matcher = disparity.Disparity(cfg_disp) + delta_x, delta_y, score = disparity_matcher.compute_disp_maps(dataset_cv) + + data_variables = { + "row_map": (("row", "col"), delta_x), + "col_map": (("row", "col"), delta_y), + "correlation_score": (("row", "col"), score), + } + coords = {"row": dataset_cv.row.data, "col": dataset_cv.col.data} + dataset = xr.Dataset(data_variables, coords) + dataset_disp_map = common.dataset_disp_maps( + dataset.row_map, + dataset.col_map, + dataset.coords, + dataset.correlation_score, + attributes={"invalid_disp": np.nan}, + ) + return dataset_disp_map + + +@pytest.mark.parametrize(["row", "col", "step_row", "step_col"], [(10, 10, 2, 1), (10, 10, 1, 2), (10, 10, 2, 2)]) +def test_step_with_refinement_method(make_left_right_images, row, col, step_row, step_col): + """ + Test refinement method with a step + """ + + # create left image dataset and right image dataset with same as left but with a row shift + dataset_img, dataset_img_shift = make_left_right_images + + # create cost volume dataset + cfg_mc = { + "pipeline": {"matching_cost": {"matching_cost_method": "zncc", "window_size": 3, "step": [step_row, step_col]}} + } + dataset_cv = make_cv_dataset(dataset_img, dataset_img_shift, cfg_mc) + + # create disparity dataset + cfg_disp = {"disparity_method": "wta", "invalid_disparity": np.nan} + dataset_disp_map = make_disparity_dataset(dataset_cv, cfg_disp) + + # Start test + refinement_class = refinement.AbstractRefinement( + {"refinement_method": "optical_flow"}, [step_row, step_col], 3 + ) # type: ignore[abstract] + + refinement_class.refinement_method(dataset_cv, dataset_disp_map, dataset_img, dataset_img_shift) + + +@pytest.mark.parametrize( + ["row", "col", "step_row", "step_col", "window_size"], [(11, 11, 1, 1, 3), (11, 11, 1, 1, 5), (11, 11, 1, 1, 11)] +) +def test_window_size_refinement_method(make_left_right_images, row, col, step_row, step_col, window_size): + """ + Test refinement method with different windows size and check border value, here the step is fixed to 1 + """ + + # create left image dataset and right image dataset with same as left but with a row shift + dataset_img, dataset_img_shift = make_left_right_images + + # create cost volume dataset + cfg_mc = { + "pipeline": { + "matching_cost": {"matching_cost_method": "zncc", "window_size": window_size, "step": [step_row, step_col]} + } + } + + dataset_cv = 
make_cv_dataset(dataset_img, dataset_img_shift, cfg_mc) + + # create disparity dataset + cfg_disp = {"disparity_method": "wta", "invalid_disparity": np.nan} + dataset_disp_map = make_disparity_dataset(dataset_cv, cfg_disp) + + # Start test + refinement_class = refinement.AbstractRefinement( + {"refinement_method": "optical_flow"}, [step_row, step_col], window_size + ) # type: ignore[abstract] + + delta_col, delta_row, _ = refinement_class.refinement_method( + dataset_cv, dataset_disp_map, dataset_img, dataset_img_shift + ) + + pad = max(window_size // 2 * ele for _ in range(2) for ele in [step_row, step_col]) + + # Check if there are any _invalid_disp inside image without border + assert not np.isnan(delta_col[pad : col - pad, pad : col - pad]).any() + assert not np.isnan(delta_row[pad : row - pad, pad : row - pad]).any() + + # Check _invalid_disp in border zone + assert np.isnan(delta_col[0:pad, col - pad : col]).any() + assert np.isnan(delta_row[0:pad, row - pad : row]).any() + + # Check final image shape + assert np.array_equal(row, delta_row.shape[0]) + assert np.array_equal(row, delta_row.shape[1]) + assert np.array_equal(col, delta_col.shape[0]) + assert np.array_equal(col, delta_col.shape[1]) + + +class TestDisparityGrids: + """Test influence of disparity grids.""" + + @pytest.fixture() + def nb_rows(self) -> int: + return 10 + + @pytest.fixture() + def nb_cols(self) -> int: + return 8 + + @pytest.fixture() + def image( + self, + random_generator: np.random.Generator, + nb_rows: int, + nb_cols: int, + min_row: bool, + max_row: bool, + min_col: bool, + max_col: bool, + ) -> xr.Dataset: + """ + Create random image dataset with disparity grids with a range of 3 or 7. + + :param random_generator: + :type random_generator: np.random.Generator + :param nb_rows: number of rows in the image + :type nb_rows: int + :param nb_cols: number of cols in the image + :type nb_cols: int + :param min_row: if True, row min disparities will be a mix of 1 and 3 else will be all 1. + :type min_row: bool + :param max_row: if True, row max disparities will be a mix of 6 and 8 else will be all 6. + :type max_row: bool + :param min_col: if True, col min disparities will be a mix of 1 and 3 else will be all 1. + :type min_col: bool + :param max_col: if True, col max disparities will be a mix of 6 and 8 else will be all 6. 
+        :type max_col: bool
+        :return: image dataset
+        :rtype: xr.Dataset
+        """
+        shape = (nb_rows, nb_cols)
+        data = random_generator.integers(0, 255, shape, endpoint=True)
+
+        # disparity range must be odd and greater or equal to 5
+        fixed_min = np.ones(shape)
+        random_min = random_generator.choice([1, 3], shape)
+        fixed_max = np.full(shape, 6)  # with min either 1 or 3 we get range 3 or 7
+        random_max = random_min + 5
+
+        row_min_disparity = random_min if min_row else fixed_min
+        col_min_disparity = random_min if min_col else fixed_min
+        row_max_disparity = random_max if max_row else fixed_max
+        col_max_disparity = random_max if max_col else fixed_max
+
+        return xr.Dataset(
+            {
+                "im": (["row", "col"], data),
+                "row_disparity": (["band_disp", "row", "col"], np.array([row_min_disparity, row_max_disparity])),
+                "col_disparity": (["band_disp", "row", "col"], np.array([col_min_disparity, col_max_disparity])),
+            },
+            coords={"row": np.arange(nb_rows), "col": np.arange(nb_cols), "band_disp": ["min", "max"]},
+            attrs={
+                "no_data_img": -9999,
+                "valid_pixels": 0,
+                "no_data_mask": 1,
+                "crs": None,
+                "col_disparity_source": [np.min(col_min_disparity), np.max(col_max_disparity)],
+                "row_disparity_source": [np.min(row_min_disparity), np.max(row_max_disparity)],
+            },
+        )
+
+    @pytest.fixture()
+    def cfg(self) -> Dict:
+        return {
+            "pipeline": {
+                "matching_cost": {
+                    "matching_cost_method": "ssd",
+                    "window_size": 3,
+                    "step": [1, 1],
+                    "subpix": 1,
+                }
+            }
+        }
+
+    @pytest.fixture()
+    def invalid_value(self) -> int:
+        return -99
+
+    @pytest.fixture()
+    def disparities(self, image: xr.Dataset, cfg: Dict, invalid_value) -> Dict:
+        """Execute refinement method and return disparities."""
+        matching_cost_ = matching_cost.MatchingCost(cfg["pipeline"]["matching_cost"])
+
+        matching_cost_.allocate_cost_volume_pandora(
+            img_left=image,
+            img_right=image,
+            cfg=cfg,
+        )
+
+        cost_volumes = matching_cost_.compute_cost_volumes(
+            img_left=image,
+            img_right=image,
+        )
+
+        disparity_matcher = disparity.Disparity({"disparity_method": "wta", "invalid_disparity": invalid_value})
+
+        disp_map_col, disp_map_row, correlation_score = disparity_matcher.compute_disp_maps(cost_volumes)
+
+        data_variables = {
+            "row_map": (("row", "col"), disp_map_row),
+            "col_map": (("row", "col"), disp_map_col),
+            "correlation_score": (("row", "col"), correlation_score),
+        }
+
+        coords = {"row": image.coords["row"], "col": image.coords["col"]}
+
+        dataset = xr.Dataset(data_variables, coords)
+
+        dataset_disp_map = common.dataset_disp_maps(
+            dataset.row_map,
+            dataset.col_map,
+            dataset.coords,
+            dataset.correlation_score,
+            attributes={"invalid_disp": invalid_value},
+        )
+
+        test = refinement.AbstractRefinement(
+            {"refinement_method": "optical_flow"},
+            cfg["pipeline"]["matching_cost"]["step"],
+            cfg["pipeline"]["matching_cost"]["window_size"],
+        )  # type: ignore[abstract]
+        disparity_col, disparity_row, _ = test.refinement_method(cost_volumes, dataset_disp_map, image, image)
+        return {"row_disparity": disparity_row, "col_disparity": disparity_col}
+
+    @pytest.mark.parametrize("min_row", (True, False))
+    @pytest.mark.parametrize("max_row", (True, False))
+    @pytest.mark.parametrize("min_col", (True, False))
+    @pytest.mark.parametrize("max_col", (True, False))
+    def test_variable_grid(self, image, disparities, invalid_value):
+        """Test resulting disparities are in range defined by grids."""
+        # We want to exclude invalid_values from the comparison
+        valid_row_mask = disparities["row_disparity"] != invalid_value
+        valid_col_mask = disparities["col_disparity"] != invalid_value
disparities["col_disparity"] != invalid_value + + assert np.all( + disparities["row_disparity"][valid_row_mask] + >= image["row_disparity"].sel({"band_disp": "min"}).data[valid_row_mask] + ) + assert np.all( + disparities["col_disparity"][valid_col_mask] + >= image["col_disparity"].sel({"band_disp": "min"}).data[valid_col_mask] + ) + assert np.all( + disparities["row_disparity"][valid_row_mask] + <= image["row_disparity"].sel({"band_disp": "max"}).data[valid_row_mask] + ) + assert np.all( + disparities["col_disparity"][valid_col_mask] + <= image["col_disparity"].sel({"band_disp": "max"}).data[valid_col_mask] + ) diff --git a/tests/unit_tests/test_statistics.py b/tests/unit_tests/test_statistics.py index c98a6f4..00be05b 100644 --- a/tests/unit_tests/test_statistics.py +++ b/tests/unit_tests/test_statistics.py @@ -21,6 +21,7 @@ # Make pylint happy with fixtures: # pylint: disable=redefined-outer-name +import warnings import numpy as np import pytest @@ -97,6 +98,9 @@ def test_std(self, data, invalid_values, expected): ) def test_minimal_valid_pixel_ratio(self, data, invalid_values, expected): """Test std statistic result.""" - result = compute_statistics(data, invalid_values) - assert result.minimal_valid_pixel_ratio == expected + with warnings.catch_warnings(): + warnings.simplefilter("ignore", category=RuntimeWarning) + result = compute_statistics(data, invalid_values) + + assert result.minimal_valid_pixel_ratio == expected