Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add Essential Matrix Optimization #816

Open
wants to merge 11 commits into
base: master
Choose a base branch
from
14 changes: 14 additions & 0 deletions 2024_04_16_15_23_29_545171_aggregated_results_table.tsv

Large diffs are not rendered by default.

14 changes: 14 additions & 0 deletions 2024_04_18_18_10_46_790577_aggregated_results_table.tsv

Large diffs are not rendered by default.

14 changes: 14 additions & 0 deletions 2024_04_20_15_47_13_477451_aggregated_results_table.tsv

Large diffs are not rendered by default.

3 changes: 2 additions & 1 deletion environment_linux.yml
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@ channels:
- pytorch
- conda-forge
- nvidia
- open3d-admin
dependencies:
# python essentials
- python=3.8
Expand Down Expand Up @@ -45,11 +46,11 @@ dependencies:
- plotly=4.14.3
- tabulate
- simplejson
- open3d
- pyparsing==3.0.9
# testing
- parameterized
- pip:
- open3d
- opencv-python>=4.5.4.58
- pydegensac
- colour
Expand Down
3 changes: 2 additions & 1 deletion environment_linux_cpuonly.yml
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@ channels:
# channel.
- pytorch
- conda-forge
- open3d-admin
dependencies:
# python essentials
- python=3.8
Expand Down Expand Up @@ -44,11 +45,11 @@ dependencies:
- plotly=4.14.3
- tabulate
- simplejson
- open3d
- pyparsing==3.0.9
# testing
- parameterized
- pip:
- open3d
- opencv-python>=4.5.4.58
- pydegensac
- colour
Expand Down
99 changes: 88 additions & 11 deletions gtsfm/view_graph_estimator/cycle_consistent_rotation_estimator.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,25 +9,28 @@
from pathlib import Path
from typing import Dict, List, Optional, Set, Tuple

import matplotlib.pyplot as plt
import numpy as np
from gtsam import Cal3Bundler, Rot3, Unit3

import gtsam
import gtsfm.utils.geometry_comparisons as comp_utils
import gtsfm.utils.graph as graph_utils
import gtsfm.utils.logger as logger_utils
import matplotlib.pyplot as plt
import numpy as np
from gtsam import Cal3Bundler, Rot3, Unit3
from gtsfm.common.keypoints import Keypoints
from gtsfm.two_view_estimator import TwoViewEstimationReport
from gtsfm.view_graph_estimator.view_graph_estimator_base import ViewGraphEstimatorBase
from gtsfm.view_graph_estimator.view_graph_estimator_base import \
ViewGraphEstimatorBase

logger = logger_utils.get_logger()

# threshold for cycle consistency inference
# Threshold for cycle consistency inference
ERROR_THRESHOLD = 7.0

# threshold for evaluation w.r.t. GT
# Threshold for evaluation w.r.t. GT
MAX_INLIER_MEASUREMENT_ERROR_DEG = 5.0

E = gtsam.symbol_shorthand.E # essential matrix


class EdgeErrorAggregationCriterion(str, Enum):
"""Aggregate cycle errors over each edge by choosing one of the following summary statistics:
Expand Down Expand Up @@ -105,14 +108,17 @@ def run(
logger.info("Input number of edges: %d" % len(i2Ri1_dict))
input_edges: List[Tuple[int, int]] = i2Ri1_dict.keys()
triplets: List[Tuple[int, int, int]] = graph_utils.extract_cyclic_triplets_from_edges(input_edges)

logger.info("Number of triplets: %d" % len(triplets))

# Optimize essential matrices.
i2Ri1_dict, i2Ui1_dict = self.optimize_essential_matrices(
triplets, i2Ri1_dict, i2Ui1_dict, calibrations, corr_idxs_i1i2, keypoints
)

# Compute the cycle error for each triplet, and add it to its edges for aggregation.
per_edge_errors = defaultdict(list)
cycle_errors: List[float] = []
max_gt_error_in_cycle = []

# Compute the cycle error for each triplet, and add it to its edges for aggregation.
for i0, i1, i2 in triplets: # sort order guaranteed
error = comp_utils.compute_cyclic_rotation_error(
i1Ri0=i2Ri1_dict[(i0, i1)], i2Ri1=i2Ri1_dict[(i1, i2)], i2Ri0=i2Ri1_dict[(i0, i2)]
Expand Down Expand Up @@ -149,7 +155,7 @@ def run(
duration_sec,
)

return valid_edges
return valid_edges, i2Ri1_dict, i2Ui1_dict

def __save_plots(
self,
Expand Down Expand Up @@ -232,3 +238,74 @@ def __aggregate_errors_for_edge(self, edge_errors: List[float]) -> float:
return np.amin(edge_errors)
elif self._edge_error_aggregation_criterion == EdgeErrorAggregationCriterion.MEDIAN_EDGE_ERROR:
return np.median(edge_errors)


def optimize_essential_matrices(
    self,
    triplets: List[Tuple[int, int, int]],
    i2Ri1_dict: Dict[Tuple[int, int], Rot3],
    i2Ui1_dict: Dict[Tuple[int, int], Unit3],
    calibrations: List[Cal3Bundler],
    corr_idxs_i1i2: Dict[Tuple[int, int], np.ndarray],
    keypoints: List[Keypoints],
    use_robust_loss: bool = True,
) -> Tuple[Dict[Tuple[int, int], Rot3], Dict[Tuple[int, int], Unit3]]:
    """Jointly optimize over all essential matrices for edges contained in triplets.

    Builds one `gtsam.EssentialMatrixFactor` per verified correspondence on each
    unique triplet edge, runs Levenberg-Marquardt, and writes the optimized
    rotations/directions back into the input dictionaries.

    Note:
        `i2Ri1_dict` and `i2Ui1_dict` are mutated in place (and also returned).
        Edges with zero verified correspondences are left untouched.

    Args:
        triplets: List of triplets in view graph (each sorted as (i0, i1, i2)).
        i2Ri1_dict: Dict from (i1, i2) to relative rotation of i1 with respect to i2.
        i2Ui1_dict: Dict from (i1, i2) to relative translation direction of i1 with respect to i2.
        calibrations: List of calibrations for each image.
        corr_idxs_i1i2: Dict from (i1, i2) to indices of verified correspondences from i1 to i2.
        keypoints: Keypoints for each image.
        use_robust_loss: Whether to wrap the measurement noise in a Huber robust loss.

    Returns:
        Optimized relative rotations and directions (the same dict objects passed in).
    """
    graph = gtsam.NonlinearFactorGraph()
    initial = gtsam.Values()

    # 1-d measurement noise on the epipolar error; optionally robustified.
    noise_model = gtsam.noiseModel.Isotropic.Sigma(1, 1e-2)
    if use_robust_loss:
        huber_loss = gtsam.noiseModel.mEstimator.Huber.Create(1.345)
        noise_model = gtsam.noiseModel.Robust.Create(huber_loss, noise_model)

    # Collect each unique edge appearing in any triplet, assign it a graph key,
    # and add one epipolar factor per verified correspondence on that edge.
    edge_to_key: Dict[Tuple[int, int], int] = {}
    for i0, i1, i2 in triplets:
        for edge in [(i0, i1), (i0, i2), (i1, i2)]:
            if edge in edge_to_key:
                continue
            corr_idxs = corr_idxs_i1i2[edge]
            if corr_idxs.shape[0] == 0:
                # No verified correspondences -> nothing to constrain this edge.
                continue

            key = len(edge_to_key)
            edge_to_key[edge] = key
            initial.insert(key, gtsam.EssentialMatrix(i2Ri1_dict[edge], i2Ui1_dict[edge]))

            # Hoist per-edge lookups out of the per-correspondence loop.
            cal_0, cal_1 = calibrations[edge[0]], calibrations[edge[1]]
            mkps0 = keypoints[edge[0]].coordinates[corr_idxs[:, 0]]
            mkps1 = keypoints[edge[1]].coordinates[corr_idxs[:, 1]]
            for kp0, kp1 in zip(mkps0, mkps1):
                # Factor enforces the epipolar constraint x2^T E x1 = 0 in
                # normalized (calibrated) image coordinates.
                graph.add(
                    gtsam.EssentialMatrixFactor(
                        key,
                        cal_1.calibrate(kp1),
                        cal_0.calibrate(kp0),
                        noise_model,
                    )
                )

    # Nothing to optimize if no edge had correspondences.
    if not edge_to_key:
        return i2Ri1_dict, i2Ui1_dict

    # Optimize!
    params = gtsam.LevenbergMarquardtParams()
    params.setVerbosity("ERROR")
    params.setMaxIterations(10)
    optimizer = gtsam.LevenbergMarquardtOptimizer(graph, initial, params)
    result = optimizer.optimize()

    # Write optimized rotations and translation directions back into the dicts.
    for edge, key in edge_to_key.items():
        E_opt = result.atEssentialMatrix(key)
        i2Ri1_dict[edge] = E_opt.rotation()
        i2Ui1_dict[edge] = E_opt.direction()

    return i2Ri1_dict, i2Ui1_dict
11 changes: 5 additions & 6 deletions gtsfm/view_graph_estimator/view_graph_estimator_base.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,19 +7,18 @@
Authors: Akshay Krishnan, Ayush Baid, John Lambert
"""
import abc
import os
from pathlib import Path
from typing import Dict, List, Optional, Set, Tuple

import dask
import os
import numpy as np
from dask.delayed import Delayed
from gtsam import Cal3Bundler, Rot3, Unit3

import gtsfm.common.types as gtsfm_types
import gtsfm.utils.graph as graph_utils
import gtsfm.utils.logger as logger_utils
import gtsfm.utils.metrics as metrics_utils
import numpy as np
from dask.delayed import Delayed
from gtsam import Cal3Bundler, Rot3, Unit3
from gtsfm.common.keypoints import Keypoints
from gtsfm.evaluation.metrics import GtsfmMetric, GtsfmMetricsGroup
from gtsfm.two_view_estimator import TwoViewEstimationReport
Expand Down Expand Up @@ -302,7 +301,7 @@ def create_computation_graph(
)

# Run view graph estimation.
view_graph_edges = dask.delayed(self.run)(
view_graph_edges, i2Ri1_valid_dict, i2Ui1_valid_dict = dask.delayed(self.run, nout=3)(
i2Ri1_dict=i2Ri1_valid_dict,
i2Ui1_dict=i2Ui1_valid_dict,
calibrations=calibrations,
Expand Down
1 change: 1 addition & 0 deletions thirdparty/RoMa
Submodule RoMa added at fa66db
Loading