processFrame.m
function [curr_state, curr_pose] = processFrame(prev_state, prev_img,...
curr_img, cam_params)
% processFrame: returns the state and camera pose at the current frame given
% the state and image at the previous frame and the new image
% @param struct prev_state: The state at the previous frame
% @param matrix prev_img: Image at the previous frame
% @param matrix curr_img: Image at the current frame
% @param cameraParameters cam_params: Camera intrinsics (only needed on the first call)
% @return struct curr_state: The new state
% @return matrix curr_pose: The new pose as a 4x3 matrix [R_WC; t_WC], i.e. the
% camera orientation and position expressed in the world frame
persistent tracker
persistent camera_parameters
if isempty(tracker)
tracker = KLTTracker();
end
% Store the camera parameters once so that later calls can omit them
if nargin > 3
camera_parameters = cam_params;
end
admissible_angular_threshold = 5; % minimum bearing angle (degrees) to triangulate a candidate
max_temporal_recall = 20; % keep untriangulated candidates for at most this many frames
% Initialise the new state; landmarks and keypoints are filled in below once
% the tracked correspondences have been filtered
curr_state.landmarks = [];
curr_state.keypoints = [];
curr_state.candidate_keypoints = [];
curr_state.candidate_first_keypoints = [];
curr_state.candidate_first_poses = {};
curr_state.candidate_time_indxs = [];
% Track keypoints from the previous frame into the current frame
[curr_pts, val_idx, scores] = tracker.track(prev_img,...
curr_img,...
prev_state.keypoints);
% Estimate the current camera pose from the tracked 2D-3D correspondences
[R_WC, T_WC, inl_indx] = estimateWorldCameraPose(curr_pts(val_idx, :),...
prev_state.landmarks(val_idx, :),...
camera_parameters);
% Keep only correspondences that were tracked successfully and accepted as
% inliers by the pose estimation
tracked_landmarks = prev_state.landmarks(val_idx, :);
curr_state.landmarks = tracked_landmarks(inl_indx, :);
tracked_keypoints = curr_pts(val_idx, :);
curr_state.keypoints = tracked_keypoints(inl_indx, :);
curr_pose = [R_WC; T_WC];
[curr_state.landmarks,...
curr_state.keypoints,...
curr_pose] = updateW2D3D(prev_img, curr_img, prev_state, tracker);
%% Introducing candidates if appropriate
[candidate_tracked, val_cand, score] = tracker.track(prev_img,...
curr_img,...
prev_state.candidate_keypoints);
% triangulate expects 4x3 camera projection matrices, so convert the current
% world pose [R_WC; t_WC] into a projection matrix once before the loop
[R_extr, t_extr] = cameraPoseToExtrinsics(curr_pose(1:3, :), curr_pose(4, :));
curr_cam_matrix = cameraMatrix(camera_parameters, R_extr, t_extr);
for i = reshape(find(val_cand), 1, [])
% Triangulate the candidate from its first observation and the current one
% (first poses are assumed to be stored as 4x3 matrices [R_WC; t_WC], like curr_pose)
first_pose = prev_state.candidate_first_poses{i};
[R_first, t_first] = cameraPoseToExtrinsics(first_pose(1:3, :), first_pose(4, :));
first_cam_matrix = cameraMatrix(camera_parameters, R_first, t_first);
cand_landmark = triangulate(prev_state.candidate_first_keypoints(i,:),...
candidate_tracked(i,:),...
first_cam_matrix, curr_cam_matrix);
% Promote the candidate to a landmark if the bearing angle between its two
% observations is large enough (calculate_angle is not defined in this file;
% a possible sketch is given after this function)
if calculate_angle(cand_landmark, first_pose, curr_pose) > admissible_angular_threshold
curr_state.landmarks = [curr_state.landmarks;...
cand_landmark];
curr_state.keypoints = [curr_state.keypoints;...
candidate_tracked(i,:)];
else
% Keep the candidate only if it has not been waiting for too long
if prev_state.candidate_time_indxs(i) >= -max_temporal_recall
curr_state.candidate_keypoints = [curr_state.candidate_keypoints;...
candidate_tracked(i,:)];
curr_state.candidate_first_keypoints = [curr_state.candidate_first_keypoints;...
prev_state.candidate_first_keypoints(i,:)];
curr_state.candidate_first_poses = [curr_state.candidate_first_poses;...
prev_state.candidate_first_poses(i)];
curr_state.candidate_time_indxs = [curr_state.candidate_time_indxs, ...
prev_state.candidate_time_indxs(i)-1];
end
end
end
% Detect new keypoints and keep those far from already-tracked keypoints
% (selectCandidateKeypoints is not defined here; a sketch is given after this function)
keypoints_curr = detectHarrisFeatures(curr_img);
candidateKeypoints = selectCandidateKeypoints(...
curr_state.keypoints, keypoints_curr.Location);
% Register the new candidates: first observed in the current frame, age counter at zero
n_new = size(candidateKeypoints, 1);
curr_state.candidate_keypoints = [curr_state.candidate_keypoints; candidateKeypoints];
curr_state.candidate_first_keypoints = [curr_state.candidate_first_keypoints; candidateKeypoints];
curr_state.candidate_first_poses = [curr_state.candidate_first_poses; repmat({curr_pose}, n_new, 1)];
curr_state.candidate_time_indxs = [curr_state.candidate_time_indxs, zeros(1, n_new)];
end
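% The bearing-angle test above calls calculate_angle, which this file does not
% define. The local function below is only a minimal sketch of one possible
% implementation: it assumes both poses are 4x3 matrices [R_WC; t_WC] (so the
% camera centre is the last row) and returns the angle in degrees, to match
% admissible_angular_threshold. These conventions are assumptions, not taken
% from the original code.
function angle_deg = calculate_angle(landmark, first_pose, curr_pose)
% Angle (degrees) between the two rays observing the landmark: one from the
% camera centre at the first observation, one from the current camera centre.
c_first = first_pose(4, :); % camera centre at the first observation (world frame)
c_curr = curr_pose(4, :); % current camera centre (world frame)
ray_first = landmark(:)' - c_first; % ray from the first centre to the landmark
ray_curr = landmark(:)' - c_curr; % ray from the current centre to the landmark
cos_angle = dot(ray_first, ray_curr) / (norm(ray_first) * norm(ray_curr));
cos_angle = max(min(cos_angle, 1), -1); % guard against numerical round-off
angle_deg = rad2deg(acos(cos_angle));
end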
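% selectCandidateKeypoints is likewise not defined in this file. Below is a
% minimal sketch under the assumption that it simply rejects new detections
% lying within a pixel radius of a keypoint that is already tracked; the
% min_distance value is an assumed parameter, not taken from the original code.
function candidates = selectCandidateKeypoints(tracked_pts, new_pts)
% Keep only new detections at least min_distance pixels away from every
% already-tracked keypoint, so the same feature is not tracked twice.
min_distance = 10; % pixels (assumed value)
if isempty(tracked_pts)
candidates = new_pts;
return
end
% Squared distance from every new point to every tracked point (implicit expansion)
d2 = (new_pts(:,1) - tracked_pts(:,1)').^2 + (new_pts(:,2) - tracked_pts(:,2)').^2;
candidates = new_pts(min(d2, [], 2) > min_distance^2, :);
end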