From 9612b26cf9cef634b524c67df3a4dd981cff324d Mon Sep 17 00:00:00 2001
From: Jake
Date: Wed, 26 Aug 2015 18:11:51 -0400
Subject: [PATCH 001/267] Initial commit

From 0fc8c807f1e48176e24744a46cb6cb6b9b0ee313 Mon Sep 17 00:00:00 2001
From: Jacob Panikulam
Date: Thu, 3 Sep 2015 00:35:09 -0400
Subject: [PATCH 002/267] TOOLS: Add ros_tools utility package

---
 utils/sub8_ros_tools/CMakeLists.txt             |  5 ++
 utils/sub8_ros_tools/package.xml                | 11 ++++
 utils/sub8_ros_tools/setup.py                   | 12 ++++
 .../sub8_ros_tools/sub8_ros_tools/__init__.py   |  3 +
 .../sub8_ros_tools/image_helpers.py             | 58 +++++++++++++++++++
 .../sub8_ros_tools/init_helpers.py              | 26 +++++++++
 .../sub8_ros_tools/math_helpers.py              | 24 ++++++++
 7 files changed, 139 insertions(+)
 create mode 100644 utils/sub8_ros_tools/CMakeLists.txt
 create mode 100644 utils/sub8_ros_tools/package.xml
 create mode 100644 utils/sub8_ros_tools/setup.py
 create mode 100644 utils/sub8_ros_tools/sub8_ros_tools/__init__.py
 create mode 100644 utils/sub8_ros_tools/sub8_ros_tools/image_helpers.py
 create mode 100644 utils/sub8_ros_tools/sub8_ros_tools/init_helpers.py
 create mode 100644 utils/sub8_ros_tools/sub8_ros_tools/math_helpers.py

diff --git a/utils/sub8_ros_tools/CMakeLists.txt b/utils/sub8_ros_tools/CMakeLists.txt
new file mode 100644
index 00000000..7843551f
--- /dev/null
+++ b/utils/sub8_ros_tools/CMakeLists.txt
@@ -0,0 +1,5 @@
+cmake_minimum_required(VERSION 2.8.3)
+project(sub8_ros_tools)
+find_package(catkin REQUIRED)
+catkin_python_setup()
+catkin_package()
diff --git a/utils/sub8_ros_tools/package.xml b/utils/sub8_ros_tools/package.xml
new file mode 100644
index 00000000..5f278ea3
--- /dev/null
+++ b/utils/sub8_ros_tools/package.xml
@@ -0,0 +1,11 @@
+<?xml version="1.0"?>
+<package>
+  <name>sub8_ros_tools</name>
+  <version>1.0.0</version>
+  <description>The sub8_ros_tools package</description>
+  <maintainer>Jacob Panikulam</maintainer>
+  <license>MIT</license>
+  <buildtool_depend>catkin</buildtool_depend>
+
+
+</package>
\ No newline at end of file
diff --git a/utils/sub8_ros_tools/setup.py b/utils/sub8_ros_tools/setup.py
new file mode 100644
index 00000000..992c8a45
--- /dev/null
+++ b/utils/sub8_ros_tools/setup.py
@@ -0,0 +1,12 @@
+## ! DO NOT MANUALLY INVOKE THIS setup.py, USE CATKIN INSTEAD
+
+from distutils.core import setup
+from catkin_pkg.python_setup import generate_distutils_setup
+
+# fetch values from package.xml
+setup_args = generate_distutils_setup(
+    packages=['sub8_ros_tools'],
+    package_dir={'sub8_ros_tools': '.'},
+)
+
+setup(**setup_args)
diff --git a/utils/sub8_ros_tools/sub8_ros_tools/__init__.py b/utils/sub8_ros_tools/sub8_ros_tools/__init__.py
new file mode 100644
index 00000000..f34f96eb
--- /dev/null
+++ b/utils/sub8_ros_tools/sub8_ros_tools/__init__.py
@@ -0,0 +1,3 @@
+from init_helpers import wait_for_param
+from image_helpers import Image_Subscriber, Image_Publisher, make_image_msg, get_image_msg
+from math_helpers import rosmsg_to_numpy
\ No newline at end of file
diff --git a/utils/sub8_ros_tools/sub8_ros_tools/image_helpers.py b/utils/sub8_ros_tools/sub8_ros_tools/image_helpers.py
new file mode 100644
index 00000000..cbadc412
--- /dev/null
+++ b/utils/sub8_ros_tools/sub8_ros_tools/image_helpers.py
@@ -0,0 +1,58 @@
+#!/usr/bin/python
+'''
+Note:
+    The repeated use of CvBridge (instead of using make_image_msg and get_image_msg in the classes)
+    is intentional, to avoid the use of a global cvbridge, and to avoid reinstantiating a CvBridge for each use.
+'''
+import rospy
+import cv2
+from cv_bridge import CvBridge, CvBridgeError
+from sensor_msgs.msg import Image
+
+
+def make_image_msg(cv_image, encoding='bgr8'):
+    '''Take a cv image, and produce a ROS image message'''
+    bridge = CvBridge()
+    image_message = bridge.cv2_to_imgmsg(cv_image, encoding)
+    return image_message
+
+
+def get_image_msg(ros_image, encoding='bgr8'):
+    '''Take a ros image message, and yield an opencv image'''
+    bridge = CvBridge()
+    cv_image = bridge.imgmsg_to_cv2(ros_image, desired_encoding=encoding)
+    return cv_image
+
+
+class Image_Publisher(object):
+    def __init__(self, topic, encoding="bgr8", queue_size=1):
+        '''Create an essentially normal publisher, that will publish images without conversion hassle'''
+        self.im_pub = rospy.Publisher(topic, Image, queue_size=queue_size)
+        self.bridge = CvBridge()
+        self.encoding = encoding
+
+    def publish(self, cv_image):
+        try:
+            image_message = self.bridge.cv2_to_imgmsg(cv_image, self.encoding)
+            self.im_pub.publish(image_message)
+        except CvBridgeError, e:
+            print e
+
+
+class Image_Subscriber(object):
+    def __init__(self, topic, callback=None, encoding="bgr8", queue_size=1):
+        '''Calls $callback on each image every time a new image is published on $topic
+        Assumes topic of type "sensor_msgs/Image"
+        This behaves like a conventional subscriber, except handling the additional image conversion
+        '''
+        self.encoding = encoding
+        self.im_sub = rospy.Subscriber(topic, Image, self.convert, queue_size=queue_size)
+        self.bridge = CvBridge()
+        self.callback = callback
+
+    def convert(self, data):
+        try:
+            image = self.bridge.imgmsg_to_cv2(data, desired_encoding=self.encoding)
+            self.callback(image)
+        except CvBridgeError, e:
+            print e
diff --git a/utils/sub8_ros_tools/sub8_ros_tools/init_helpers.py b/utils/sub8_ros_tools/sub8_ros_tools/init_helpers.py
new file mode 100644
index 00000000..6056c721
--- /dev/null
+++ b/utils/sub8_ros_tools/sub8_ros_tools/init_helpers.py
@@ -0,0 +1,26 @@
+import rospy
+from time import time
+
+
+def wait_for_param(param_name, timeout=None, poll_rate=0.1):
+    '''Blocking wait for a parameter named $parameter_name to exist
+    Poll at frequency $poll_rate
+    Once the parameter exists, get it and return it.
+
+    This function intentionally leaves failure logging duties to the developer
+    '''
+    start_time = time()
+    rate = Rospy.Rate(poll_rate)
+    while not rospy.is_shutdown():
+
+        # Check if the parameter now exists
+        if rospy.has_param(param_name):
+            return rospy.get_param(param_name)
+
+        # If we exceed a defined timeout, return None
+        if timeout is not None:
+            if time() - start_time > timeout:
+                return None
+
+        # Continue to poll at poll_rate
+        rate.sleep()
\ No newline at end of file
diff --git a/utils/sub8_ros_tools/sub8_ros_tools/math_helpers.py b/utils/sub8_ros_tools/sub8_ros_tools/math_helpers.py
new file mode 100644
index 00000000..b260bde0
--- /dev/null
+++ b/utils/sub8_ros_tools/sub8_ros_tools/math_helpers.py
@@ -0,0 +1,24 @@
+import numpy as np
+
+def rosmsg_to_numpy(rosmsg, keys=None):
+    '''Convert a ROS Vector or Quaternion to a numpy vector
+    Ex:
+        quat = Quaternion(1.0, 0.0, 0.0, 0.0)
+        quat is not a vector, you have quat.x, quat.y,... and you can't do math on that
+
+        But wait, there's hope!
+            rosmsg_to_numpy(quat) -> array([1.0, 0.0, 0.0, 0.0])
+        Which you can do math on!
+
+    '''
+    if keys is None:
+        keys = ['x', 'y', 'z', 'w']
+    output_array = []
+    for key in keys:
+        # This is not necessarily the fastest way to do this
+        if hasattr(rosmsg, key):
+            output_array.append(getattr(rosmsg, key))
+        else:
+            break
+
+    return np.array(output_array)
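For context, a minimal usage sketch of the image helpers introduced above (the node and topic names here are illustrative, not part of the package):

    import rospy
    import numpy as np
    from sub8_ros_tools import Image_Publisher, Image_Subscriber

    rospy.init_node('image_example')

    def got_image(cv_image):
        # The callback receives a plain numpy (bgr8) array, no CvBridge needed
        rospy.loginfo('Got image with shape {}'.format(cv_image.shape))

    sub = Image_Subscriber('/camera/image_raw', callback=got_image)
    pub = Image_Publisher('/camera/debug')
    pub.publish(np.zeros((480, 640, 3), dtype=np.uint8))  # publish one black frame
    rospy.spin()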
From 4ecbd121dad415170f63bcdaba208ab2ab310414 Mon Sep 17 00:00:00 2001
From: Jacob Panikulam
Date: Thu, 3 Sep 2015 12:41:04 -0400
Subject: [PATCH 003/267] TOOLS: Add unittests for ros_tools

---
 utils/sub8_ros_tools/CMakeLists.txt             |  8 +++-
 utils/sub8_ros_tools/package.xml                |  4 ++
 utils/sub8_ros_tools/readme.md                  | 11 +++++
 .../sub8_ros_tools/image_helpers.py             |  0
 .../sub8_ros_tools/math_helpers.py              |  3 +-
 .../test_ros_tools/test_ros_tools.py            | 48 +++++++++++++++++++
 6 files changed, 72 insertions(+), 2 deletions(-)
 create mode 100644 utils/sub8_ros_tools/readme.md
 mode change 100644 => 100755 utils/sub8_ros_tools/sub8_ros_tools/image_helpers.py
 create mode 100644 utils/sub8_ros_tools/test_ros_tools/test_ros_tools.py

diff --git a/utils/sub8_ros_tools/CMakeLists.txt b/utils/sub8_ros_tools/CMakeLists.txt
index 7843551f..b5d21353 100644
--- a/utils/sub8_ros_tools/CMakeLists.txt
+++ b/utils/sub8_ros_tools/CMakeLists.txt
@@ -1,5 +1,11 @@
 cmake_minimum_required(VERSION 2.8.3)
 project(sub8_ros_tools)
-find_package(catkin REQUIRED)
+find_package(catkin REQUIRED COMPONENTS rostest std_msgs)
 catkin_python_setup()
 catkin_package()
+
+# Add folders to be run by python nosetests
+if(CATKIN_ENABLE_TESTING)
+  catkin_add_nosetests(test_ros_tools)
+endif()
+
diff --git a/utils/sub8_ros_tools/package.xml b/utils/sub8_ros_tools/package.xml
index 5f278ea3..3f2986c2 100644
--- a/utils/sub8_ros_tools/package.xml
+++ b/utils/sub8_ros_tools/package.xml
@@ -6,6 +6,10 @@
   <maintainer>Jacob Panikulam</maintainer>
   <license>MIT</license>
   <buildtool_depend>catkin</buildtool_depend>
+  <build_depend>rostest</build_depend>
+  <run_depend>rostest</run_depend>
+  <run_depend>rospy</run_depend>
+
 
 </package>
\ No newline at end of file
diff --git a/utils/sub8_ros_tools/readme.md b/utils/sub8_ros_tools/readme.md
new file mode 100644
index 00000000..e310dd0f
--- /dev/null
+++ b/utils/sub8_ros_tools/readme.md
@@ -0,0 +1,11 @@
+sub8_ros_tools
+==============
+
+Miscellaneous ROS tools that are useful for various tasks.
+
+Examples:
+
+
+
+# TODO
+* The items that directly interact with ros topics or the parameter server do not yet have unittests, they require rostest.
\ No newline at end of file
diff --git a/utils/sub8_ros_tools/sub8_ros_tools/image_helpers.py b/utils/sub8_ros_tools/sub8_ros_tools/image_helpers.py
old mode 100644
new mode 100755
diff --git a/utils/sub8_ros_tools/sub8_ros_tools/math_helpers.py b/utils/sub8_ros_tools/sub8_ros_tools/math_helpers.py
index b260bde0..178f768f 100644
--- a/utils/sub8_ros_tools/sub8_ros_tools/math_helpers.py
+++ b/utils/sub8_ros_tools/sub8_ros_tools/math_helpers.py
@@ -1,5 +1,6 @@
 import numpy as np
 
+
 def rosmsg_to_numpy(rosmsg, keys=None):
     '''Convert a ROS Vector or Quaternion to a numpy vector
     Ex:
@@ -8,7 +9,7 @@ def rosmsg_to_numpy(rosmsg, keys=None):
 
         But wait, there's hope!
             rosmsg_to_numpy(quat) -> array([1.0, 0.0, 0.0, 0.0])
-        Which you can do math on!
+        Yielding a vector, which you can do math on!
''' if keys is None: diff --git a/utils/sub8_ros_tools/test_ros_tools/test_ros_tools.py b/utils/sub8_ros_tools/test_ros_tools/test_ros_tools.py new file mode 100644 index 00000000..3e5d2457 --- /dev/null +++ b/utils/sub8_ros_tools/test_ros_tools/test_ros_tools.py @@ -0,0 +1,48 @@ +#!/usr/bin/env python +import unittest +import numpy as np +from geometry_msgs.msg import Quaternion, Vector3 +from sensor_msgs.msg import Image +from sub8_ros_tools import make_image_msg, get_image_msg +from sub8_ros_tools import rosmsg_to_numpy + + +class TestROSTools(unittest.TestCase): + def test_rosmsg_to_numpy_quaternion(self): + '''Test the quaternion case''' + # I know this is not unit quaternion + q = Quaternion(x=0.7, y=0.7, z=0.1, w=0.2) + numpy_array = rosmsg_to_numpy(q) + + np.testing.assert_allclose( + np.array([0.7, 0.7, 0.1, 0.2]), + numpy_array + ) + + def test_rosmsg_to_numpy_vector(self): + '''Test the vector case''' + v = Vector3(0.1, 99., 21.) + numpy_array = rosmsg_to_numpy(v) + + np.testing.assert_allclose( + np.array([0.1, 99., 21.]), + numpy_array + ) + + def test_make_image_msg(self): + '''Test that make ros image message doesn't fail''' + im = np.ones((255, 255, 3)).astype(np.uint8) + im_msg = make_image_msg(im) + self.assertIsInstance(im_msg, Image) + + def test_get_image_msg(self): + '''Test that a made ros image can be returned to its original form''' + im = np.ones((255, 255, 3)).astype(np.uint8) + im_msg = make_image_msg(im) + + cv_im = get_image_msg(im_msg) + np.testing.assert_array_equal(im, cv_im) + + +if __name__ == '__main__': + unittest.main() \ No newline at end of file From 849c5f347fe515b7e42c3066dfdfb7b17d73e025 Mon Sep 17 00:00:00 2001 From: Jacob Panikulam Date: Thu, 3 Sep 2015 17:52:58 -0400 Subject: [PATCH 004/267] TOOLS; ros_tools: Apply changes from review --- .../sub8_ros_tools/sub8_ros_tools/__init__.py | 3 +- .../sub8_ros_tools/image_helpers.py | 6 ++- .../sub8_ros_tools/init_helpers.py | 2 +- .../sub8_ros_tools/math_helpers.py | 52 +++++++++++++------ .../sub8_ros_tools/threading_helpers.py | 30 +++++++++++ .../test_ros_tools/test_ros_tools.py | 16 ++++-- 6 files changed, 87 insertions(+), 22 deletions(-) create mode 100644 utils/sub8_ros_tools/sub8_ros_tools/threading_helpers.py diff --git a/utils/sub8_ros_tools/sub8_ros_tools/__init__.py b/utils/sub8_ros_tools/sub8_ros_tools/__init__.py index f34f96eb..1a9b19a9 100644 --- a/utils/sub8_ros_tools/sub8_ros_tools/__init__.py +++ b/utils/sub8_ros_tools/sub8_ros_tools/__init__.py @@ -1,3 +1,4 @@ from init_helpers import wait_for_param from image_helpers import Image_Subscriber, Image_Publisher, make_image_msg, get_image_msg -from math_helpers import rosmsg_to_numpy \ No newline at end of file +from math_helpers import rosmsg_to_numpy +from threading_helpers import thread_lock \ No newline at end of file diff --git a/utils/sub8_ros_tools/sub8_ros_tools/image_helpers.py b/utils/sub8_ros_tools/sub8_ros_tools/image_helpers.py index cbadc412..1e590a6f 100755 --- a/utils/sub8_ros_tools/sub8_ros_tools/image_helpers.py +++ b/utils/sub8_ros_tools/sub8_ros_tools/image_helpers.py @@ -36,7 +36,8 @@ def publish(self, cv_image): image_message = self.bridge.cv2_to_imgmsg(cv_image, self.encoding) self.im_pub.publish(image_message) except CvBridgeError, e: - print e + # Intentionally absorb CvBridge Errors + rospy.logerr(e) class Image_Subscriber(object): @@ -55,4 +56,5 @@ def convert(self, data): image = self.bridge.imgmsg_to_cv2(data, desired_encoding=self.encoding) self.callback(image) except CvBridgeError, e: - print e + # 
+            # Intentionally absorb CvBridge Errors
+            rospy.logerr(e)
diff --git a/utils/sub8_ros_tools/sub8_ros_tools/init_helpers.py b/utils/sub8_ros_tools/sub8_ros_tools/init_helpers.py
index 6056c721..6cb60272 100644
--- a/utils/sub8_ros_tools/sub8_ros_tools/init_helpers.py
+++ b/utils/sub8_ros_tools/sub8_ros_tools/init_helpers.py
@@ -10,7 +10,7 @@ def wait_for_param(param_name, timeout=None, poll_rate=0.1):
     This function intentionally leaves failure logging duties to the developer
     '''
     start_time = time()
-    rate = Rospy.Rate(poll_rate)
+    rate = rospy.Rate(poll_rate)
     while not rospy.is_shutdown():
 
         # Check if the parameter now exists
diff --git a/utils/sub8_ros_tools/sub8_ros_tools/math_helpers.py b/utils/sub8_ros_tools/sub8_ros_tools/math_helpers.py
index 178f768f..aad59516 100644
--- a/utils/sub8_ros_tools/sub8_ros_tools/math_helpers.py
+++ b/utils/sub8_ros_tools/sub8_ros_tools/math_helpers.py
@@ -2,24 +2,46 @@
 
 
 def rosmsg_to_numpy(rosmsg, keys=None):
-    '''Convert a ROS Vector or Quaternion to a numpy vector
+    '''Convert an arbitrary ROS msg to a numpy array
+    With no additional arguments, it will by default handle:
+        Point2D, Point3D, Vector3D, and quaternions
+
     Ex:
         quat = Quaternion(1.0, 0.0, 0.0, 0.0)
         quat is not a vector, you have quat.x, quat.y,... and you can't do math on that
 
         But wait, there's hope!
             rosmsg_to_numpy(quat) -> array([1.0, 0.0, 0.0, 0.0])
-        Yielding a vector, which you can do math on!
+
+        Yielding a vector, which you can do math on!
+
+        Further, imagine a bounding box message, BB, with properties BB.x, BB.h, BB.y, and BB.w
+
+            rosmsg_to_numpy(BB, ['x', 'h', 'y', 'w']) -> array([BB.x, BB.h, BB.y, BB.w])
+
+            or...
+            rosmsg_to_numpy(some_Pose2D, ['x', 'y', 'yaw']) = array([x, y, yaw])
+
+    Note:
+        - This function is designed to handle the most common use cases (vectors, points and quaternions)
+          without requiring any additional arguments.
''' if keys is None: keys = ['x', 'y', 'z', 'w'] - output_array = [] - for key in keys: - # This is not necessarily the fastest way to do this - if hasattr(rosmsg, key): - output_array.append(getattr(rosmsg, key)) - else: - break - - return np.array(output_array) + output_array = [] + for key in keys: + # This is not necessarily the fastest way to do this + if hasattr(rosmsg, key): + output_array.append(getattr(rosmsg, key)) + else: + break + + return np.array(output_array).astype(np.float32) + + else: + output_array = np.zeros(len(keys), np.float32) + for n, key in enumerate(keys): + print key + output_array[n] = getattr(rosmsg, key) + + return output_array \ No newline at end of file diff --git a/utils/sub8_ros_tools/sub8_ros_tools/threading_helpers.py b/utils/sub8_ros_tools/sub8_ros_tools/threading_helpers.py new file mode 100644 index 00000000..ef852f6c --- /dev/null +++ b/utils/sub8_ros_tools/sub8_ros_tools/threading_helpers.py @@ -0,0 +1,30 @@ +def thread_lock(lock): + '''Use an existing thread lock to thread-lock a function + This prevents the function from being executed by multiple threads at once + + Example: + import threading + lock = threading.Lock() + + @thread_lock(lock) + def my_function(a, b, c): + print a, b, c + ''' + + def lock_thread(function_to_lock): + '''thread_lock(function) -> locked function + Thread locking decorator + If you use this as a decorator for a function, it will apply a threading lock during the execution of that function, + Which guarantees that no ROS callbacks can change the state of data while it is executing. This + is critical to make sure that a new message being sent doesn't cause a weird serial interruption + ''' + def locked_function(*args, **kwargs): + # Get threading lock + with lock: + result = function_to_lock(*args, **kwargs) + # Return, pretending the function hasn't changed at all + return result + # Return the function with locking added + return locked_function + + return thread_lock \ No newline at end of file diff --git a/utils/sub8_ros_tools/test_ros_tools/test_ros_tools.py b/utils/sub8_ros_tools/test_ros_tools/test_ros_tools.py index 3e5d2457..6aee5a71 100644 --- a/utils/sub8_ros_tools/test_ros_tools/test_ros_tools.py +++ b/utils/sub8_ros_tools/test_ros_tools/test_ros_tools.py @@ -1,7 +1,7 @@ #!/usr/bin/env python import unittest import numpy as np -from geometry_msgs.msg import Quaternion, Vector3 +from geometry_msgs.msg import Quaternion, Vector3, Pose2D from sensor_msgs.msg import Image from sub8_ros_tools import make_image_msg, get_image_msg from sub8_ros_tools import rosmsg_to_numpy @@ -9,7 +9,7 @@ class TestROSTools(unittest.TestCase): def test_rosmsg_to_numpy_quaternion(self): - '''Test the quaternion case''' + '''Test a rosmsg conversion for a geometry_msgs/Quaternion''' # I know this is not unit quaternion q = Quaternion(x=0.7, y=0.7, z=0.1, w=0.2) numpy_array = rosmsg_to_numpy(q) @@ -20,7 +20,7 @@ def test_rosmsg_to_numpy_quaternion(self): ) def test_rosmsg_to_numpy_vector(self): - '''Test the vector case''' + '''Test a rosmsg conversion for a geometry_msgs/Vector''' v = Vector3(0.1, 99., 21.) 
From 5545e19b51cdb18c213702a026d4cfca7fe611ba Mon Sep 17 00:00:00 2001
From: Jacob Panikulam
Date: Fri, 4 Sep 2015 23:13:59 -0400
Subject: [PATCH 005/267] TOOLS: Fix bug in threading_helpers

---
 utils/sub8_ros_tools/sub8_ros_tools/math_helpers.py      | 1 -
 utils/sub8_ros_tools/sub8_ros_tools/threading_helpers.py | 2 +-
 2 files changed, 1 insertion(+), 2 deletions(-)

diff --git a/utils/sub8_ros_tools/sub8_ros_tools/math_helpers.py b/utils/sub8_ros_tools/sub8_ros_tools/math_helpers.py
index aad59516..1c7e4a57 100644
--- a/utils/sub8_ros_tools/sub8_ros_tools/math_helpers.py
+++ b/utils/sub8_ros_tools/sub8_ros_tools/math_helpers.py
@@ -41,7 +41,6 @@ def rosmsg_to_numpy(rosmsg, keys=None):
     else:
         output_array = np.zeros(len(keys), np.float32)
         for n, key in enumerate(keys):
-            print key
             output_array[n] = getattr(rosmsg, key)
 
         return output_array
\ No newline at end of file
diff --git a/utils/sub8_ros_tools/sub8_ros_tools/threading_helpers.py b/utils/sub8_ros_tools/sub8_ros_tools/threading_helpers.py
index ef852f6c..5d51e760 100644
--- a/utils/sub8_ros_tools/sub8_ros_tools/threading_helpers.py
+++ b/utils/sub8_ros_tools/sub8_ros_tools/threading_helpers.py
@@ -27,4 +27,4 @@ def locked_function(*args, **kwargs):
         # Return the function with locking added
         return locked_function
 
-    return thread_lock
\ No newline at end of file
+    return lock_thread

From ed26f9f5166faf8820297f77ac3ab2df17fccc89 Mon Sep 17 00:00:00 2001
From: Jacob Panikulam
Date: Fri, 4 Sep 2015 23:14:29 -0400
Subject: [PATCH 006/267] TOOLS: Add unittest for thread_lock

---
 .../test_ros_tools/test_ros_tools.py | 30 +++++++++++++++++--
 1 file changed, 28 insertions(+), 2 deletions(-)

diff --git a/utils/sub8_ros_tools/test_ros_tools/test_ros_tools.py b/utils/sub8_ros_tools/test_ros_tools/test_ros_tools.py
index 6aee5a71..1c4cc65a 100644
--- a/utils/sub8_ros_tools/test_ros_tools/test_ros_tools.py
+++ b/utils/sub8_ros_tools/test_ros_tools/test_ros_tools.py
@@ -5,12 +5,13 @@
 from sensor_msgs.msg import Image
 from sub8_ros_tools import make_image_msg, get_image_msg
 from sub8_ros_tools import rosmsg_to_numpy
+from sub8_ros_tools import thread_lock
 
 
 class TestROSTools(unittest.TestCase):
     def test_rosmsg_to_numpy_quaternion(self):
         '''Test a rosmsg conversion for a geometry_msgs/Quaternion'''
-        # I know this is not unit quaternion
+        # I know this is not a unit quaternion
         q = Quaternion(x=0.7, y=0.7, z=0.1, w=0.2)
         numpy_array = rosmsg_to_numpy(q)
 
@@ -22,7 +22,7 @@
 
     def test_rosmsg_to_numpy_vector(self):
         '''Test a rosmsg conversion for a geometry_msgs/Vector'''
-        v = Vector3(0.1, 99., 21.)
+        v = Vector3(x=0.1, y=99., z=21.)
         numpy_array = rosmsg_to_numpy(v)
 
         np.testing.assert_allclose(
@@ -53,6 +54,31 @@
 
         cv_im = get_image_msg(im_msg)
         np.testing.assert_array_equal(im, cv_im)
 
+    def test_thread_lock(self):
+        ran = False
+
+        class FakeLock(object):
+            def __init__(self):
+                self.entry = False
+                self.exit = False
+
+            def __enter__(self):
+                self.entry = True
+
+            def __exit__(self, *args):
+                self.exit = True
+
+        fake_lock = FakeLock()
+
+        @thread_lock(fake_lock)
+        def lock_test(a):
+            return (fake_lock.entry is True) and (fake_lock.exit is False)
+
+        result = lock_test(1)
+
+        self.assertTrue(fake_lock.entry, msg='Thread was never locked')
+        self.assertTrue(fake_lock.exit, msg='Thread was never released')
+        self.assertTrue(result, msg='Thread was not locked while the function was executed')
+
 
 if __name__ == '__main__':
     unittest.main()
\ No newline at end of file
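With the return bug fixed and the behavior now under test, a minimal usage sketch of thread_lock (assuming a real threading.Lock rather than the FakeLock used by the test):

    import threading
    from sub8_ros_tools import thread_lock

    lock = threading.Lock()
    state = {'count': 0}

    @thread_lock(lock)
    def update_state(delta):
        # Only one thread may execute this body at a time,
        # e.g. a ROS callback and a main loop sharing `state`
        state['count'] += delta

    update_state(1)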
From 4e4910ed9837c8eff9c007a2d9581177c3ef91ed Mon Sep 17 00:00:00 2001
From: zachgoins
Date: Mon, 14 Sep 2015 11:07:13 -0400
Subject: [PATCH 007/267] Initial commit

From 004e0213c565dcb3577068891a7a961352fdc6c5 Mon Sep 17 00:00:00 2001
From: Jacob Panikulam
Date: Thu, 1 Oct 2015 23:46:21 -0400
Subject: [PATCH 008/267] UTILS: Add geometry helper functions, bugfixes

- Remove executable flag from image_helpers.py
- Add function for generating a matrix to align two vectors
- Add functions for decomposing pose messages directly
---
 .../sub8_ros_tools/sub8_ros_tools/__init__.py |  4 +-
 .../sub8_ros_tools/geometry_helpers.py        | 44 +++++++++++++++++++
 .../sub8_ros_tools/image_helpers.py           |  0
 .../sub8_ros_tools/math_helpers.py            | 36 ++++++++++++++-
 4 files changed, 82 insertions(+), 2 deletions(-)
 create mode 100644 utils/sub8_ros_tools/sub8_ros_tools/geometry_helpers.py
 mode change 100755 => 100644 utils/sub8_ros_tools/sub8_ros_tools/image_helpers.py

diff --git a/utils/sub8_ros_tools/sub8_ros_tools/__init__.py b/utils/sub8_ros_tools/sub8_ros_tools/__init__.py
index 1a9b19a9..0b987e3a 100644
--- a/utils/sub8_ros_tools/sub8_ros_tools/__init__.py
+++ b/utils/sub8_ros_tools/sub8_ros_tools/__init__.py
@@ -1,4 +1,6 @@
 from init_helpers import wait_for_param
 from image_helpers import Image_Subscriber, Image_Publisher, make_image_msg, get_image_msg
 from math_helpers import rosmsg_to_numpy
-from threading_helpers import thread_lock
\ No newline at end of file
+from threading_helpers import thread_lock
+from alarm_helpers import AlarmBroadcaster
+from geometry_helpers import *
\ No newline at end of file
diff --git a/utils/sub8_ros_tools/sub8_ros_tools/geometry_helpers.py b/utils/sub8_ros_tools/sub8_ros_tools/geometry_helpers.py
new file mode 100644
index 00000000..feb82b9a
--- /dev/null
+++ b/utils/sub8_ros_tools/sub8_ros_tools/geometry_helpers.py
@@ -0,0 +1,44 @@
+from __future__ import division
+import numpy as np
+
+
+def make_rotation(vector_a, vector_b):
+    '''Determine a 3D rotation that rotates A onto B
+    In other words, we want a matrix R that aligns a with b
+
+    >> R = make_rotation(a, b)
+    >> p = R.dot(a)
+    >> np.dot(p, a)
+    >>> 0.0
+
+    [1] Calculate Rotation Matrix to align Vector A to Vector B in 3d?
+        http://math.stackexchange.com/questions/180418
+    '''
+    unit_a = vector_a / np.linalg.norm(vector_a)
+    unit_b = vector_b / np.linalg.norm(vector_b)
+
+    v = np.cross(unit_a, unit_b)
+    s = np.linalg.norm(v)
+
+    c = np.dot(unit_a, unit_b)
+
+    skew_cross = skew_symmetric_cross(v)
+    skew_squared = np.linalg.matrix_power(skew_cross, 2)
+    normalization = (1 - c) / (s ** 2)
+
+    R = np.eye(3) + skew_cross + (skew_squared * normalization)
+
+    return R
+
+
+def skew_symmetric_cross(a):
+    '''Return the skew symmetric matrix representation of a vector
+    [1] https://en.wikipedia.org/wiki/Cross_product#Skew-symmetric_matrix
+    '''
+    assert len(a) == 3, "a must be in R3"
+    skew_symm = np.array([
+        [0., -a[2], +a[1]],
+        [+a[2], 0., -a[0]],
+        [-a[1], +a[0], 0.],
+    ], dtype=np.float32)
+    return skew_symm
+
diff --git a/utils/sub8_ros_tools/sub8_ros_tools/image_helpers.py b/utils/sub8_ros_tools/sub8_ros_tools/image_helpers.py
old mode 100755
new mode 100644
diff --git a/utils/sub8_ros_tools/sub8_ros_tools/math_helpers.py b/utils/sub8_ros_tools/sub8_ros_tools/math_helpers.py
index 1c7e4a57..1d375b16 100644
--- a/utils/sub8_ros_tools/sub8_ros_tools/math_helpers.py
+++ b/utils/sub8_ros_tools/sub8_ros_tools/math_helpers.py
@@ -43,4 +43,38 @@ def rosmsg_to_numpy(rosmsg, keys=None):
         for n, key in enumerate(keys):
             output_array[n] = getattr(rosmsg, key)
 
-        return output_array
\ No newline at end of file
+        return output_array
+
+
+def pose_to_numpy(pose):
+    '''TODO: Unit-test
+    returns (position, orientation)
+    '''
+    position = rosmsg_to_numpy(pose.position)
+    orientation = rosmsg_to_numpy(pose.orientation)
+    return position, orientation
+
+
+def twist_to_numpy(twist):
+    '''TODO: Unit-test
+    Convert a twist message into a tuple of numpy arrays
+    returns (linear, angular)
+    '''
+
+    linear = rosmsg_to_numpy(twist.linear)
+    angular = rosmsg_to_numpy(twist.angular)
+    return linear, angular
+
+
+def odometry_to_numpy(odom):
+    '''TODO: Unit-test
+    Convert an odometry message into a tuple of numpy arrays
+    returns (pose, twist, pose_covariance, twist_covariance)
+    '''
+    pose = pose_to_numpy(odom.pose.pose)
+    pose_covariance = np.array(odom.pose.covariance).reshape(6, 6)
+
+    twist = twist_to_numpy(odom.twist.twist)
+    twist_covariance = np.array(odom.twist.covariance).reshape(6, 6)
+
+    return pose, twist, pose_covariance, twist_covariance
\ No newline at end of file
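A small numeric sketch of make_rotation as introduced above (the vectors are arbitrary examples):

    import numpy as np
    from sub8_ros_tools import make_rotation

    a = np.array([1.0, 0.0, 0.0])
    b = np.array([0.0, 1.0, 0.0])

    R = make_rotation(a, b)
    p = R.dot(a)
    print np.cross(p, b)  # ~array([0., 0., 0.]): R has rotated a onto b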
From 08c13b6a37bf571d99fc6539fe7ac0af47a68047 Mon Sep 17 00:00:00 2001
From: Jacob Panikulam
Date: Wed, 7 Oct 2015 16:12:38 -0400
Subject: [PATCH 009/267] UTILS: Add unit-tests for make_rotation

---
 .../test_ros_tools/test_ros_tools.py | 40 +++++++++++++++++++
 1 file changed, 40 insertions(+)

diff --git a/utils/sub8_ros_tools/test_ros_tools/test_ros_tools.py b/utils/sub8_ros_tools/test_ros_tools/test_ros_tools.py
index 1c4cc65a..ca3edf27 100644
--- a/utils/sub8_ros_tools/test_ros_tools/test_ros_tools.py
+++ b/utils/sub8_ros_tools/test_ros_tools/test_ros_tools.py
@@ -6,6 +6,8 @@
 from sub8_ros_tools import make_image_msg, get_image_msg
 from sub8_ros_tools import rosmsg_to_numpy
 from sub8_ros_tools import thread_lock
+from sub8_ros_tools import AlarmBroadcaster
+from sub8_ros_tools import skew_symmetric_cross, make_rotation
 
 
 class TestROSTools(unittest.TestCase):
@@ -55,6 +57,7 @@
 
     def test_thread_lock(self):
+        '''Test that the thread lock decorator correctly locks, in the correct order'''
         ran = False
 
         class FakeLock(object):
@@ -79,6 +82,43 @@
 
         result = lock_test(1)
 
         self.assertTrue(fake_lock.entry, msg='Thread was never locked')
         self.assertTrue(fake_lock.exit, msg='Thread was never released')
         self.assertTrue(result, msg='Thread was not locked while the function was executed')
 
+    def test_instantiate_alarm_broadcaster(self):
+        '''Ensure that the alarm broadcaster instantiates without errors'''
+        broadcaster = AlarmBroadcaster()
+
+    def test_add_alarm(self):
+        '''Ensure that adding an alarm succeeds without errors'''
+        broadcaster = AlarmBroadcaster()
+        alarm = broadcaster.add_alarm(
+            name='wake-up',
+            action_required=True,
+            severity=1,
+            problem_description='This is a problem',
+            json_parameters='{"concern": ["a", "b", "c"]}'
+        )
+
+    def test_skew_symmetric_cross(self):
+        '''Test that the skew symmetric cross product matrix produces the definition
+        [1] https://en.wikipedia.org/wiki/Cross_product#Skew-symmetric_matrix
+        '''
+        skew_sym = skew_symmetric_cross([1, 2, 3])
+        truth = np.array([
+            [+0, -3, +2],
+            [+3, +0, -1],
+            [-2, +1, +0],
+        ])
+        np.testing.assert_allclose(skew_sym, truth)
+
+    def test_make_rotation(self):
+        '''Test several random vector pairs, and see if we can generate a valid alignment'''
+        for k in range(10):
+            p = np.random.random(3) * 10
+            q = np.random.random(3) * 10
+
+            R = make_rotation(p, q)
+            p_rotated = R.dot(p)
+
+            np.testing.assert_allclose([0.0, 0.0, 0.0], np.cross(p_rotated, q), atol=1e-5)
+
 
 if __name__ == '__main__':
     unittest.main()
\ No newline at end of file

From 2f57025c931eea5623363a5e543fd96f0e229486 Mon Sep 17 00:00:00 2001
From: Jacob Panikulam
Date: Sat, 10 Oct 2015 22:45:58 -0400
Subject: [PATCH 010/267] UTILS: Address numerical bugs in align_vectors

- Fix the both-vectors-aligned case
- Fix the anti-parallel case
- Add "Normalize" function
- Add unit-tests, that are more verbose
- Additionally, remove the alarm unittests that should not be there
---
 .../sub8_ros_tools/sub8_ros_tools/__init__.py |  4 +--
 .../sub8_ros_tools/geometry_helpers.py        | 21 +++++++++++++--
 .../test_ros_tools/test_ros_tools.py          | 27 ++++++++++++++++---
 3 files changed, 44 insertions(+), 8 deletions(-)

diff --git a/utils/sub8_ros_tools/sub8_ros_tools/__init__.py b/utils/sub8_ros_tools/sub8_ros_tools/__init__.py
index 0b987e3a..42689d04 100644
--- a/utils/sub8_ros_tools/sub8_ros_tools/__init__.py
+++ b/utils/sub8_ros_tools/sub8_ros_tools/__init__.py
@@ -2,5 +2,5 @@
 from image_helpers import Image_Subscriber, Image_Publisher, make_image_msg, get_image_msg
 from math_helpers import rosmsg_to_numpy
 from threading_helpers import thread_lock
-from alarm_helpers import AlarmBroadcaster
-from geometry_helpers import *
\ No newline at end of file
+# from alarm_helpers import AlarmBroadcaster
+from geometry_helpers import make_rotation, normalize, skew_symmetric_cross
\ No newline at end of file
diff --git a/utils/sub8_ros_tools/sub8_ros_tools/geometry_helpers.py b/utils/sub8_ros_tools/sub8_ros_tools/geometry_helpers.py
index feb82b9a..dc9aa1a6 100644
--- a/utils/sub8_ros_tools/sub8_ros_tools/geometry_helpers.py
+++ b/utils/sub8_ros_tools/sub8_ros_tools/geometry_helpers.py
@@ -13,9 +13,11 @@ def make_rotation(vector_a, vector_b):
 
     [1] Calculate Rotation Matrix to align Vector A to Vector B in 3d?
         http://math.stackexchange.com/questions/180418
+    [2] N. Ho, Finding Optimal Rotation...Between Corresponding 3D Points
+        http://nghiaho.com/?page_id=671
     '''
-    unit_a = vector_a / np.linalg.norm(vector_a)
-    unit_b = vector_b / np.linalg.norm(vector_b)
+    unit_a = normalize(vector_a)
+    unit_b = normalize(vector_b)
 
     v = np.cross(unit_a, unit_b)
     s = np.linalg.norm(v)
@@ -24,10 +26,23 @@ def make_rotation(vector_a, vector_b):
 
     skew_cross = skew_symmetric_cross(v)
     skew_squared = np.linalg.matrix_power(skew_cross, 2)
+
+    if np.isclose(c, 1.0, atol=1e-4):
+        R = np.eye(3)
+        return R
+    elif np.isclose(c, -1.0, atol=1e-4):
+        R = np.eye(3)
+        R[2, 2] *= -1
+        return R
+
     normalization = (1 - c) / (s ** 2)
 
     R = np.eye(3) + skew_cross + (skew_squared * normalization)
 
+    # Address the reflection case
+    if np.linalg.det(R) < 0:
+        R[:, 2] *= -1
+
     return R
 
 def skew_symmetric_cross(a):
@@ -42,3 +57,5 @@ def skew_symmetric_cross(a):
     ], dtype=np.float32)
     return skew_symm
 
+def normalize(vector):
+    return vector / np.linalg.norm(vector)
\ No newline at end of file
diff --git a/utils/sub8_ros_tools/test_ros_tools/test_ros_tools.py b/utils/sub8_ros_tools/test_ros_tools/test_ros_tools.py
index ca3edf27..e35b0677 100644
--- a/utils/sub8_ros_tools/test_ros_tools/test_ros_tools.py
+++ b/utils/sub8_ros_tools/test_ros_tools/test_ros_tools.py
@@ -6,8 +6,8 @@
 from sub8_ros_tools import make_image_msg, get_image_msg
 from sub8_ros_tools import rosmsg_to_numpy
 from sub8_ros_tools import thread_lock
-from sub8_ros_tools import AlarmBroadcaster
-from sub8_ros_tools import skew_symmetric_cross, make_rotation
+# from sub8_ros_tools import AlarmBroadcaster
+from sub8_ros_tools import skew_symmetric_cross, make_rotation, normalize
 
 
 class TestROSTools(unittest.TestCase):
@@ -82,10 +82,12 @@
 
+    @unittest.skip("Not ready for primetime")
     def test_instantiate_alarm_broadcaster(self):
         '''Ensure that the alarm broadcaster instantiates without errors'''
         broadcaster = AlarmBroadcaster()
 
+    @unittest.skip("Not ready for primetime")
     def test_add_alarm(self):
         '''Ensure that adding an alarm succeeds without errors'''
         broadcaster = AlarmBroadcaster()
@@ -118,7 +120,24 @@
 
             R = make_rotation(p, q)
             p_rotated = R.dot(p)
 
-            np.testing.assert_allclose([0.0, 0.0, 0.0], np.cross(p_rotated, q), atol=1e-5)
+            # Test that the matrix actually aligns p with q
+            np.testing.assert_allclose([0.0, 0.0, 0.0], np.cross(p_rotated, q), atol=1e-5,
+                                       err_msg="The generated rotation matrix did not align the input vectors")
+
+    def test_normalize_vector(self):
+        '''Test vector normalization'''
+        for k in range(10):
+            rand_vec = np.random.random(k)  # Make a random k-length vector
+
+            # Ignore the astronomically unlikely case that a vector has near 0 norm
+            if not np.isclose(np.sum(rand_vec), 0):
+                normalized = normalize(rand_vec)
+                norm = np.linalg.norm(normalized)
+
+                # Test that the norm is 1
+                np.testing.assert_almost_equal(norm, 1.0, err_msg="The normalized vector did not have length 1")
+
 
 if __name__ == '__main__':
-    unittest.main()
\ No newline at end of file
+    suite = unittest.TestLoader().loadTestsFromTestCase(TestROSTools)
+    unittest.TextTestRunner(verbosity=2).run(suite)
\ No newline at end of file
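The parallel edge case addressed above can be sketched numerically (the anti-parallel and reflection branches are exercised analogously):

    import numpy as np
    from sub8_ros_tools import make_rotation

    a = np.array([0.0, 0.0, 2.0])
    R = make_rotation(a, a)          # parallel input vectors
    print np.allclose(R, np.eye(3))  # True: no rotation needed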
From 588c64848749578f22b538bf28250626d77f3be1 Mon Sep 17 00:00:00 2001
From: Jacob Panikulam
Date: Sun, 11 Oct 2015 03:38:19 -0400
Subject: [PATCH 011/267] UTILS: Add tool for downloading files from web

- Also unzips them!
---
 utils/sub8_ros_tools/readme.md                  |  5 +-
 utils/sub8_ros_tools/setup.py                   |  7 ++-
 .../sub8_misc_tools/__init__.py                 |  2 +
 .../sub8_misc_tools/download.py                 | 57 +++++++++++++++++++
 4 files changed, 66 insertions(+), 5 deletions(-)
 create mode 100644 utils/sub8_ros_tools/sub8_misc_tools/__init__.py
 create mode 100644 utils/sub8_ros_tools/sub8_misc_tools/download.py

diff --git a/utils/sub8_ros_tools/readme.md b/utils/sub8_ros_tools/readme.md
index e310dd0f..77fa0c13 100644
--- a/utils/sub8_ros_tools/readme.md
+++ b/utils/sub8_ros_tools/readme.md
@@ -5,7 +5,6 @@
 Miscellaneous ROS tools that are useful for various tasks.
 
 Examples:
 
-
-
 # TODO
-* The items that directly interact with ros topics or the parameter server do not yet have unittests, they require rostest.
\ No newline at end of file
+* The items that directly interact with ros topics or the parameter server do not yet have unittests, they require rostest.
+
diff --git a/utils/sub8_ros_tools/setup.py b/utils/sub8_ros_tools/setup.py
index 992c8a45..33f8621d 100644
--- a/utils/sub8_ros_tools/setup.py
+++ b/utils/sub8_ros_tools/setup.py
@@ -5,8 +5,11 @@
 
 # fetch values from package.xml
 setup_args = generate_distutils_setup(
-    packages=['sub8_ros_tools'],
-    package_dir={'sub8_ros_tools': '.'},
+    packages=['sub8_ros_tools', 'sub8_misc_tools'],
+    package_dir={
+        'sub8_ros_tools': '.',
+        'sub8_misc_tools': '.',
+    },
 )
 
 setup(**setup_args)
diff --git a/utils/sub8_ros_tools/sub8_misc_tools/__init__.py b/utils/sub8_ros_tools/sub8_misc_tools/__init__.py
new file mode 100644
index 00000000..d501d40f
--- /dev/null
+++ b/utils/sub8_ros_tools/sub8_misc_tools/__init__.py
@@ -0,0 +1,2 @@
+from download import download_and_unzip
+from download import download
\ No newline at end of file
diff --git a/utils/sub8_ros_tools/sub8_misc_tools/download.py b/utils/sub8_ros_tools/sub8_misc_tools/download.py
new file mode 100644
index 00000000..6f81a896
--- /dev/null
+++ b/utils/sub8_ros_tools/sub8_misc_tools/download.py
@@ -0,0 +1,57 @@
+import urllib2
+import os
+import zipfile
+import cStringIO as StringIO
+
+'''
+This file contains utilities for downloading a file from the internet
+We're using this because I don't want to track 20MB files in Git.
+
+[1] Extracting zipfiles
+    http://stackoverflow.com/questions/9431918
+
+[2] Unzip binary directly
+    http://stackoverflow.com/questions/18966672
+
+[3] Download a file via http
+    http://stackoverflow.com/questions/22676
+'''
+
+
+def download_and_unzip(url, output_dir):
+    '''Download and unzip a file at $url,
+    then put the contained files at $output_dir
+    '''
+    try:
+        html = download(url)
+    except:
+        raise(IOError("Could not load file at {}".format(url)))
+
+    fake_file = StringIO.StringIO(html)
+
+    zip_ = zipfile.ZipFile(fake_file, "r")
+    for file_path in zip_.namelist():
+        path, file_name = os.path.split(file_path)
+        file_like = zip_.open(file_path)
+
+        f = open(os.path.join(output_dir, file_name), 'w')
+        f.write(file_like.read())
+        f.close()
+
+
+def download(url, output_filename=None):
+    '''Download a file at $url, and return the html
+    If you set an output location, it will also write the file
+    '''
+    response = urllib2.urlopen(url)
+    html = response.read()
+    if output_filename is not None:
+        f = open(output_filename, 'w')
+        f.write(html)
+        f.close()
+    return html
+
+
+if __name__ == '__main__':
+    sub_model_url = "https://doc-10-5k-docs.googleusercontent.com/docs/securesc/ha0ro937gcuc7l7deffksulhg5h7mbp1/afivhj1jtemmjum4flk2sdl674545lme/1444536000000/10637387184977454351/*/0B5MtehifpQQAa0JEeExzYmF5RlU?e=download"
+    download_and_unzip(sub_model_url, '.')
\ No newline at end of file
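A usage sketch for the download helpers (the URL below is a placeholder, not a real asset):

    from sub8_misc_tools import download, download_and_unzip

    # Fetch a zip archive and extract its contents into the current directory
    download_and_unzip('http://example.com/sub_model.zip', '.')

    # Or fetch a single file, optionally keeping a copy on disk
    html = download('http://example.com/index.html', output_filename='index.html')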
From 33593bfed09c2c2607542a8ba105ffcab9f8b193 Mon Sep 17 00:00:00 2001
From: Jacob Panikulam
Date: Sun, 11 Oct 2015 15:50:38 -0400
Subject: [PATCH 012/267] SIM: Add mesh-models to sim_full

---
 utils/sub8_ros_tools/sub8_misc_tools/download.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/utils/sub8_ros_tools/sub8_misc_tools/download.py b/utils/sub8_ros_tools/sub8_misc_tools/download.py
index 6f81a896..2e262d6a 100644
--- a/utils/sub8_ros_tools/sub8_misc_tools/download.py
+++ b/utils/sub8_ros_tools/sub8_misc_tools/download.py
@@ -53,5 +53,5 @@ def download(url, output_filename=None):
 
 
 if __name__ == '__main__':
-    sub_model_url = "https://doc-10-5k-docs.googleusercontent.com/docs/securesc/ha0ro937gcuc7l7deffksulhg5h7mbp1/afivhj1jtemmjum4flk2sdl674545lme/1444536000000/10637387184977454351/*/0B5MtehifpQQAa0JEeExzYmF5RlU?e=download"
+    sub_model_url = "http://goo.gl/f0ennf?gdriveurl"
     download_and_unzip(sub_model_url, '.')
\ No newline at end of file

From 54782a0ba757814f51da7670178408b3d634be41 Mon Sep 17 00:00:00 2001
From: Jacob Panikulam
Date: Sun, 18 Oct 2015 00:50:45 -0400
Subject: [PATCH 013/267] ALARMS: Move AlarmBroadcaster to sub8_alarm pkg

- Remove deprecated unit tests
- Add new ones (hooray!)
- Remove it from setup.py
---
 .../sub8_ros_tools/sub8_ros_tools/__init__.py |  5 ++-
 .../test_ros_tools/test_ros_tools.py          | 35 ++++++-------------
 2 files changed, 13 insertions(+), 27 deletions(-)

diff --git a/utils/sub8_ros_tools/sub8_ros_tools/__init__.py b/utils/sub8_ros_tools/sub8_ros_tools/__init__.py
index 42689d04..c680294c 100644
--- a/utils/sub8_ros_tools/sub8_ros_tools/__init__.py
+++ b/utils/sub8_ros_tools/sub8_ros_tools/__init__.py
@@ -1,6 +1,5 @@
-from init_helpers import wait_for_param
+from init_helpers import wait_for_param, wait_for_subscriber
 from image_helpers import Image_Subscriber, Image_Publisher, make_image_msg, get_image_msg
 from math_helpers import rosmsg_to_numpy
 from threading_helpers import thread_lock
-# from alarm_helpers import AlarmBroadcaster
-from geometry_helpers import make_rotation, normalize, skew_symmetric_cross
\ No newline at end of file
+from geometry_helpers import make_rotation, normalize, skew_symmetric_cross, compose_transformation
\ No newline at end of file
diff --git a/utils/sub8_ros_tools/test_ros_tools/test_ros_tools.py b/utils/sub8_ros_tools/test_ros_tools/test_ros_tools.py
index e35b0677..cc41d88d 100644
--- a/utils/sub8_ros_tools/test_ros_tools/test_ros_tools.py
+++ b/utils/sub8_ros_tools/test_ros_tools/test_ros_tools.py
@@ -18,7 +18,7 @@
         numpy_array = rosmsg_to_numpy(q)
 
         np.testing.assert_allclose(
-            np.array([0.7, 0.7, 0.1, 0.2]), 
+            np.array([0.7, 0.7, 0.1, 0.2]),
             numpy_array
         )
@@ -28,7 +28,7 @@
         numpy_array = rosmsg_to_numpy(v)
 
         np.testing.assert_allclose(
-            np.array([0.1, 99., 21.]), 
+            np.array([0.1, 99., 21.]),
             numpy_array
         )
@@ -38,7 +38,7 @@
         numpy_array = rosmsg_to_numpy(pose_2d, ['x', 'y', 'theta'])
 
         np.testing.assert_allclose(
-            np.array([1.0, 2.0, 3.14]), 
+            np.array([1.0, 2.0, 3.14]),
             numpy_array
         )
@@ -82,23 +82,6 @@
 
         self.assertTrue(result, msg='Thread was not locked while the function was executed')
 
-    @unittest.skip("Not ready for primetime")
-    def test_instantiate_alarm_broadcaster(self):
-        '''Ensure that the alarm broadcaster instantiates without errors'''
-        broadcaster = AlarmBroadcaster()
-
-    @unittest.skip("Not ready for primetime")
-    def test_add_alarm(self):
-        '''Ensure that adding an alarm succeeds without errors'''
-        broadcaster = AlarmBroadcaster()
-        alarm = broadcaster.add_alarm(
-            name='wake-up',
-            action_required=True,
-            severity=1,
-            problem_description='This is a problem',
-            json_parameters='{"concern": ["a", "b", "c"]}'
-        )
-
     def test_skew_symmetric_cross(self):
         '''Test that the skew symmetric cross product matrix produces the definition
         [1] https://en.wikipedia.org/wiki/Cross_product#Skew-symmetric_matrix
         '''
         skew_sym = skew_symmetric_cross([1, 2, 3])
         truth = np.array([
             [+0, -3, +2],
             [+3, +0, -1],
             [-2, +1, +0],
         ])
-        np.testing.assert_allclose(skew_sym, truth)
+        np.testing.assert_allclose(skew_sym, truth, err_msg="Did not make a Skew-symmetric matrix. Pretty big screw-up imho.")
 
     def test_make_rotation(self):
         '''Test several random vector pairs, and see if we can generate a valid alignment'''
+        scaling = 10
         for k in range(10):
-            p = np.random.random(3) * 10
-            q = np.random.random(3) * 10
+            p = (np.random.random(3) - 0.5) * scaling
+            q = (np.random.random(3) - 0.5) * scaling
 
             R = make_rotation(p, q)
             p_rotated = R.dot(p)
 
             # Test that the matrix actually aligns p with q
             np.testing.assert_allclose([0.0, 0.0, 0.0], np.cross(p_rotated, q), atol=1e-5,
-                                       err_msg="The generated rotation matrix did not align the input vectors")
+                                       err_msg="The generated rotation matrix did not align the input vectors, {} to {}".format(
+                                           p, q)
+                                       )
+            self.assertGreater(np.dot(p_rotated, q), 0.0, msg="The rotation did wacky inversion")
 
     def test_normalize_vector(self):
         '''Test vector normalization'''

From 9fa535cafc97d8159f39c54458efd34f384586c3 Mon Sep 17 00:00:00 2001
From: Jacob Panikulam
Date: Sun, 18 Oct 2015 00:55:16 -0400
Subject: [PATCH 014/267] UTILS: Add init-helper 'wait for subscriber'

For integration-testing purposes it is often useful to wait until a
particular node subscribes to you
---
 .../sub8_ros_tools/init_helpers.py | 39 +++++++++++++++++--
 1 file changed, 35 insertions(+), 4 deletions(-)

diff --git a/utils/sub8_ros_tools/sub8_ros_tools/init_helpers.py b/utils/sub8_ros_tools/sub8_ros_tools/init_helpers.py
index 6cb60272..c2af89b7 100644
--- a/utils/sub8_ros_tools/sub8_ros_tools/init_helpers.py
+++ b/utils/sub8_ros_tools/sub8_ros_tools/init_helpers.py
@@ -1,5 +1,6 @@
 import rospy
-from time import time
+import rostest
+import time
 
 
 def wait_for_param(param_name, timeout=None, poll_rate=0.1):
@@ -9,7 +10,7 @@ def wait_for_param(param_name, timeout=None, poll_rate=0.1):
 
     This function intentionally leaves failure logging duties to the developer
     '''
-    start_time = time()
+    start_time = time.time()
     rate = rospy.Rate(poll_rate)
     while not rospy.is_shutdown():
 
@@ -19,8 +20,38 @@ def wait_for_param(param_name, timeout=None, poll_rate=0.1):
 
         # If we exceed a defined timeout, return None
         if timeout is not None:
-            if time() - start_time > timeout:
+            if time.time() - start_time > timeout:
                 return None
 
         # Continue to poll at poll_rate
-        rate.sleep()
\ No newline at end of file
+        rate.sleep()
+
+
+def wait_for_subscriber(node_name, topic, timeout=5.0):
+    '''Blocks until $node_name subscribes to $topic
+    Useful mostly in integration tests --
+    I would counsel against use elsewhere
+    '''
+    end_time = time.time() + timeout
+
+    resolved_topic = rospy.resolve_name(topic)
+    resolved_node = rospy.resolve_name(node_name)
+
+    # Wait for time-out or ros-shutdown
+    while (time.time() < end_time) and (not rospy.is_shutdown()):
+        subscribed = rostest.is_subscriber(
+            rospy.resolve_name(topic),
+            rospy.resolve_name(node_name)
+        )
+        # Success scenario: node subscribes
+        if subscribed:
+            break
+        time.sleep(0.1)
+
+    # Could do this with a while/else
+    # But chose to explicitly check
+    success = rostest.is_subscriber(
+        rospy.resolve_name(topic),
+        rospy.resolve_name(node_name)
+    )
+    return success
\ No newline at end of file
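A sketch of how wait_for_subscriber is meant to be used from an integration test (node and topic names are illustrative):

    import rospy
    from sub8_ros_tools import wait_for_subscriber

    rospy.init_node('integration_test')

    # Block for up to 10 seconds until the thruster mapper subscribes to our wrench topic
    ready = wait_for_subscriber('thruster_mapper', '/wrench', timeout=10.0)
    assert ready, 'thruster_mapper never subscribed to /wrench'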
From 07a0b554934380502cde797b96882e279766142d Mon Sep 17 00:00:00 2001
From: Jacob Panikulam
Date: Sun, 18 Oct 2015 00:56:57 -0400
Subject: [PATCH 015/267] UTILS: Add compose_transform (R, t) helper

---
 utils/sub8_ros_tools/package.xml                |  2 --
 .../sub8_ros_tools/geometry_helpers.py          | 17 ++++++++++++---
 2 files changed, 14 insertions(+), 5 deletions(-)

diff --git a/utils/sub8_ros_tools/package.xml b/utils/sub8_ros_tools/package.xml
index 3f2986c2..f95bf1b2 100644
--- a/utils/sub8_ros_tools/package.xml
+++ b/utils/sub8_ros_tools/package.xml
@@ -7,9 +7,7 @@
   <license>MIT</license>
   <buildtool_depend>catkin</buildtool_depend>
   <build_depend>rostest</build_depend>
-  <run_depend>rostest</run_depend>
   <run_depend>rospy</run_depend>
-
 
 </package>
\ No newline at end of file
diff --git a/utils/sub8_ros_tools/sub8_ros_tools/geometry_helpers.py b/utils/sub8_ros_tools/sub8_ros_tools/geometry_helpers.py
index dc9aa1a6..1b2e26a5 100644
--- a/utils/sub8_ros_tools/sub8_ros_tools/geometry_helpers.py
+++ b/utils/sub8_ros_tools/sub8_ros_tools/geometry_helpers.py
@@ -8,8 +8,8 @@ def make_rotation(vector_a, vector_b):
 
     >> R = make_rotation(a, b)
     >> p = R.dot(a)
-    >> np.dot(p, a)
-    >>> 0.0
+    >> np.cross(p, a)
+    >>> array([0.0, 0.0, 0.0])
 
     [1] Calculate Rotation Matrix to align Vector A to Vector B in 3d?
         http://math.stackexchange.com/questions/180418
@@ -45,6 +45,7 @@ def make_rotation(vector_a, vector_b):
 
     return R
 
+
 def skew_symmetric_cross(a):
     '''Return the skew symmetric matrix representation of a vector
     [1] https://en.wikipedia.org/wiki/Cross_product#Skew-symmetric_matrix
@@ -57,5 +58,15 @@ def skew_symmetric_cross(a):
     ], dtype=np.float32)
     return skew_symm
 
+
 def normalize(vector):
-    return vector / np.linalg.norm(vector)
\ No newline at end of file
+    return vector / np.linalg.norm(vector)
+
+
+def compose_transformation(R, t):
+    '''Compose a transformation from a rotation matrix and a translation matrix'''
+    transformation = np.zeros((4, 4))
+    transformation[:3, :3] = R
+    transformation[3, :3] = t
+    transformation[3, 3] = 1.0
+    return transformation
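Note the convention used by compose_transformation: the rotation fills the upper-left 3x3 block and the translation is written into the bottom row (row-vector style), which the unit test added later in this series asserts. A sketch:

    import numpy as np
    from sub8_ros_tools import compose_transformation

    R = np.eye(3)
    t = np.array([1.0, 2.0, 3.0])

    T = compose_transformation(R, t)
    # T == [[1., 0., 0., 0.],
    #       [0., 1., 0., 0.],
    #       [0., 0., 1., 0.],
    #       [1., 2., 3., 1.]]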
From 2a759e039d41b9455220b73d3d37d0a6cc034897 Mon Sep 17 00:00:00 2001
From: Jacob Panikulam
Date: Thu, 22 Oct 2015 22:46:24 -0400
Subject: [PATCH 016/267] REFACTORING: Rename files

- Rename sub8_utils/math_helpers -> msg_helpers, since every tool in
  there was a helper for rosmsg stuff
- Move simulate.py into nodes

Neither of these changes has any effect on the behavior of any code.
---
 utils/sub8_ros_tools/sub8_ros_tools/__init__.py          | 3 ++-
 .../sub8_ros_tools/{math_helpers.py => msg_helpers.py}   | 0
 2 files changed, 2 insertions(+), 1 deletion(-)
 rename utils/sub8_ros_tools/sub8_ros_tools/{math_helpers.py => msg_helpers.py} (100%)

diff --git a/utils/sub8_ros_tools/sub8_ros_tools/__init__.py b/utils/sub8_ros_tools/sub8_ros_tools/__init__.py
index c680294c..bef1ceee 100644
--- a/utils/sub8_ros_tools/sub8_ros_tools/__init__.py
+++ b/utils/sub8_ros_tools/sub8_ros_tools/__init__.py
@@ -1,5 +1,6 @@
 from init_helpers import wait_for_param, wait_for_subscriber
 from image_helpers import Image_Subscriber, Image_Publisher, make_image_msg, get_image_msg
-from math_helpers import rosmsg_to_numpy
+from msg_helpers import (rosmsg_to_numpy, pose_to_numpy, twist_to_numpy, odometry_to_numpy,
+                         make_wrench_stamped, posetwist_to_numpy, odom_sub)
 from threading_helpers import thread_lock
 from geometry_helpers import make_rotation, normalize, skew_symmetric_cross, compose_transformation
\ No newline at end of file
diff --git a/utils/sub8_ros_tools/sub8_ros_tools/math_helpers.py b/utils/sub8_ros_tools/sub8_ros_tools/msg_helpers.py
similarity index 100%
rename from utils/sub8_ros_tools/sub8_ros_tools/math_helpers.py
rename to utils/sub8_ros_tools/sub8_ros_tools/msg_helpers.py

From 6e37821589cb248bee0cfb5b8097d4a748617d7e Mon Sep 17 00:00:00 2001
From: Jacob Panikulam
Date: Wed, 28 Oct 2015 01:52:30 -0400
Subject: [PATCH 017/267] UTILS: Add misc msg helpers

---
 utils/sub8_ros_tools/sub8_ros_tools/__init__.py |  2 +-
 .../sub8_ros_tools/msg_helpers.py               | 61 ++++++++++++++++++-
 .../test_ros_tools/test_ros_tools.py            | 13 +++-
 3 files changed, 71 insertions(+), 5 deletions(-)

diff --git a/utils/sub8_ros_tools/sub8_ros_tools/__init__.py b/utils/sub8_ros_tools/sub8_ros_tools/__init__.py
index bef1ceee..e97799c1 100644
--- a/utils/sub8_ros_tools/sub8_ros_tools/__init__.py
+++ b/utils/sub8_ros_tools/sub8_ros_tools/__init__.py
@@ -1,6 +1,6 @@
 from init_helpers import wait_for_param, wait_for_subscriber
 from image_helpers import Image_Subscriber, Image_Publisher, make_image_msg, get_image_msg
 from msg_helpers import (rosmsg_to_numpy, pose_to_numpy, twist_to_numpy, odometry_to_numpy,
-                         make_wrench_stamped, posetwist_to_numpy, odom_sub)
+                         make_wrench_stamped, make_pose_stamped, posetwist_to_numpy, odom_sub, make_header)
 from threading_helpers import thread_lock
 from geometry_helpers import make_rotation, normalize, skew_symmetric_cross, compose_transformation
\ No newline at end of file
diff --git a/utils/sub8_ros_tools/sub8_ros_tools/msg_helpers.py b/utils/sub8_ros_tools/sub8_ros_tools/msg_helpers.py
index 1d375b16..6524ddf7 100644
--- a/utils/sub8_ros_tools/sub8_ros_tools/msg_helpers.py
+++ b/utils/sub8_ros_tools/sub8_ros_tools/msg_helpers.py
@@ -1,4 +1,8 @@
 import numpy as np
+import geometry_msgs.msg as geometry_msgs
+import std_msgs.msg as std_msgs
+import nav_msgs.msg as nav_msgs
+import rospy
 
 
 def rosmsg_to_numpy(rosmsg, keys=None):
@@ -23,7 +27,7 @@ def rosmsg_to_numpy(rosmsg, keys=None):
             rosmsg_to_numpy(some_Pose2D, ['x', 'y', 'yaw']) = array([x, y, yaw])
 
     Note:
-        - This function is designed to handle the most common use cases (vectors, points and quaternions) 
+        - This function is designed to handle the most common use cases (vectors, points and quaternions)
           without requiring any additional arguments.
     '''
     if keys is None:
         keys = ['x', 'y', 'z', 'w']
@@ -66,6 +70,12 @@ def twist_to_numpy(twist):
     linear = rosmsg_to_numpy(twist.linear)
     angular = rosmsg_to_numpy(twist.angular)
     return linear, angular
 
+
+def posetwist_to_numpy(posetwist):
+    pose = pose_to_numpy(posetwist.pose)
+    twist = twist_to_numpy(posetwist.twist)
+    return pose, twist
+
+
 def odometry_to_numpy(odom):
     '''TODO: Unit-test
     Convert an odometry message into a tuple of numpy arrays
     returns (pose, twist, pose_covariance, twist_covariance)
@@ -77,4 +87,51 @@ def odometry_to_numpy(odom):
     twist = twist_to_numpy(odom.twist.twist)
     twist_covariance = np.array(odom.twist.covariance).reshape(6, 6)
 
-    return pose, twist, pose_covariance, twist_covariance
\ No newline at end of file
+    return pose, twist, pose_covariance, twist_covariance
+
+
+def make_header(frame='/body'):
+    try:
+        cur_time = rospy.Time.now()
+    except rospy.ROSInitException:
+        cur_time = rospy.Time(0)
+
+    header = std_msgs.Header(
+        stamp=cur_time,
+        frame_id=frame
+    )
+    return header
+
+
+def make_wrench_stamped(force, torque, frame='/body'):
+    '''Make a WrenchStamped message without all the fuss
+    Frame defaults to body
+    '''
+
+    wrench = geometry_msgs.WrenchStamped(
+        header=make_header(frame),
+        wrench=geometry_msgs.Wrench(
+            force=geometry_msgs.Vector3(*force),
+            torque=geometry_msgs.Vector3(*torque)
+        )
+    )
+    return wrench
+
+
+def make_pose_stamped(position, orientation, frame='/body'):
+    pose = geometry_msgs.PoseStamped(
+        header=make_header(frame),
+        pose=geometry_msgs.Pose(
+            position=geometry_msgs.Point(*position),
+            orientation=geometry_msgs.Quaternion(*orientation)
+        )
+    )
+    return pose
+
+
+def odom_sub(topic, callback):
+    def wrapped_callback(*args):
+        msg = args[-1]
+        callback(odometry_to_numpy(msg))
+
+    return rospy.Subscriber(topic, nav_msgs.Odometry, wrapped_callback, queue_size=1)
diff --git a/utils/sub8_ros_tools/test_ros_tools/test_ros_tools.py b/utils/sub8_ros_tools/test_ros_tools/test_ros_tools.py
index cc41d88d..7740acc7 100644
--- a/utils/sub8_ros_tools/test_ros_tools/test_ros_tools.py
+++ b/utils/sub8_ros_tools/test_ros_tools/test_ros_tools.py
@@ -4,9 +4,8 @@
 from geometry_msgs.msg import Quaternion, Vector3, Pose2D
 from sensor_msgs.msg import Image
 from sub8_ros_tools import make_image_msg, get_image_msg
-from sub8_ros_tools import rosmsg_to_numpy
+from sub8_ros_tools import rosmsg_to_numpy, make_wrench_stamped
 from sub8_ros_tools import thread_lock
-# from sub8_ros_tools import AlarmBroadcaster
 from sub8_ros_tools import skew_symmetric_cross, make_rotation, normalize
 
 
@@ -42,6 +41,16 @@
             numpy_array
         )
 
+    def test_make_wrench_stamped(self):
+        '''Test that wrenchmaking works'''
+        for k in range(10):
+            force = np.random.random(3) * 10
+            torque = np.random.random(3) * 10
+            wrench_msg = make_wrench_stamped(force, torque, frame='/enu')
+
+            msg_force = rosmsg_to_numpy(wrench_msg.wrench.force)
+            msg_torque = rosmsg_to_numpy(wrench_msg.wrench.torque)
+
     def test_make_image_msg(self):
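A short sketch of the new message helpers (the topic name is illustrative):

    import rospy
    from sub8_ros_tools import make_wrench_stamped, odom_sub

    rospy.init_node('msg_helpers_example')

    wrench_msg = make_wrench_stamped(force=[0., 0., 9.8], torque=[0., 0., 0.1], frame='/enu')

    def got_odom(np_odom):
        # odom_sub hands the callback numpy arrays instead of a raw Odometry message
        pose, twist, pose_cov, twist_cov = np_odom
        position, orientation = pose

    sub = odom_sub('/odom', got_odom)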
From 54921a7d7e7be1df6226d72d3a42f133d3e11429 Mon Sep 17 00:00:00 2001
From: Patrick Emami
Date: Fri, 20 Nov 2015 20:05:33 -0500
Subject: [PATCH 018/267] MISC: Fixing setup.py files

To reflect changes in catkin, our setup.py files had to be altered to
fix import errors
---
 utils/sub8_ros_tools/setup.py | 4 ----
 1 file changed, 4 deletions(-)

diff --git a/utils/sub8_ros_tools/setup.py b/utils/sub8_ros_tools/setup.py
index 33f8621d..4ef788ff 100644
--- a/utils/sub8_ros_tools/setup.py
+++ b/utils/sub8_ros_tools/setup.py
@@ -6,10 +6,6 @@
 
 # fetch values from package.xml
 setup_args = generate_distutils_setup(
     packages=['sub8_ros_tools', 'sub8_misc_tools'],
-    package_dir={
-        'sub8_ros_tools': '.',
-        'sub8_misc_tools': '.',
-    },
 )
 
 setup(**setup_args)

From 3f7ae21bf7305378fe75d1d6ede341fed47054b0 Mon Sep 17 00:00:00 2001
From: Zach Goins
Date: Wed, 2 Dec 2015 20:50:37 -0500
Subject: [PATCH 019/267] CLEANUP: Change file names in folders and source code

---
 utils/navigator_tools/CMakeLists.txt            | 11 ++++
 .../navigator_tools/__init__.py                 |  2 +
 .../navigator_tools/geometry_helper.py          | 42 +++++++++++++
 .../navigator_tools/msg_helper.py               | 13 ++++
 utils/navigator_tools/package.xml               | 13 ++++
 utils/navigator_tools/setup.py                  | 11 ++++
 .../tests/math_helpers_test.py                  | 62 +++++++++++++++++++
 7 files changed, 154 insertions(+)
 create mode 100644 utils/navigator_tools/CMakeLists.txt
 create mode 100644 utils/navigator_tools/navigator_tools/__init__.py
 create mode 100644 utils/navigator_tools/navigator_tools/geometry_helper.py
 create mode 100644 utils/navigator_tools/navigator_tools/msg_helper.py
 create mode 100644 utils/navigator_tools/package.xml
 create mode 100644 utils/navigator_tools/setup.py
 create mode 100644 utils/navigator_tools/tests/math_helpers_test.py

diff --git a/utils/navigator_tools/CMakeLists.txt b/utils/navigator_tools/CMakeLists.txt
new file mode 100644
index 00000000..7272f75d
--- /dev/null
+++ b/utils/navigator_tools/CMakeLists.txt
@@ -0,0 +1,11 @@
+cmake_minimum_required(VERSION 2.8.3)
+project(navigator_tools)
+find_package(catkin REQUIRED COMPONENTS rostest std_msgs)
+catkin_python_setup()
+catkin_package()
+
+# Add folders to be run by python nosetests
+if(CATKIN_ENABLE_TESTING)
+  catkin_add_nosetests(tests)
+endif()
+
diff --git a/utils/navigator_tools/navigator_tools/__init__.py b/utils/navigator_tools/navigator_tools/__init__.py
new file mode 100644
index 00000000..7f79d00b
--- /dev/null
+++ b/utils/navigator_tools/navigator_tools/__init__.py
@@ -0,0 +1,2 @@
+from geometry_helper import quat_to_euler, euler_to_quat, normalize, compose_transformation
+from msg_helper import ros_to_np_3D
\ No newline at end of file
diff --git a/utils/navigator_tools/navigator_tools/geometry_helper.py b/utils/navigator_tools/navigator_tools/geometry_helper.py
new file mode 100644
index 00000000..1d1912ed
--- /dev/null
+++ b/utils/navigator_tools/navigator_tools/geometry_helper.py
@@ -0,0 +1,42 @@
+from __future__ import division
+import numpy as np
+from tf.transformations import quaternion_from_euler, euler_from_quaternion
+from geometry_msgs.msg import Quaternion
+
+'''
+    A file to assist with some math that is commonly used in robotics
+
+    Some of the functions here are shared with the UF Machine
+    Intelligence Lab's SubjuGator robot. All hail Jacob Panikulam,
+    may he reclaim his honor.
+ +''' + + +def normalize(vector): + return vector / np.linalg.norm(vector) + + +def compose_transformation(R, t): + '''Compose a transformation from a rotation matrix and a translation matrix''' + transformation = np.zeros((4, 4)) + transformation[:3, :3] = R + transformation[3, :3] = t + transformation[3, 3] = 1.0 + return transformation + +def quat_to_euler(q): + ''' Approximate a quaternion as a euler rotation vector''' + + euler_rot_vec = euler_from_quaternion([q.x, q.y, q.z, q.w]) + final = np.array(([euler_rot_vec[0], euler_rot_vec[1], euler_rot_vec[2]])) + return final + + +def euler_to_quat(rotvec): + ''' convert a euler rotation vector into a ROS quaternion ''' + + quat = quaternion_from_euler(rotvec[0], rotvec[1], rotvec[2]) + return Quaternion(quat[0], quat[1], quat[2], quat[3]) + + diff --git a/utils/navigator_tools/navigator_tools/msg_helper.py b/utils/navigator_tools/navigator_tools/msg_helper.py new file mode 100644 index 00000000..bdc2a734 --- /dev/null +++ b/utils/navigator_tools/navigator_tools/msg_helper.py @@ -0,0 +1,13 @@ +from __future__ import division +import numpy as np +from geometry_msgs.msg import Quaternion + +''' + A file to assist with some messages that are commonly used + on the Wam-V +''' + +def ros_to_np_3D(msg): + + xyz_array = np.array(([msg.x, msg.y, msg.z])) + return xyz_array diff --git a/utils/navigator_tools/package.xml b/utils/navigator_tools/package.xml new file mode 100644 index 00000000..8ecc7aa5 --- /dev/null +++ b/utils/navigator_tools/package.xml @@ -0,0 +1,13 @@ + + + navigator_tools + 1.0.0 + The math_helpers package + Zach Goins + MIT + catkin + rostest + rospy + + + \ No newline at end of file diff --git a/utils/navigator_tools/setup.py b/utils/navigator_tools/setup.py new file mode 100644 index 00000000..39529c00 --- /dev/null +++ b/utils/navigator_tools/setup.py @@ -0,0 +1,11 @@ +## ! 
DO NOT MANUALLY INVOKE THIS setup.py, USE CATKIN INSTEAD
+
+from distutils.core import setup
+from catkin_pkg.python_setup import generate_distutils_setup
+
+# fetch values from package.xml
+setup_args = generate_distutils_setup(
+    packages=['navigator_tools']
+)
+
+setup(**setup_args)
\ No newline at end of file
diff --git a/utils/navigator_tools/tests/math_helpers_test.py b/utils/navigator_tools/tests/math_helpers_test.py
new file mode 100644
index 00000000..8f3a7ad7
--- /dev/null
+++ b/utils/navigator_tools/tests/math_helpers_test.py
@@ -0,0 +1,62 @@
+#!/usr/bin/env python
+import unittest
+import numpy as np
+from geometry_msgs.msg import Quaternion
+from navigator_tools import quat_to_euler, euler_to_quat, normalize
+from navigator_tools import compose_transformation
+
+
+class TestROSTools(unittest.TestCase):
+
+    def test_normalize_vector(self):
+        '''Test vector normalization'''
+        for k in range(10):
+            rand_vec = np.random.random(k)  # Make a random k-length vector
+
+            # Ignore the astronomically unlikely case that a vector has near 0 norm
+            if not np.isclose(np.sum(rand_vec), 0):
+                normalized = normalize(rand_vec)
+                norm = np.linalg.norm(normalized)
+
+                # Test that the norm is 1
+                np.testing.assert_almost_equal(norm, 1.0, err_msg="The normalized vector did not have length 1")
+
+    def test_quat_to_euler(self):
+        ''' Test quaternion to euler angle '''
+
+        q = Quaternion(x=0.70711, y=0.0, z=0.0, w=0.70711)
+        numpy_array = quat_to_euler(q)
+        truth = np.array(([1.57079633, 0.0, 0.0]))
+        np.testing.assert_almost_equal(numpy_array, truth, err_msg="Quaternion to euler conversion incorrect")
+
+    def test_compose_transformation(self):
+        ''' Test composing a transformation from a rotation matrix and a translation '''
+        R = np.array(([3, 6, 1],
+                      [2, 5, 2],
+                      [1, 4, 3]))
+
+        t = np.array((1, 2, 3))
+
+        truth = np.array(([3, 6, 1, 0],
+                          [2, 5, 2, 0],
+                          [1, 4, 3, 0],
+                          [1, 2, 3, 1]))
+
+        test = compose_transformation(R, t)
+        np.testing.assert_almost_equal(test, truth, err_msg="Error composing transformation")
+
+    def test_euler_to_quat(self):
+        ''' Test euler angle to quaternion conversion '''
+
+        e = np.array(([1.57079633, 0.0, 0.0]))
+        testing = euler_to_quat(e)
+        # strip away ROS data to just test return values
+        # because unittest doesn't support ROS message operands
+        testing = np.array(([testing.x, testing.y, testing.z, testing.w]))
+        truth = np.array(([0.70710678, 0.0, 0.0, 0.70710678]))
+        np.testing.assert_almost_equal(testing, truth, err_msg="Incorrect euler to quaternion conversion")
+
+
+if __name__ == '__main__':
+    suite = unittest.TestLoader().loadTestsFromTestCase(TestROSTools)
+    unittest.TextTestRunner(verbosity=2).run(suite)
From 9dc671be02117b8c7093af325f981bd30aae31d5 Mon Sep 17 00:00:00 2001
From: Forrest Voight
Date: Thu, 4 Feb 2016 16:48:44 -0500
Subject: [PATCH 020/267] MISC: Add uf_common dependency

This is /intended/ (Though intentions often fall flat) to be a temporary measure.

TODO:
- Replace existing conversion inlines with eigen_conversions
- Deprecate socat shenanigans and talk directly over TCP...?
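
Note: the test_compose_transformation expectation above pins down an unusual convention: compose_transformation puts the translation in the bottom *row* of the 4x4 matrix, so the composed transform acts on homogeneous row vectors, not column vectors. The following is a minimal usage sketch of the helpers exercised by these tests; it assumes a sourced catkin workspace with navigator_tools built, and the numeric values are illustrative only.

    import numpy as np
    from navigator_tools import compose_transformation, euler_to_quat, quat_to_euler

    # Row-vector convention: T = [[R, 0], [t, 1]], so points transform as p_h.dot(T)
    R = np.eye(3)                     # no rotation
    t = np.array([1.0, 2.0, 3.0])     # translate by (1, 2, 3)
    T = compose_transformation(R, t)

    p_h = np.array([0.0, 0.0, 0.0, 1.0])  # homogeneous *row* vector
    assert np.allclose(p_h.dot(T), [1.0, 2.0, 3.0, 1.0])

    # Round-trip a 90-degree roll through the quaternion helpers
    q = euler_to_quat([np.pi / 2, 0.0, 0.0])      # returns a geometry_msgs Quaternion
    assert np.allclose(quat_to_euler(q), [np.pi / 2, 0.0, 0.0])

With this convention a point is transformed by right-multiplication (p_h.dot(T)); callers expecting the usual column-vector form (T.dot(p)) would need the transpose.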
--- utils/uf_common/CMakeLists.txt | 29 ++ .../uf_common/include/uf_common/msg_helpers.h | 79 +++++ .../include/uf_common/param_helpers.h | 127 ++++++++ utils/uf_common/msg/Acceleration.msg | 2 + utils/uf_common/msg/Float64Stamped.msg | 2 + utils/uf_common/msg/PoseTwist.msg | 3 + utils/uf_common/msg/PoseTwistStamped.msg | 3 + utils/uf_common/msg/VelocityMeasurement.msg | 3 + utils/uf_common/msg/VelocityMeasurements.msg | 2 + utils/uf_common/package.xml | 36 +++ utils/uf_common/scripts/param_saver | 40 +++ .../scripts/velocitymeasurements_to_vector3 | 38 +++ utils/uf_common/setup.py | 10 + utils/uf_common/uf_common/__init__.py | 0 utils/uf_common/uf_common/interpolate.py | 14 + .../uf_common/orientation_helpers.py | 291 ++++++++++++++++++ 16 files changed, 679 insertions(+) create mode 100644 utils/uf_common/CMakeLists.txt create mode 100644 utils/uf_common/include/uf_common/msg_helpers.h create mode 100644 utils/uf_common/include/uf_common/param_helpers.h create mode 100644 utils/uf_common/msg/Acceleration.msg create mode 100644 utils/uf_common/msg/Float64Stamped.msg create mode 100644 utils/uf_common/msg/PoseTwist.msg create mode 100644 utils/uf_common/msg/PoseTwistStamped.msg create mode 100644 utils/uf_common/msg/VelocityMeasurement.msg create mode 100644 utils/uf_common/msg/VelocityMeasurements.msg create mode 100644 utils/uf_common/package.xml create mode 100755 utils/uf_common/scripts/param_saver create mode 100755 utils/uf_common/scripts/velocitymeasurements_to_vector3 create mode 100644 utils/uf_common/setup.py create mode 100644 utils/uf_common/uf_common/__init__.py create mode 100644 utils/uf_common/uf_common/interpolate.py create mode 100644 utils/uf_common/uf_common/orientation_helpers.py diff --git a/utils/uf_common/CMakeLists.txt b/utils/uf_common/CMakeLists.txt new file mode 100644 index 00000000..7d84efc8 --- /dev/null +++ b/utils/uf_common/CMakeLists.txt @@ -0,0 +1,29 @@ +cmake_minimum_required(VERSION 2.8.3) +project(uf_common) +find_package(catkin REQUIRED COMPONENTS message_generation message_runtime geometry_msgs tf actionlib interactive_markers std_msgs actionlib_msgs cmake_modules) +catkin_python_setup() + +add_message_files( + FILES + PoseTwistStamped.msg + PoseTwist.msg + VelocityMeasurements.msg + Float64Stamped.msg + VelocityMeasurement.msg + Acceleration.msg +) + +find_package(Eigen REQUIRED) +include_directories(${Eigen_INCLUDE_DIRS}) +generate_messages( + DEPENDENCIES std_msgs actionlib_msgs geometry_msgs uf_common +) +catkin_package( + DEPENDS Eigen + CATKIN_DEPENDS message_generation message_runtime geometry_msgs tf actionlib interactive_markers std_msgs actionlib_msgs + INCLUDE_DIRS ${EIGEN_INCLUDE_DIRS} include + LIBRARIES +) + +include_directories(${EIGEN_INCLUDE_DIRS} include ${Boost_INCLUDE_DIR} ${catkin_INCLUDE_DIRS}) +install(PROGRAMS scripts/interactive_marker_moveto scripts/velocitymeasurements_to_vector3 scripts/param_saver DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}) diff --git a/utils/uf_common/include/uf_common/msg_helpers.h b/utils/uf_common/include/uf_common/msg_helpers.h new file mode 100644 index 00000000..548a98f8 --- /dev/null +++ b/utils/uf_common/include/uf_common/msg_helpers.h @@ -0,0 +1,79 @@ +#ifndef UF_COMMON__MSG_HELPERS_H +#define UF_COMMON__MSG_HELPERS_H + +#include +#include + +namespace uf_common { + +template +inline T make_rgba(double r, double g, double b, double a) { + T c; + c.r = r; + c.g = g; + c.b = b; + c.a = a; + return c; +} + +template +inline T make_xyz(double x, double y, double z) { + T p; + p.x = x; + p.y = y; 
+ p.z = z; + return p; +} + +template +inline T vec2xyz(Eigen::Vector3d v) { + return make_xyz(v(0), v(1), v(2)); +} +template +inline T vec2xyz(tf::Vector3 v) { + return make_xyz(v.x(), v.y(), v.z()); +} +template +inline Eigen::Vector3d xyz2vec(T m) { + return Eigen::Vector3d(m.x, m.y, m.z); +} + +inline Eigen::Vector3d vec2vec(tf::Vector3 v) { return Eigen::Vector3d(v[0], v[1], v[2]); } +inline tf::Vector3 vec2vec(Eigen::Vector3d v) { return tf::Vector3(v[0], v[1], v[2]); } +inline Eigen::Quaterniond quat2quat(const tf::Quaternion &q) { + return Eigen::Quaterniond(q[3], q[0], q[1], q[2]); +} +inline tf::Quaternion quat2quat(const Eigen::Quaterniond &q) { + return tf::Quaternion(q.x(), q.y(), q.z(), q.w()); +} + +template +inline T make_xyzw(double x, double y, double z, double w) { + T q; + q.x = x; + q.y = y; + q.z = z; + q.w = w; + return q; +} + +template +inline T vec2xyzw(Eigen::Vector4d v) { + return vec2xyzw(v(0), v(1), v(2), v(3)); +} +template +inline Eigen::Quaterniond xyzw2quat(T m) { + return Eigen::Quaterniond(m.w, m.x, m.y, m.z); +} +template +inline T vec2wxyz(Eigen::Vector4d v) { + return make_xyzw(v(1), v(2), v(3), v(0)); +} +inline tf::Quaternion vec2quat(Eigen::Vector4d v) { return tf::Quaternion(v[0], v[1], v[2], v[3]); } +template +inline T quat2xyzw(Eigen::Quaterniond q) { + return make_xyzw(q.x(), q.y(), q.z(), q.w()); +} +} + +#endif diff --git a/utils/uf_common/include/uf_common/param_helpers.h b/utils/uf_common/include/uf_common/param_helpers.h new file mode 100644 index 00000000..fee648cf --- /dev/null +++ b/utils/uf_common/include/uf_common/param_helpers.h @@ -0,0 +1,127 @@ +#ifndef UF_COMMON__PARAM_HELPERS_H +#define UF_COMMON__PARAM_HELPERS_H + +#include +#include +#include + +#include "msg_helpers.h" + +namespace uf_common { + +void fail(std::string const &error_string) { throw std::runtime_error(error_string); } +template +void fail(FirstType first, SecondType second, MoreTypes... more) { + std::ostringstream ss; + ss << first << second; + return fail(ss.str(), more...); +} + +template +void require(bool cond, ErrorDescTypes... 
error_desc) { + if (!cond) { + fail(error_desc...); + } +} + +template +bool _getParam(ros::NodeHandle &nh, const std::string &name, Eigen::Matrix &res) { + XmlRpc::XmlRpcValue my_list; + if (!nh.getParam(name, my_list)) return false; + require(my_list.getType() == XmlRpc::XmlRpcValue::TypeArray, "param ", name, " must be an array"); + if (N != Eigen::Dynamic) { + require(my_list.size() == N, "param ", name, "must have length ", N, " (is ", my_list.size(), + ")"); + } + + res.resize(my_list.size()); + for (int32_t i = 0; i < my_list.size(); i++) { + if (my_list[i].getType() == XmlRpc::XmlRpcValue::TypeDouble) { + res(i) = static_cast(my_list[i]); + } else if (my_list[i].getType() == XmlRpc::XmlRpcValue::TypeInt) { + res(i) = static_cast(my_list[i]); + } else { + fail("param ", name, "[", i, "] is not numeric"); + } + } + return true; +} +template +bool _getParam(ros::NodeHandle &nh, const std::string &name, Eigen::Matrix &res) { + static_assert(N != 0, "doesn't work for N = 0"); + static_assert(M != 1, "wrong template specialization used?"); + + XmlRpc::XmlRpcValue my_list; + if (!nh.getParam(name, my_list)) return false; + require(my_list.getType() == XmlRpc::XmlRpcValue::TypeArray, "param ", name, " must be an array"); + require(my_list.size() >= 1, "param " + name + " must not have zero length"); + if (N != Eigen::Dynamic) { + require(my_list.size() == N, "param ", name, "must have length ", N, " (is ", my_list.size(), + ")"); + } + + require(my_list[0].getType() == XmlRpc::XmlRpcValue::TypeArray, "param ", name, + "[0] must be a list"); + if (M != Eigen::Dynamic) { + require(my_list[0].size() == M, "param ", name, "[0] must have length ", M, " (is ", + my_list[0].size(), ")"); + } + + res.resize(my_list.size(), my_list[0].size()); + for (int32_t i = 0; i < my_list.size(); i++) { + XmlRpc::XmlRpcValue row = my_list[i]; + require(row.getType() == XmlRpc::XmlRpcValue::TypeArray, "param ", name, "[", i, + "] must be a list"); + require(row.size() == my_list[0].size(), "param ", name, "[", i, "]'s length doesn't match"); + for (int32_t j = 0; j < row.size(); j++) { + XmlRpc::XmlRpcValue entry = row[j]; + if (entry.getType() == XmlRpc::XmlRpcValue::TypeDouble) { + res(i, j) = static_cast(entry); + } else if (entry.getType() == XmlRpc::XmlRpcValue::TypeInt) { + res(i, j) = static_cast(entry); + } else { + fail("param ", name, "[", i, ", ", j, "] is not numeric"); + } + } + } + return true; +} +template +bool _getParam(ros::NodeHandle &nh, const std::string &name, T &res) { + return nh.getParam(name, res); +} +template <> +bool _getParam(ros::NodeHandle &nh, const std::string &name, ros::Duration &res) { + double x; + if (!nh.getParam(name, x)) return false; + res = ros::Duration(x); + return true; +} +template <> +bool _getParam(ros::NodeHandle &nh, const std::string &name, unsigned int &res) { + int x; + if (!nh.getParam(name, x)) return false; + if (x < 0) { + fail("param ", name, " must be >= 0"); + } + res = static_cast(x); + return true; +} + +template +T getParam(ros::NodeHandle &nh, std::string name) { + T res; + require(_getParam(nh, name, res), "param ", name, " required"); + return res; +} +template +T getParam(ros::NodeHandle &nh, std::string name, T default_value) { + T res; + if (!_getParam(nh, name, res)) { + return default_value; + } + return res; +} +} + +#endif diff --git a/utils/uf_common/msg/Acceleration.msg b/utils/uf_common/msg/Acceleration.msg new file mode 100644 index 00000000..680a69dd --- /dev/null +++ b/utils/uf_common/msg/Acceleration.msg @@ -0,0 +1,2 @@ 
+geometry_msgs/Vector3 linear +geometry_msgs/Vector3 angular diff --git a/utils/uf_common/msg/Float64Stamped.msg b/utils/uf_common/msg/Float64Stamped.msg new file mode 100644 index 00000000..5c2f1136 --- /dev/null +++ b/utils/uf_common/msg/Float64Stamped.msg @@ -0,0 +1,2 @@ +Header header +float64 data diff --git a/utils/uf_common/msg/PoseTwist.msg b/utils/uf_common/msg/PoseTwist.msg new file mode 100644 index 00000000..a66d185a --- /dev/null +++ b/utils/uf_common/msg/PoseTwist.msg @@ -0,0 +1,3 @@ +geometry_msgs/Pose pose +geometry_msgs/Twist twist +Acceleration acceleration diff --git a/utils/uf_common/msg/PoseTwistStamped.msg b/utils/uf_common/msg/PoseTwistStamped.msg new file mode 100644 index 00000000..631c3523 --- /dev/null +++ b/utils/uf_common/msg/PoseTwistStamped.msg @@ -0,0 +1,3 @@ +# pose is in header.frame_id. twist is in frame defined by pose (ie. body) +Header header +PoseTwist posetwist diff --git a/utils/uf_common/msg/VelocityMeasurement.msg b/utils/uf_common/msg/VelocityMeasurement.msg new file mode 100644 index 00000000..ba14a4bf --- /dev/null +++ b/utils/uf_common/msg/VelocityMeasurement.msg @@ -0,0 +1,3 @@ +geometry_msgs/Vector3 direction +float64 velocity +float64 correlation diff --git a/utils/uf_common/msg/VelocityMeasurements.msg b/utils/uf_common/msg/VelocityMeasurements.msg new file mode 100644 index 00000000..1161df02 --- /dev/null +++ b/utils/uf_common/msg/VelocityMeasurements.msg @@ -0,0 +1,2 @@ +Header header +VelocityMeasurement[] velocity_measurements diff --git a/utils/uf_common/package.xml b/utils/uf_common/package.xml new file mode 100644 index 00000000..5414fab8 --- /dev/null +++ b/utils/uf_common/package.xml @@ -0,0 +1,36 @@ + + uf_common + 1.0.0 + uf_common + Forrest Voight + + BSD + + http://ros.org/wiki/uf_common + Forrest Voight + catkin + + message_runtime + actionlib + interactive_markers + std_msgs + message_generation + geometry_msgs + tf + actionlib_msgs + cmake_modules + + message_runtime + actionlib + interactive_markers + std_msgs + message_generation + geometry_msgs + tf + actionlib_msgs + + + + + + diff --git a/utils/uf_common/scripts/param_saver b/utils/uf_common/scripts/param_saver new file mode 100755 index 00000000..95c114fa --- /dev/null +++ b/utils/uf_common/scripts/param_saver @@ -0,0 +1,40 @@ +#!/usr/bin/env python + +import os +import random +import yaml + +import roslib +roslib.load_manifest('uf_common') +import rospy + + +rospy.init_node('param_saver', anonymous=True) + +class MyDumper(yaml.Dumper): + def represent_mapping(self, tag, mapping, flow_style=False): + return yaml.Dumper.represent_mapping(self, tag, mapping, flow_style) + +while not rospy.is_shutdown(): + rospy.sleep(rospy.Duration(3)) + + entries = rospy.get_param('~') + for entry in entries.itervalues(): + filename = entry['filename'] + file_basepath = entry['file_basepath'] + param_paths = entry['param_paths'] + + p = rospy.get_param(file_basepath) + data = yaml.dump(dict((k, v) for k, v in p.iteritems() if k in param_paths), Dumper=MyDumper) + + if os.path.exists(filename): + with open(filename, 'rb') as f: + if f.read() == data: + continue + + tmp_filename = '%s.%i' % (filename, random.randrange(10000)) + with open(tmp_filename, 'wb') as f: + f.write(data) + os.rename(tmp_filename, filename) + + print 'Saved %s to %s' % (file_basepath, filename) diff --git a/utils/uf_common/scripts/velocitymeasurements_to_vector3 b/utils/uf_common/scripts/velocitymeasurements_to_vector3 new file mode 100755 index 00000000..8b5b6f84 --- /dev/null +++ 
b/utils/uf_common/scripts/velocitymeasurements_to_vector3 @@ -0,0 +1,38 @@ +#!/usr/bin/env python + +from __future__ import division + +import math + +import numpy + +import roslib +roslib.load_manifest('uf_common') +import rospy +from geometry_msgs.msg import Vector3, Vector3Stamped + +from uf_common.orientation_helpers import xyz_array +from uf_common.msg import VelocityMeasurements + + +rospy.init_node('velocitymeasurements_to_vector3') + +vel_pub = rospy.Publisher('vel', Vector3Stamped) +def cb(msg): + msg.velocity_measurements = [x for x in msg.velocity_measurements + if not math.isnan(x.velocity)] + A = numpy.zeros((len(msg.velocity_measurements), 3)) + for i, velocity_measurement in enumerate(msg.velocity_measurements): + A[i] = xyz_array(velocity_measurement.direction) + b = [velocity_measurement.velocity for velocity_measurement in msg.velocity_measurements] + x, residuals, rank, s = numpy.linalg.lstsq(A, b) + if rank < len(x): + rospy.logerr('velocity not observable!') + return + vel_pub.publish(Vector3Stamped( + header=msg.header, + vector=Vector3(*x), + )) +rospy.Subscriber('velocitymeasurements', VelocityMeasurements, cb) + +rospy.spin() diff --git a/utils/uf_common/setup.py b/utils/uf_common/setup.py new file mode 100644 index 00000000..9ab73768 --- /dev/null +++ b/utils/uf_common/setup.py @@ -0,0 +1,10 @@ +# ! DO NOT MANUALLY INVOKE THIS setup.py, USE CATKIN INSTEAD + +from distutils.core import setup +from catkin_pkg.python_setup import generate_distutils_setup + +setup_args = generate_distutils_setup( + packages=['uf_common'], +) + +setup(**setup_args) diff --git a/utils/uf_common/uf_common/__init__.py b/utils/uf_common/uf_common/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/utils/uf_common/uf_common/interpolate.py b/utils/uf_common/uf_common/interpolate.py new file mode 100644 index 00000000..d94d1c86 --- /dev/null +++ b/utils/uf_common/uf_common/interpolate.py @@ -0,0 +1,14 @@ +from __future__ import division + +def sample_curve((xs, ys), x): + assert len(xs) == len(ys) + # assert that xs is sorted? 
+ if x < xs[0]: x = xs[0] + if x > xs[-1]: x = xs[-1] + pairs = zip(xs, ys) + # using bisect would be faster + for (left_x, left_y), (right_x, right_y) in zip(pairs[:-1], pairs[1:]): + if left_x <= x <= right_x: + a = (x - left_x)/(right_x - left_x) + return a * (right_y - left_y) + left_y + assert False diff --git a/utils/uf_common/uf_common/orientation_helpers.py b/utils/uf_common/uf_common/orientation_helpers.py new file mode 100644 index 00000000..bd77e636 --- /dev/null +++ b/utils/uf_common/uf_common/orientation_helpers.py @@ -0,0 +1,291 @@ +from __future__ import division + +import math +import warnings + +import numpy + +import roslib +roslib.load_manifest('uf_common') +import rospy +from tf import transformations +from nav_msgs.msg import Odometry +from uf_common.msg import PoseTwistStamped, PoseTwist, MoveToGoal +from std_msgs.msg import Header +from geometry_msgs.msg import Pose as Pose, Quaternion, Point, Vector3, Twist + + +def normalized(x): + x = numpy.array(x) + if max(map(abs, x)) == 0: + warnings.warn('Normalizing zero-length vector to random unit vector') + x = numpy.random.standard_normal(x.shape) + x = x / max(map(abs, x)) + x = x / numpy.linalg.norm(x) + return x + +def get_perpendicular(a, b=None): + a = numpy.array(a) + if max(map(abs, a)) == 0: + if b is not None: return get_perpendicular(b) + return normalized(numpy.random.standard_normal(a.shape)) + if b is None: + b = numpy.random.standard_normal(a.shape) + b = numpy.array(b) + x = numpy.cross(a, b) + if max(map(abs, x)) == 0: + return get_perpendicular(a) + return normalized(x) + + +def quat_to_rotvec(q): + if q[3] < 0: + q = -q + q = transformations.unit_vector(q) + angle = math.acos(q[3])*2 + axis = normalized(q[0:3]) + return axis * angle + +def rotvec_to_quat(rotvec): + return transformations.quaternion_about_axis(numpy.linalg.norm(rotvec), rotvec) + + +xyz_array = lambda o: numpy.array([o.x, o.y, o.z]) +xyzw_array = lambda o: numpy.array([o.x, o.y, o.z, o.w]) + + +def triad((a1, a2), (b1, b2)): + # returns quaternion that rotates b1 to a1 and b2 near a2 + # can get orientation by passing in (global, local) + aa = get_perpendicular(a1, a2) + A = numpy.array([normalized(a1), aa, normalized(numpy.cross(a1, aa))]) + bb = get_perpendicular(b1, b2) + B = numpy.array([normalized(b1), bb, normalized(numpy.cross(b1, bb))]) + rot = A.T.dot(B) + return transformations.quaternion_from_matrix( + [(a,b,c,0) for a,b,c in rot]+ + [(0,0,0,1)]) + +def test_triad(): + q = transformations.random_quaternion() + + a = numpy.random.standard_normal(3) + b = numpy.random.standard_normal(3) + + m = transformations.quaternion_matrix(q)[:3, :3] + q_ = triad((m.dot(a), m.dot(b)), (a, b)) + + assert numpy.linalg.norm(quat_to_rotvec( + transformations.quaternion_multiply( + q, + transformations.quaternion_inverse(q_), + ) + )) < 1e-6 +test_triad() + + +def look_at(forward, upish=[0, 0, 1]): + # assumes standard forward-left-up body coordinate system + return triad((forward, upish), ([1, 0, 0], [0, 0, 1])) +lookat = look_at # deprecated + +def look_at_without_pitching(forwardish, up=[0, 0, 1]): + # assumes standard forward-left-up body coordinate system + return triad((up, forwardish), ([0, 0, 1], [1, 0, 0])) +lookat_without_pitching = look_at_without_pitching # deprecated + +def look_at_camera(forward, upish=[0, 0, 1]): + # assumes camera right-down-forward coordinate system + return triad((forward, upish), ([0, 0, 1], [0, -1, 0])) +lookat_camera = look_at_camera # deprecated + + +EAST, NORTH, WEST, SOUTH = 
[transformations.quaternion_about_axis(math.pi/2*i, [0, 0, 1]) for i in xrange(4)] + +def safe_wait_for_message(topic, topic_type): + while True: + try: + return rospy.wait_for_message(topic, topic_type, .5) + except rospy.exceptions.ROSException, e: + if 'timeout' not in e.message: raise + print topic, 'wait_for_message timed out!' + + +class PoseEditor(object): + @classmethod + def from_Odometry_topic(cls, topic='/odom'): + return cls.from_Odometry(safe_wait_for_message(topic, Odometry)) + + @classmethod + def from_Odometry(cls, msg): + return cls.from_Pose(msg.header.frame_id, msg.pose.pose) + + @classmethod + def from_PoseTwistStamped_topic(cls, topic): + return cls.from_PoseTwistStamped(safe_wait_for_message(topic, PoseTwistStamped)) + + @classmethod + def from_PoseTwistStamped(cls, msg): + return cls.from_Pose(msg.header.frame_id, msg.posetwist.pose) + + @classmethod + def from_Pose(cls, frame_id, msg): + return cls(frame_id, xyz_array(msg.position), xyzw_array(msg.orientation)) + + def __init__(self, frame_id, position, orientation): + self.frame_id = frame_id + self.position = position + self.orientation = orientation + + @property + def _rot(self): + return transformations.quaternion_matrix(self.orientation)[:3, :3] + @property + def forward_vector(self): + return self._rot.dot([1, 0, 0]) + @property + def left_vector(self): + return self._rot.dot([0, 1, 0]) + @property + def body_up_vector(self): + return self._rot.dot([0, 0, 1]) + + + def set_position(self, position): + return type(self)(self.frame_id, position, self.orientation) + + def height(self, height): + return self.set_position([self.position[0], self.position[1], height]) + def depth(self, depth): + return self.set_position([self.position[0], self.position[1], -depth]) + + def relative(self, rel_pos): + return self.set_position(self.position + self._rot.dot(rel_pos)) + + def forward(self, distance): return self.relative([ distance, 0, 0]) + def backward(self, distance): return self.relative([-distance, 0, 0]) + def left(self, distance): return self.relative([0, distance, 0]) + def right(self, distance): return self.relative([0, -distance, 0]) + def body_up(self, distance): return self.relative([0, 0, distance]) + def body_down(self, distance): return self.relative([0, 0, -distance]) + + def absolute(self, abs_pos): + return type(self)(self.frame_id, self.position + abs_pos, self.orientation) + + def east(self, distance): return self.absolute([ distance, 0, 0]) + def west(self, distance): return self.absolute([-distance, 0, 0]) + def north(self, distance): return self.absolute([0, distance, 0]) + def south(self, distance): return self.absolute([0, -distance, 0]) + def up(self, distance): return self.absolute([0, 0, distance]) + def down(self, distance): return self.absolute([0, 0, -distance]) + + + def set_orientation(self, orientation): + return type(self)(self.frame_id, self.position, orientation) + + def look_at_rel(self, rel_point): return self.set_orientation(look_at(rel_point)) + def look_at(self, point): return self.look_at_rel(point - self.position) + def look_at_rel_without_pitching(self, rel_point): return self.set_orientation(look_at_without_pitching(rel_point)) + def look_at_without_pitching(self, point): return self.look_at_rel_without_pitching(point - self.position) + + def point_vec_towards(self, body_vec, towards_point): return self.point_vec_towards_rel(body_vec, towards_point - self.pos) + def point_vec_towards_rel(self, body_vec, towards_rel_point): return self.set_orientation(triad((towards_rel_point, 
[0, 0, 1]), (body_vec, [0, 0, 1]))) + def turn_vec_towards(self, body_vec, towards_point): return self.turn_vec_towards_rel(body_vec, towards_point - self.pos) + def turn_vec_towards_rel(self, body_vec, towards_rel_point): return self.set_orientation(triad(([0, 0, 1], towards_rel_point), ([0, 0, 1], body_vec))) + + def yaw_left(self, angle): + return self.set_orientation(transformations.quaternion_multiply( + transformations.quaternion_about_axis(angle, [0, 0, 1]), + self.orientation, + )) + def yaw_right(self, angle): return self.yaw_left(-angle) + def yaw_left_deg(self, angle_degrees): return self.yaw_left(math.radians(angle_degrees)) + def yaw_right_deg(self, angle_degrees): return self.yaw_right(math.radians(angle_degrees)) + turn_left = yaw_left + turn_right = yaw_right + turn_left_deg = yaw_left_deg + turn_right_deg = yaw_right_deg + def heading(self, heading): + return self.set_orientation( + transformations.quaternion_about_axis(heading, [0, 0, 1]) + ) + def heading_deg(self, heading_deg): + return self.heading(math.radians(heading_deg)) + + def roll_right(self, angle): + return self.set_orientation(transformations.quaternion_multiply( + self.orientation, + transformations.quaternion_about_axis(angle, [1, 0, 0]), + )) + def roll_left(self, angle): return self.roll_right(-angle) + def roll_right_deg(self, angle_degrees): return self.roll_right(math.radians(angle_degrees)) + def roll_left_deg(self, angle_degrees): return self.roll_left(math.radians(angle_degrees)) + def zero_roll(self): + return self.set_orientation(lookat(self.forward_vector)) + + + def pitch_down(self, angle): + return self.set_orientation(transformations.quaternion_multiply( + transformations.quaternion_about_axis(angle, self.zero_roll().left_vector), + self.orientation, + )) + def pitch_up(self, angle): return self.pitch_down(-angle) + def pitch_down_deg(self, angle_degrees): return self.pitch_down(math.radians(angle_degrees)) + def pitch_up_deg(self, angle_degrees): return self.pitch_up(math.radians(angle_degrees)) + def zero_roll_and_pitch(self): + return self.set_orientation(lookat_without_pitching(self.forward_vector)) + + + def as_Pose(self): + return Pose( + position=Point(*self.position), + orientation=Quaternion(*self.orientation), + ) + + def as_PoseTwist(self, linear=[0, 0, 0], angular=[0, 0, 0]): + return PoseTwist( + pose=self.as_Pose(), + twist=Twist( + linear=Vector3(*linear), + angular=Vector3(*angular), + ), + ) + + def as_PoseTwistStamped(self, linear=[0, 0, 0], angular=[0, 0, 0]): + return PoseTwistStamped( + header=Header( + frame_id=self.frame_id, + ), + posetwist=self.as_PoseTwist(linear, angular), + ) + + def as_MoveToGoal(self, linear=[0, 0, 0], angular=[0, 0, 0], **kwargs): + return MoveToGoal( + header=Header( + frame_id=self.frame_id, + ), + posetwist=self.as_PoseTwist(linear, angular), + **kwargs) + + # allow implicit usage in place of a PoseTwistStamped + @property + def header(self): + return Header( + frame_id=self.frame_id, + ) + @property + def posetwist(self): + return self.as_PoseTwist() + # and in place of a MoveToGoal + @property + def speed(self): + return 0 + @property + def uncoordinated(self): + return False + @property + def linear_tolerance(self): + return 0 + @property + def angular_tolerance(self): + return 0 From 8dcc5099918c55fd5801096bdd5400500adb2a7b Mon Sep 17 00:00:00 2001 From: Jacob Panikulam Date: Thu, 4 Feb 2016 22:58:08 -0500 Subject: [PATCH 021/267] LAUNCH: Address PR #56 comments --- .../scripts/velocitymeasurements_to_vector3 | 38 ------------------- 
utils/uf_common/uf_common/interpolate.py | 14 ------- 2 files changed, 52 deletions(-) delete mode 100755 utils/uf_common/scripts/velocitymeasurements_to_vector3 delete mode 100644 utils/uf_common/uf_common/interpolate.py diff --git a/utils/uf_common/scripts/velocitymeasurements_to_vector3 b/utils/uf_common/scripts/velocitymeasurements_to_vector3 deleted file mode 100755 index 8b5b6f84..00000000 --- a/utils/uf_common/scripts/velocitymeasurements_to_vector3 +++ /dev/null @@ -1,38 +0,0 @@ -#!/usr/bin/env python - -from __future__ import division - -import math - -import numpy - -import roslib -roslib.load_manifest('uf_common') -import rospy -from geometry_msgs.msg import Vector3, Vector3Stamped - -from uf_common.orientation_helpers import xyz_array -from uf_common.msg import VelocityMeasurements - - -rospy.init_node('velocitymeasurements_to_vector3') - -vel_pub = rospy.Publisher('vel', Vector3Stamped) -def cb(msg): - msg.velocity_measurements = [x for x in msg.velocity_measurements - if not math.isnan(x.velocity)] - A = numpy.zeros((len(msg.velocity_measurements), 3)) - for i, velocity_measurement in enumerate(msg.velocity_measurements): - A[i] = xyz_array(velocity_measurement.direction) - b = [velocity_measurement.velocity for velocity_measurement in msg.velocity_measurements] - x, residuals, rank, s = numpy.linalg.lstsq(A, b) - if rank < len(x): - rospy.logerr('velocity not observable!') - return - vel_pub.publish(Vector3Stamped( - header=msg.header, - vector=Vector3(*x), - )) -rospy.Subscriber('velocitymeasurements', VelocityMeasurements, cb) - -rospy.spin() diff --git a/utils/uf_common/uf_common/interpolate.py b/utils/uf_common/uf_common/interpolate.py deleted file mode 100644 index d94d1c86..00000000 --- a/utils/uf_common/uf_common/interpolate.py +++ /dev/null @@ -1,14 +0,0 @@ -from __future__ import division - -def sample_curve((xs, ys), x): - assert len(xs) == len(ys) - # assert that xs is sorted? 
- if x < xs[0]: x = xs[0] - if x > xs[-1]: x = xs[-1] - pairs = zip(xs, ys) - # using bisect would be faster - for (left_x, left_y), (right_x, right_y) in zip(pairs[:-1], pairs[1:]): - if left_x <= x <= right_x: - a = (x - left_x)/(right_x - left_x) - return a * (right_y - left_y) + left_y - assert False From d2d78200fd148e140440feb047ae63870ba8c848 Mon Sep 17 00:00:00 2001 From: Zach Goins Date: Fri, 19 Feb 2016 18:58:13 -0500 Subject: [PATCH 022/267] VIS: Add visualization and move helper --- .../navigator_tools/move_helper.py | 40 +++++++++++++++++++ 1 file changed, 40 insertions(+) create mode 100755 utils/navigator_tools/navigator_tools/move_helper.py diff --git a/utils/navigator_tools/navigator_tools/move_helper.py b/utils/navigator_tools/navigator_tools/move_helper.py new file mode 100755 index 00000000..b985339a --- /dev/null +++ b/utils/navigator_tools/navigator_tools/move_helper.py @@ -0,0 +1,40 @@ +#!/usr/bin/env python + +import rospy +import roslib +import numpy,math,tf,threading +from nav_msgs.msg import Odometry +from geometry_msgs.msg import Point +from navigator_tools import geometry_helper as gh + +rospy.init_node('move_helper') + +class move_helper(object): + # Base class for whatever you are writing + def __init__(self): + + self.odom = Odometry() + self.pose_pub = rospy.Publisher("/set_desired_pose", Point, queue_size = 1) + + rospy.Subscriber("/odom", Odometry, self.odom_cb) + rospy.Subscriber("/move_helper", Point, self.move_cb) + rospy.spin() + + def odom_cb(self, msg): + self.odom = msg + + def move_cb(self, msg): + + to_send = Point() + theta = gh.quat_to_euler(self.odom.pose.pose.orientation) + to_send.x = self.odom.pose.pose.position.x + msg.x + to_send.y = self.odom.pose.pose.position.y + msg.y + to_send.z = theta[2] + msg.z + + self.pose_pub.publish(to_send) + + + +if __name__ == "__main__": + # + helper = move_helper() From 2b9c552f847802536cd7988643068cb571fd628a Mon Sep 17 00:00:00 2001 From: jnez71 Date: Sun, 21 Feb 2016 04:19:20 -0500 Subject: [PATCH 023/267] GNC: bug fixes at lake day, and PD tuning --- utils/navigator_tools/navigator_tools/move_helper.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/utils/navigator_tools/navigator_tools/move_helper.py b/utils/navigator_tools/navigator_tools/move_helper.py index b985339a..c78bfa89 100755 --- a/utils/navigator_tools/navigator_tools/move_helper.py +++ b/utils/navigator_tools/navigator_tools/move_helper.py @@ -29,7 +29,7 @@ def move_cb(self, msg): theta = gh.quat_to_euler(self.odom.pose.pose.orientation) to_send.x = self.odom.pose.pose.position.x + msg.x to_send.y = self.odom.pose.pose.position.y + msg.y - to_send.z = theta[2] + msg.z + to_send.z = np.rad2deg(theta[2]) + msg.z self.pose_pub.publish(to_send) From 0b46363ebf4f36b26a13f371d10cc7d71b87a95e Mon Sep 17 00:00:00 2001 From: Jacob Panikulam Date: Sun, 7 Feb 2016 21:36:22 -0500 Subject: [PATCH 024/267] UTILS: Add geometry helpers for missions --- .../sub8_ros_tools/sub8_ros_tools/__init__.py | 6 +++-- .../sub8_ros_tools/geometry_helpers.py | 16 +++++++++--- .../sub8_ros_tools/msg_helpers.py | 26 ++++++++++++++++++- 3 files changed, 42 insertions(+), 6 deletions(-) diff --git a/utils/sub8_ros_tools/sub8_ros_tools/__init__.py b/utils/sub8_ros_tools/sub8_ros_tools/__init__.py index e97799c1..67389fce 100644 --- a/utils/sub8_ros_tools/sub8_ros_tools/__init__.py +++ b/utils/sub8_ros_tools/sub8_ros_tools/__init__.py @@ -1,6 +1,8 @@ +# flake8: noqa from init_helpers import wait_for_param, wait_for_subscriber from image_helpers import 
Image_Subscriber, Image_Publisher, make_image_msg, get_image_msg from msg_helpers import (rosmsg_to_numpy, pose_to_numpy, twist_to_numpy, odometry_to_numpy, - make_wrench_stamped, make_pose_stamped, posetwist_to_numpy, odom_sub, make_header) + make_wrench_stamped, make_pose_stamped, posetwist_to_numpy, numpy_to_quaternion, odom_sub, make_header, + numpy_pair_to_pose, numpy_to_point) from threading_helpers import thread_lock -from geometry_helpers import make_rotation, normalize, skew_symmetric_cross, compose_transformation \ No newline at end of file +from geometry_helpers import make_rotation, normalize, skew_symmetric_cross, deskew, compose_transformation, project_pt_to_plane \ No newline at end of file diff --git a/utils/sub8_ros_tools/sub8_ros_tools/geometry_helpers.py b/utils/sub8_ros_tools/sub8_ros_tools/geometry_helpers.py index 1b2e26a5..235a6ef4 100644 --- a/utils/sub8_ros_tools/sub8_ros_tools/geometry_helpers.py +++ b/utils/sub8_ros_tools/sub8_ros_tools/geometry_helpers.py @@ -52,13 +52,17 @@ def skew_symmetric_cross(a): ''' assert len(a) == 3, "a must be in R3" skew_symm = np.array([ - [0., -a[2], +a[1]], - [+a[2], 0., -a[0]], - [-a[1], +a[0], 0.], + [0., -a[2], +a[1]], # noqa + [+a[2], 0., -a[0]], # noqa + [-a[1], +a[0], 0.], # noqa ], dtype=np.float32) return skew_symm +def deskew(A): + return np.array([A[2, 1], A[0, 2], A[1, 0]], dtype=np.float32) + + def normalize(vector): return vector / np.linalg.norm(vector) @@ -70,3 +74,9 @@ def compose_transformation(R, t): transformation[3, :3] = t transformation[3, 3] = 1.0 return transformation + + +def project_pt_to_plane(point, plane_normal): + dist = np.dot(plane_normal, point) + projected = point - (dist * plane_normal) + return projected diff --git a/utils/sub8_ros_tools/sub8_ros_tools/msg_helpers.py b/utils/sub8_ros_tools/sub8_ros_tools/msg_helpers.py index 6524ddf7..21168813 100644 --- a/utils/sub8_ros_tools/sub8_ros_tools/msg_helpers.py +++ b/utils/sub8_ros_tools/sub8_ros_tools/msg_helpers.py @@ -1,4 +1,5 @@ import numpy as np +from tf import transformations import geometry_msgs.msg as geometry_msgs import std_msgs.msg as std_msgs import nav_msgs.msg as nav_msgs @@ -64,7 +65,6 @@ def twist_to_numpy(twist): Convert a twist message into a tuple of numpy arrays returns (linear, angular) ''' - linear = rosmsg_to_numpy(twist.linear) angular = rosmsg_to_numpy(twist.angular) return linear, angular @@ -90,6 +90,30 @@ def odometry_to_numpy(odom): return pose, twist, pose_covariance, twist_covariance +def numpy_to_point(np_vector): + return geometry_msgs.Point(*np_vector) + + +def numpy_to_quaternion(np_quaternion): + return geometry_msgs.Quaternion(*np_quaternion) + + +def numpy_matrix_to_quaternion(np_matrix): + '''Given a 3x3 rotation matrix, return its geometry_msgs Quaternion''' + assert np_matrix.shape == (3, 3), "Must submit 3x3 rotation matrix" + pose_mat = np.eye(4) + pose_mat[:3, :3] = np_matrix + np_quaternion = transformations.quaternion_from_matrix(pose_mat) + return geometry_msgs.Quaternion(*np_quaternion) + + +def numpy_pair_to_pose(np_rotation_matrix, np_translation): + '''Convert a R, t pair to a geometry_msgs Pose''' + orientation = numpy_matrix_to_quaternion(np_rotation_matrix) + position = numpy_to_point(np_translation) + return geometry_msgs.Pose(position=position, orientation=orientation) + + def make_header(frame='/body'): try: cur_time = rospy.Time.now() From e118c5303deceb23cec28ebbbc0826a675eea5c0 Mon Sep 17 00:00:00 2001 From: Jacob Panikulam Date: Wed, 2 Mar 2016 23:27:00 -0500 Subject: [PATCH 025/267] 
UTILS: Add quaternion pair_to_msg --- utils/sub8_ros_tools/sub8_ros_tools/__init__.py | 11 +++++++---- utils/sub8_ros_tools/sub8_ros_tools/msg_helpers.py | 6 ++++++ 2 files changed, 13 insertions(+), 4 deletions(-) diff --git a/utils/sub8_ros_tools/sub8_ros_tools/__init__.py b/utils/sub8_ros_tools/sub8_ros_tools/__init__.py index 67389fce..19d300b0 100644 --- a/utils/sub8_ros_tools/sub8_ros_tools/__init__.py +++ b/utils/sub8_ros_tools/sub8_ros_tools/__init__.py @@ -1,8 +1,11 @@ # flake8: noqa from init_helpers import wait_for_param, wait_for_subscriber from image_helpers import Image_Subscriber, Image_Publisher, make_image_msg, get_image_msg -from msg_helpers import (rosmsg_to_numpy, pose_to_numpy, twist_to_numpy, odometry_to_numpy, - make_wrench_stamped, make_pose_stamped, posetwist_to_numpy, numpy_to_quaternion, odom_sub, make_header, - numpy_pair_to_pose, numpy_to_point) +from msg_helpers import ( + make_header, odom_sub, + rosmsg_to_numpy, pose_to_numpy, twist_to_numpy, odometry_to_numpy, posetwist_to_numpy, + make_wrench_stamped, make_pose_stamped, + numpy_to_quaternion, numpy_pair_to_pose, numpy_to_point, numpy_quat_pair_to_pose +) from threading_helpers import thread_lock -from geometry_helpers import make_rotation, normalize, skew_symmetric_cross, deskew, compose_transformation, project_pt_to_plane \ No newline at end of file +from geometry_helpers import make_rotation, normalize, skew_symmetric_cross, deskew, compose_transformation, project_pt_to_plane diff --git a/utils/sub8_ros_tools/sub8_ros_tools/msg_helpers.py b/utils/sub8_ros_tools/sub8_ros_tools/msg_helpers.py index 21168813..f9325963 100644 --- a/utils/sub8_ros_tools/sub8_ros_tools/msg_helpers.py +++ b/utils/sub8_ros_tools/sub8_ros_tools/msg_helpers.py @@ -114,6 +114,12 @@ def numpy_pair_to_pose(np_rotation_matrix, np_translation): return geometry_msgs.Pose(position=position, orientation=orientation) +def numpy_quat_pair_to_pose(np_quaternion, np_translation): + orientation = numpy_to_quaternion(np_quaternion) + position = numpy_to_point(np_translation) + return geometry_msgs.Pose(position=position, orientation=orientation) + + def make_header(frame='/body'): try: cur_time = rospy.Time.now() From cb2a65f420697749a2f05fdfb1142040919ef526 Mon Sep 17 00:00:00 2001 From: Jacob Panikulam Date: Thu, 3 Mar 2016 17:41:56 -0500 Subject: [PATCH 026/267] MISSIONS: Comply orientation-helpers to pep8 --- .../uf_common/orientation_helpers.py | 284 +++++++++++------- 1 file changed, 178 insertions(+), 106 deletions(-) diff --git a/utils/uf_common/uf_common/orientation_helpers.py b/utils/uf_common/uf_common/orientation_helpers.py index bd77e636..54e87e99 100644 --- a/utils/uf_common/uf_common/orientation_helpers.py +++ b/utils/uf_common/uf_common/orientation_helpers.py @@ -1,38 +1,40 @@ from __future__ import division - -import math import warnings -import numpy - -import roslib -roslib.load_manifest('uf_common') +import numpy as np import rospy from tf import transformations from nav_msgs.msg import Odometry from uf_common.msg import PoseTwistStamped, PoseTwist, MoveToGoal from std_msgs.msg import Header from geometry_msgs.msg import Pose as Pose, Quaternion, Point, Vector3, Twist +from sub8_ros_tools import rosmsg_to_numpy + +UP = np.array([0.0, 0.0, 1.0], np.float64) +EAST, NORTH, WEST, SOUTH = [transformations.quaternion_about_axis(np.pi / 2 * i, UP) for i in xrange(4)] def normalized(x): - x = numpy.array(x) + x = np.array(x) if max(map(abs, x)) == 0: warnings.warn('Normalizing zero-length vector to random unit vector') - x = 
numpy.random.standard_normal(x.shape) + x = np.random.standard_normal(x.shape) x = x / max(map(abs, x)) - x = x / numpy.linalg.norm(x) + x = x / np.linalg.norm(x) return x + def get_perpendicular(a, b=None): - a = numpy.array(a) + a = np.array(a) if max(map(abs, a)) == 0: - if b is not None: return get_perpendicular(b) - return normalized(numpy.random.standard_normal(a.shape)) + if b is not None: + return get_perpendicular(b) + return normalized(np.random.standard_normal(a.shape)) + if b is None: - b = numpy.random.standard_normal(a.shape) - b = numpy.array(b) - x = numpy.cross(a, b) + b = np.random.standard_normal(a.shape) + b = np.array(b) + x = np.cross(a, b) if max(map(abs, x)) == 0: return get_perpendicular(a) return normalized(x) @@ -42,206 +44,270 @@ def quat_to_rotvec(q): if q[3] < 0: q = -q q = transformations.unit_vector(q) - angle = math.acos(q[3])*2 + angle = np.arccos(q[3]) * 2 axis = normalized(q[0:3]) return axis * angle -def rotvec_to_quat(rotvec): - return transformations.quaternion_about_axis(numpy.linalg.norm(rotvec), rotvec) - -xyz_array = lambda o: numpy.array([o.x, o.y, o.z]) -xyzw_array = lambda o: numpy.array([o.x, o.y, o.z, o.w]) +def rotvec_to_quat(rotvec): + return transformations.quaternion_about_axis(np.linalg.norm(rotvec), rotvec) def triad((a1, a2), (b1, b2)): # returns quaternion that rotates b1 to a1 and b2 near a2 # can get orientation by passing in (global, local) aa = get_perpendicular(a1, a2) - A = numpy.array([normalized(a1), aa, normalized(numpy.cross(a1, aa))]) + A = np.array([normalized(a1), aa, normalized(np.cross(a1, aa))]) bb = get_perpendicular(b1, b2) - B = numpy.array([normalized(b1), bb, normalized(numpy.cross(b1, bb))]) + B = np.array([normalized(b1), bb, normalized(np.cross(b1, bb))]) rot = A.T.dot(B) return transformations.quaternion_from_matrix( - [(a,b,c,0) for a,b,c in rot]+ - [(0,0,0,1)]) + [(a, b, c, 0) for a, b, c in rot] + + [(0, 0, 0, 1)]) + def test_triad(): q = transformations.random_quaternion() - - a = numpy.random.standard_normal(3) - b = numpy.random.standard_normal(3) - + + a = np.random.standard_normal(3) + b = np.random.standard_normal(3) + m = transformations.quaternion_matrix(q)[:3, :3] q_ = triad((m.dot(a), m.dot(b)), (a, b)) - - assert numpy.linalg.norm(quat_to_rotvec( + + assert np.linalg.norm(quat_to_rotvec( transformations.quaternion_multiply( q, transformations.quaternion_inverse(q_), ) )) < 1e-6 -test_triad() -def look_at(forward, upish=[0, 0, 1]): +def look_at(forward, upish=UP): # assumes standard forward-left-up body coordinate system - return triad((forward, upish), ([1, 0, 0], [0, 0, 1])) -lookat = look_at # deprecated + return triad((forward, upish), ([1, 0, 0], UP)) +lookat = look_at # deprecated + -def look_at_without_pitching(forwardish, up=[0, 0, 1]): +def look_at_without_pitching(forwardish, up=UP): # assumes standard forward-left-up body coordinate system - return triad((up, forwardish), ([0, 0, 1], [1, 0, 0])) -lookat_without_pitching = look_at_without_pitching # deprecated + return triad((up, forwardish), (UP, [1, 0, 0])) +lookat_without_pitching = look_at_without_pitching # deprecated + -def look_at_camera(forward, upish=[0, 0, 1]): +def look_at_camera(forward, upish=UP): # assumes camera right-down-forward coordinate system - return triad((forward, upish), ([0, 0, 1], [0, -1, 0])) -lookat_camera = look_at_camera # deprecated + return triad((forward, upish), (UP, [0, -1, 0])) +lookat_camera = look_at_camera # deprecated -EAST, NORTH, WEST, SOUTH = [transformations.quaternion_about_axis(math.pi/2*i, 
[0, 0, 1]) for i in xrange(4)] def safe_wait_for_message(topic, topic_type): while True: try: return rospy.wait_for_message(topic, topic_type, .5) except rospy.exceptions.ROSException, e: - if 'timeout' not in e.message: raise + if 'timeout' not in e.message: + raise print topic, 'wait_for_message timed out!' - + class PoseEditor(object): @classmethod def from_Odometry_topic(cls, topic='/odom'): return cls.from_Odometry(safe_wait_for_message(topic, Odometry)) - + @classmethod def from_Odometry(cls, msg): return cls.from_Pose(msg.header.frame_id, msg.pose.pose) - + @classmethod def from_PoseTwistStamped_topic(cls, topic): return cls.from_PoseTwistStamped(safe_wait_for_message(topic, PoseTwistStamped)) - + @classmethod def from_PoseTwistStamped(cls, msg): return cls.from_Pose(msg.header.frame_id, msg.posetwist.pose) - + @classmethod def from_Pose(cls, frame_id, msg): - return cls(frame_id, xyz_array(msg.position), xyzw_array(msg.orientation)) - + return cls(frame_id, rosmsg_to_numpy(msg.position), rosmsg_to_numpy(msg.orientation)) + def __init__(self, frame_id, position, orientation): self.frame_id = frame_id self.position = position self.orientation = orientation - + + self.frame_id = frame_id + self.position = position + self.orientation = orientation + @property def _rot(self): return transformations.quaternion_matrix(self.orientation)[:3, :3] + @property def forward_vector(self): return self._rot.dot([1, 0, 0]) + @property def left_vector(self): return self._rot.dot([0, 1, 0]) + @property def body_up_vector(self): - return self._rot.dot([0, 0, 1]) - - + return self._rot.dot(UP) + + # Position def set_position(self, position): return type(self)(self.frame_id, position, self.orientation) - + def height(self, height): return self.set_position([self.position[0], self.position[1], height]) + def depth(self, depth): return self.set_position([self.position[0], self.position[1], -depth]) - + def relative(self, rel_pos): return self.set_position(self.position + self._rot.dot(rel_pos)) - - def forward(self, distance): return self.relative([ distance, 0, 0]) - def backward(self, distance): return self.relative([-distance, 0, 0]) - def left(self, distance): return self.relative([0, distance, 0]) - def right(self, distance): return self.relative([0, -distance, 0]) - def body_up(self, distance): return self.relative([0, 0, distance]) - def body_down(self, distance): return self.relative([0, 0, -distance]) - + + def forward(self, distance): + return self.relative([+distance, 0, 0]) + + def backward(self, distance): + return self.relative([-distance, 0, 0]) + + def left(self, distance): + return self.relative([0, +distance, 0]) + + def right(self, distance): + return self.relative([0, -distance, 0]) + + def body_up(self, distance): + return self.relative([0, 0, +distance]) + + def body_down(self, distance): + return self.relative([0, 0, -distance]) + def absolute(self, abs_pos): return type(self)(self.frame_id, self.position + abs_pos, self.orientation) - - def east(self, distance): return self.absolute([ distance, 0, 0]) - def west(self, distance): return self.absolute([-distance, 0, 0]) - def north(self, distance): return self.absolute([0, distance, 0]) - def south(self, distance): return self.absolute([0, -distance, 0]) - def up(self, distance): return self.absolute([0, 0, distance]) - def down(self, distance): return self.absolute([0, 0, -distance]) - - + + def east(self, distance): + return self.absolute([+distance, 0, 0]) + + def west(self, distance): + return self.absolute([-distance, 0, 0]) + + 
def north(self, distance): + return self.absolute([0, +distance, 0]) + + def south(self, distance): + return self.absolute([0, -distance, 0]) + + def up(self, distance): + return self.absolute([0, 0, +distance]) + + def down(self, distance): + return self.absolute([0, 0, -distance]) + + # Orientation def set_orientation(self, orientation): return type(self)(self.frame_id, self.position, orientation) - - def look_at_rel(self, rel_point): return self.set_orientation(look_at(rel_point)) - def look_at(self, point): return self.look_at_rel(point - self.position) - def look_at_rel_without_pitching(self, rel_point): return self.set_orientation(look_at_without_pitching(rel_point)) - def look_at_without_pitching(self, point): return self.look_at_rel_without_pitching(point - self.position) - - def point_vec_towards(self, body_vec, towards_point): return self.point_vec_towards_rel(body_vec, towards_point - self.pos) - def point_vec_towards_rel(self, body_vec, towards_rel_point): return self.set_orientation(triad((towards_rel_point, [0, 0, 1]), (body_vec, [0, 0, 1]))) - def turn_vec_towards(self, body_vec, towards_point): return self.turn_vec_towards_rel(body_vec, towards_point - self.pos) - def turn_vec_towards_rel(self, body_vec, towards_rel_point): return self.set_orientation(triad(([0, 0, 1], towards_rel_point), ([0, 0, 1], body_vec))) - + + def look_at_rel(self, rel_point): + return self.set_orientation(look_at(rel_point)) + + def look_at(self, point): + return self.look_at_rel(point - self.position) + + def look_at_rel_without_pitching(self, rel_point): + return self.set_orientation(look_at_without_pitching(rel_point)) + + def look_at_without_pitching(self, point): + return self.look_at_rel_without_pitching(point - self.position) + + def point_vec_towards(self, body_vec, towards_point): + return self.point_vec_towards_rel(body_vec, towards_point - self.pos) + + def point_vec_towards_rel(self, body_vec, towards_rel_point): + return self.set_orientation(triad((towards_rel_point, UP), (body_vec, UP))) + + def turn_vec_towards(self, body_vec, towards_point): + return self.turn_vec_towards_rel(body_vec, towards_point - self.pos) + + def turn_vec_towards_rel(self, body_vec, towards_rel_point): + return self.set_orientation(triad((UP, towards_rel_point), (UP, body_vec))) + def yaw_left(self, angle): return self.set_orientation(transformations.quaternion_multiply( - transformations.quaternion_about_axis(angle, [0, 0, 1]), + transformations.quaternion_about_axis(angle, UP), self.orientation, )) - def yaw_right(self, angle): return self.yaw_left(-angle) - def yaw_left_deg(self, angle_degrees): return self.yaw_left(math.radians(angle_degrees)) - def yaw_right_deg(self, angle_degrees): return self.yaw_right(math.radians(angle_degrees)) + + def yaw_right(self, angle): + return self.yaw_left(-angle) + + def yaw_left_deg(self, angle_degrees): + return self.yaw_left(np.radians(angle_degrees)) + + def yaw_right_deg(self, angle_degrees): + return self.yaw_right(np.radians(angle_degrees)) + turn_left = yaw_left turn_right = yaw_right turn_left_deg = yaw_left_deg turn_right_deg = yaw_right_deg + def heading(self, heading): return self.set_orientation( - transformations.quaternion_about_axis(heading, [0, 0, 1]) + transformations.quaternion_about_axis(heading, UP) ) + def heading_deg(self, heading_deg): - return self.heading(math.radians(heading_deg)) - + return self.heading(np.radians(heading_deg)) + def roll_right(self, angle): return self.set_orientation(transformations.quaternion_multiply( self.orientation, 
transformations.quaternion_about_axis(angle, [1, 0, 0]), )) - def roll_left(self, angle): return self.roll_right(-angle) - def roll_right_deg(self, angle_degrees): return self.roll_right(math.radians(angle_degrees)) - def roll_left_deg(self, angle_degrees): return self.roll_left(math.radians(angle_degrees)) + + def roll_left(self, angle): + return self.roll_right(-angle) + + def roll_right_deg(self, angle_degrees): + return self.roll_right(np.radians(angle_degrees)) + + def roll_left_deg(self, angle_degrees): + return self.roll_left(np.radians(angle_degrees)) + def zero_roll(self): return self.set_orientation(lookat(self.forward_vector)) - - + def pitch_down(self, angle): return self.set_orientation(transformations.quaternion_multiply( transformations.quaternion_about_axis(angle, self.zero_roll().left_vector), self.orientation, )) - def pitch_up(self, angle): return self.pitch_down(-angle) - def pitch_down_deg(self, angle_degrees): return self.pitch_down(math.radians(angle_degrees)) - def pitch_up_deg(self, angle_degrees): return self.pitch_up(math.radians(angle_degrees)) + + def pitch_up(self, angle): + return self.pitch_down(-angle) + + def pitch_down_deg(self, angle_degrees): + return self.pitch_down(np.radians(angle_degrees)) + + def pitch_up_deg(self, angle_degrees): + return self.pitch_up(np.radians(angle_degrees)) + def zero_roll_and_pitch(self): return self.set_orientation(lookat_without_pitching(self.forward_vector)) - - + def as_Pose(self): return Pose( position=Point(*self.position), orientation=Quaternion(*self.orientation), ) - + def as_PoseTwist(self, linear=[0, 0, 0], angular=[0, 0, 0]): return PoseTwist( pose=self.as_Pose(), @@ -250,7 +316,7 @@ def as_PoseTwist(self, linear=[0, 0, 0], angular=[0, 0, 0]): angular=Vector3(*angular), ), ) - + def as_PoseTwistStamped(self, linear=[0, 0, 0], angular=[0, 0, 0]): return PoseTwistStamped( header=Header( @@ -258,34 +324,40 @@ def as_PoseTwistStamped(self, linear=[0, 0, 0], angular=[0, 0, 0]): ), posetwist=self.as_PoseTwist(linear, angular), ) - + def as_MoveToGoal(self, linear=[0, 0, 0], angular=[0, 0, 0], **kwargs): return MoveToGoal( header=Header( frame_id=self.frame_id, ), posetwist=self.as_PoseTwist(linear, angular), - **kwargs) - + **kwargs + ) + # allow implicit usage in place of a PoseTwistStamped @property def header(self): return Header( frame_id=self.frame_id, ) + @property def posetwist(self): return self.as_PoseTwist() # and in place of a MoveToGoal + @property def speed(self): return 0 + @property def uncoordinated(self): return False + @property def linear_tolerance(self): return 0 + @property def angular_tolerance(self): return 0 From 9b8803752c6acd2fc28f119a00ed94247c2a2037 Mon Sep 17 00:00:00 2001 From: Jacob Panikulam Date: Thu, 3 Mar 2016 20:11:49 -0500 Subject: [PATCH 027/267] LEGACY: Add MoveTo.action --- utils/uf_common/CMakeLists.txt | 6 ++++++ utils/uf_common/action/MoveTo.action | 11 +++++++++++ utils/uf_common/uf_common/orientation_helpers.py | 9 +++------ 3 files changed, 20 insertions(+), 6 deletions(-) create mode 100644 utils/uf_common/action/MoveTo.action diff --git a/utils/uf_common/CMakeLists.txt b/utils/uf_common/CMakeLists.txt index 7d84efc8..62efc331 100644 --- a/utils/uf_common/CMakeLists.txt +++ b/utils/uf_common/CMakeLists.txt @@ -3,6 +3,12 @@ project(uf_common) find_package(catkin REQUIRED COMPONENTS message_generation message_runtime geometry_msgs tf actionlib interactive_markers std_msgs actionlib_msgs cmake_modules) catkin_python_setup() +add_action_files( + FILES + MoveTo.action +) + + 
add_message_files( FILES PoseTwistStamped.msg diff --git a/utils/uf_common/action/MoveTo.action b/utils/uf_common/action/MoveTo.action new file mode 100644 index 00000000..c2ec5960 --- /dev/null +++ b/utils/uf_common/action/MoveTo.action @@ -0,0 +1,11 @@ +# goal. copies PoseTwistStamped. +Header header +uf_common/PoseTwist posetwist +float64 speed +bool uncoordinated # false goes in a straight line, true achieves some components before others +float64 linear_tolerance # distance from goal for result to be sent +float64 angular_tolerance +--- +# result +--- +# feedback diff --git a/utils/uf_common/uf_common/orientation_helpers.py b/utils/uf_common/uf_common/orientation_helpers.py index 54e87e99..d81892f2 100644 --- a/utils/uf_common/uf_common/orientation_helpers.py +++ b/utils/uf_common/uf_common/orientation_helpers.py @@ -10,6 +10,7 @@ from geometry_msgs.msg import Pose as Pose, Quaternion, Point, Vector3, Twist from sub8_ros_tools import rosmsg_to_numpy + UP = np.array([0.0, 0.0, 1.0], np.float64) EAST, NORTH, WEST, SOUTH = [transformations.quaternion_about_axis(np.pi / 2 * i, UP) for i in xrange(4)] @@ -86,21 +87,17 @@ def test_triad(): def look_at(forward, upish=UP): # assumes standard forward-left-up body coordinate system return triad((forward, upish), ([1, 0, 0], UP)) -lookat = look_at # deprecated def look_at_without_pitching(forwardish, up=UP): # assumes standard forward-left-up body coordinate system return triad((up, forwardish), (UP, [1, 0, 0])) -lookat_without_pitching = look_at_without_pitching # deprecated def look_at_camera(forward, upish=UP): # assumes camera right-down-forward coordinate system return triad((forward, upish), (UP, [0, -1, 0])) -lookat_camera = look_at_camera # deprecated - def safe_wait_for_message(topic, topic_type): while True: @@ -282,7 +279,7 @@ def roll_left_deg(self, angle_degrees): return self.roll_left(np.radians(angle_degrees)) def zero_roll(self): - return self.set_orientation(lookat(self.forward_vector)) + return self.set_orientation(look_at(self.forward_vector)) def pitch_down(self, angle): return self.set_orientation(transformations.quaternion_multiply( @@ -300,7 +297,7 @@ def pitch_up_deg(self, angle_degrees): return self.pitch_up(np.radians(angle_degrees)) def zero_roll_and_pitch(self): - return self.set_orientation(lookat_without_pitching(self.forward_vector)) + return self.set_orientation(look_at_without_pitching(self.forward_vector)) def as_Pose(self): return Pose( From 2cd4aedfe3d19e1096fc56b88b2cfcfb93d6945f Mon Sep 17 00:00:00 2001 From: jnez71 Date: Fri, 1 Apr 2016 19:51:24 -0400 Subject: [PATCH 028/267] INTERFACE: make move_helper take body-frame commands --- .../navigator_tools/navigator_tools/move_helper.py | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/utils/navigator_tools/navigator_tools/move_helper.py b/utils/navigator_tools/navigator_tools/move_helper.py index c78bfa89..186070cf 100755 --- a/utils/navigator_tools/navigator_tools/move_helper.py +++ b/utils/navigator_tools/navigator_tools/move_helper.py @@ -3,6 +3,7 @@ import rospy import roslib import numpy,math,tf,threading +from tf import transformations as trns from nav_msgs.msg import Odometry from geometry_msgs.msg import Point from navigator_tools import geometry_helper as gh @@ -26,10 +27,17 @@ def odom_cb(self, msg): def move_cb(self, msg): to_send = Point() + + R = trns.quaternion_matrix([self.odom.pose.pose.orientation.x, self.odom.pose.pose.orientation.y, self.odom.pose.pose.orientation.z, self.odom.pose.pose.orientation.w])[:2, :2] 
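+        # R is the upper-left 2x2 block of the quaternion's 4x4 homogeneous
+        # rotation matrix: the planar rotation that takes body-frame (x, y)
+        # commands into the world frame before they are added to the position.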
theta = gh.quat_to_euler(self.odom.pose.pose.orientation) - to_send.x = self.odom.pose.pose.position.x + msg.x - to_send.y = self.odom.pose.pose.position.y + msg.y - to_send.z = np.rad2deg(theta[2]) + msg.z + + current = numpy.array([self.odom.pose.pose.position.x, self.odom.pose.pose.position.y, numpy.rad2deg(theta[2])]) + shift = numpy.concatenate((R.dot([msg.x, msg.y]), [msg.z])) + desired = current + shift + + to_send.x = desired[0] + to_send.y = desired[1] + to_send.z = desired[2] self.pose_pub.publish(to_send) From e8c6b6771b9fa5ca965ecbc9d0961b4011be3c36 Mon Sep 17 00:00:00 2001 From: Jacob Panikulam Date: Mon, 4 Apr 2016 02:14:56 -0400 Subject: [PATCH 029/267] UTILS: Add 'clip' analog for vector norms --- utils/sub8_ros_tools/sub8_ros_tools/__init__.py | 3 ++- .../sub8_ros_tools/geometry_helpers.py | 17 +++++++++++++++++ 2 files changed, 19 insertions(+), 1 deletion(-) diff --git a/utils/sub8_ros_tools/sub8_ros_tools/__init__.py b/utils/sub8_ros_tools/sub8_ros_tools/__init__.py index 19d300b0..5fc78017 100644 --- a/utils/sub8_ros_tools/sub8_ros_tools/__init__.py +++ b/utils/sub8_ros_tools/sub8_ros_tools/__init__.py @@ -8,4 +8,5 @@ numpy_to_quaternion, numpy_pair_to_pose, numpy_to_point, numpy_quat_pair_to_pose ) from threading_helpers import thread_lock -from geometry_helpers import make_rotation, normalize, skew_symmetric_cross, deskew, compose_transformation, project_pt_to_plane +from geometry_helpers import (make_rotation, normalize, skew_symmetric_cross, deskew, compose_transformation, + project_pt_to_plane, clip_norm) diff --git a/utils/sub8_ros_tools/sub8_ros_tools/geometry_helpers.py b/utils/sub8_ros_tools/sub8_ros_tools/geometry_helpers.py index 235a6ef4..20ef99ac 100644 --- a/utils/sub8_ros_tools/sub8_ros_tools/geometry_helpers.py +++ b/utils/sub8_ros_tools/sub8_ros_tools/geometry_helpers.py @@ -80,3 +80,20 @@ def project_pt_to_plane(point, plane_normal): dist = np.dot(plane_normal, point) projected = point - (dist * plane_normal) return projected + + +def clip_norm(vector, lower_bound, upper_bound): + '''Return a vector pointing the same direction as $vector, + with maximum norm $bound + if norm(vector) < bound, return vector unchanged + + Like np.clip, but for vector norms + ''' + norm = np.linalg.norm(vector) + if lower_bound < norm < upper_bound: + return np.copy(vector) + if norm < lower_bound: + v_new = (vector * lower_bound) / norm + else: + v_new = (vector * upper_bound) / norm + return v_new From ba0471f8778f60be516c17fed0021357c76eeeab Mon Sep 17 00:00:00 2001 From: Jacob Panikulam Date: Mon, 4 Apr 2016 02:18:35 -0400 Subject: [PATCH 030/267] POSE-EDITOR: Add relative strafe --- utils/uf_common/uf_common/orientation_helpers.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/utils/uf_common/uf_common/orientation_helpers.py b/utils/uf_common/uf_common/orientation_helpers.py index d81892f2..baa0be19 100644 --- a/utils/uf_common/uf_common/orientation_helpers.py +++ b/utils/uf_common/uf_common/orientation_helpers.py @@ -160,6 +160,9 @@ def set_position(self, position): return type(self)(self.frame_id, position, self.orientation) def height(self, height): + '''TODO: + Make this distance from bottom + ''' return self.set_position([self.position[0], self.position[1], height]) def depth(self, depth): @@ -168,6 +171,10 @@ def depth(self, depth): def relative(self, rel_pos): return self.set_position(self.position + self._rot.dot(rel_pos)) + def strafe_relative(self, rel_pos_2d): + rel_pos_3d = np.append(rel_pos_2d, 0.0) + return 
self.set_position(self.position + self._rot.dot(rel_pos_3d)) + def forward(self, distance): return self.relative([+distance, 0, 0]) @@ -236,10 +243,11 @@ def turn_vec_towards_rel(self, body_vec, towards_rel_point): return self.set_orientation(triad((UP, towards_rel_point), (UP, body_vec))) def yaw_left(self, angle): - return self.set_orientation(transformations.quaternion_multiply( + ori = self.set_orientation(transformations.quaternion_multiply( transformations.quaternion_about_axis(angle, UP), self.orientation, )) + return ori def yaw_right(self, angle): return self.yaw_left(-angle) From 6191c6220fe771832a50a86a391a031fd2a3a1e9 Mon Sep 17 00:00:00 2001 From: Jacob Panikulam Date: Mon, 4 Apr 2016 02:59:52 -0400 Subject: [PATCH 031/267] MISC: Pre-address comments on #82 --- utils/uf_common/uf_common/orientation_helpers.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/utils/uf_common/uf_common/orientation_helpers.py b/utils/uf_common/uf_common/orientation_helpers.py index baa0be19..dcd74b4f 100644 --- a/utils/uf_common/uf_common/orientation_helpers.py +++ b/utils/uf_common/uf_common/orientation_helpers.py @@ -243,11 +243,10 @@ def turn_vec_towards_rel(self, body_vec, towards_rel_point): return self.set_orientation(triad((UP, towards_rel_point), (UP, body_vec))) def yaw_left(self, angle): - ori = self.set_orientation(transformations.quaternion_multiply( + return self.set_orientation(transformations.quaternion_multiply( transformations.quaternion_about_axis(angle, UP), self.orientation, )) - return ori def yaw_right(self, angle): return self.yaw_left(-angle) From 97305c97b669d5416fe8e0624607ac9291edcace Mon Sep 17 00:00:00 2001 From: Jacob Panikulam Date: Mon, 11 Apr 2016 17:00:00 -0400 Subject: [PATCH 032/267] VISION: Add easy threshold determination tool --- utils/sub8_ros_tools/sub8_ros_tools/__init__.py | 7 +++++++ utils/sub8_ros_tools/test_ros_tools/test_ros_tools.py | 8 ++++---- 2 files changed, 11 insertions(+), 4 deletions(-) diff --git a/utils/sub8_ros_tools/sub8_ros_tools/__init__.py b/utils/sub8_ros_tools/sub8_ros_tools/__init__.py index 5fc78017..46b1e564 100644 --- a/utils/sub8_ros_tools/sub8_ros_tools/__init__.py +++ b/utils/sub8_ros_tools/sub8_ros_tools/__init__.py @@ -1,4 +1,11 @@ # flake8: noqa +import image_helpers +import init_helpers +import msg_helpers +import threading_helpers +import geometry_helpers + +# TODO: Adjust all existing code to not require these to be top-level imports from init_helpers import wait_for_param, wait_for_subscriber from image_helpers import Image_Subscriber, Image_Publisher, make_image_msg, get_image_msg from msg_helpers import ( diff --git a/utils/sub8_ros_tools/test_ros_tools/test_ros_tools.py b/utils/sub8_ros_tools/test_ros_tools/test_ros_tools.py index 7740acc7..05ce0b87 100644 --- a/utils/sub8_ros_tools/test_ros_tools/test_ros_tools.py +++ b/utils/sub8_ros_tools/test_ros_tools/test_ros_tools.py @@ -3,10 +3,10 @@ import numpy as np from geometry_msgs.msg import Quaternion, Vector3, Pose2D from sensor_msgs.msg import Image -from sub8_ros_tools import make_image_msg, get_image_msg -from sub8_ros_tools import rosmsg_to_numpy, make_wrench_stamped -from sub8_ros_tools import thread_lock -from sub8_ros_tools import skew_symmetric_cross, make_rotation, normalize +from sub8_ros_tools.image_helpers import make_image_msg, get_image_msg +from sub8_ros_tools.msg_helpers import rosmsg_to_numpy, make_wrench_stamped +from sub8_ros_tools.threading_helpers import thread_lock +from sub8_ros_tools.geometry_helpers import 
skew_symmetric_cross, make_rotation, normalize class TestROSTools(unittest.TestCase): From fc3384942be1368874974bc419b60a9a2a21868c Mon Sep 17 00:00:00 2001 From: Jacob Panikulam Date: Sun, 10 Apr 2016 15:44:04 -0400 Subject: [PATCH 033/267] VISION: Make channel markers use color config.yaml --- .../sub8_ros_tools/sub8_ros_tools/__init__.py | 2 +- .../sub8_ros_tools/image_helpers.py | 27 ++++++++++++++++--- 2 files changed, 24 insertions(+), 5 deletions(-) diff --git a/utils/sub8_ros_tools/sub8_ros_tools/__init__.py b/utils/sub8_ros_tools/sub8_ros_tools/__init__.py index 46b1e564..1a349905 100644 --- a/utils/sub8_ros_tools/sub8_ros_tools/__init__.py +++ b/utils/sub8_ros_tools/sub8_ros_tools/__init__.py @@ -7,7 +7,7 @@ # TODO: Adjust all existing code to not require these to be top-level imports from init_helpers import wait_for_param, wait_for_subscriber -from image_helpers import Image_Subscriber, Image_Publisher, make_image_msg, get_image_msg +from image_helpers import Image_Subscriber, Image_Publisher, make_image_msg, get_image_msg, get_parameter_range from msg_helpers import ( make_header, odom_sub, rosmsg_to_numpy, pose_to_numpy, twist_to_numpy, odometry_to_numpy, posetwist_to_numpy, diff --git a/utils/sub8_ros_tools/sub8_ros_tools/image_helpers.py b/utils/sub8_ros_tools/sub8_ros_tools/image_helpers.py index 1e590a6f..69819255 100644 --- a/utils/sub8_ros_tools/sub8_ros_tools/image_helpers.py +++ b/utils/sub8_ros_tools/sub8_ros_tools/image_helpers.py @@ -5,14 +5,33 @@ is intentional, to avoid the use of a global cvbridge, and to avoid reinstantiating a CvBrige for each use. ''' import rospy -import cv2 +import numpy as np from cv_bridge import CvBridge, CvBridgeError from sensor_msgs.msg import Image +from sub8_ros_tools import wait_for_param + + +def get_parameter_range(parameter_root): + ''' + ex: parameter_root='/vision/buoy/red' + this will then fetch /vision/buoy/red/hsv_low and hsv_high + ''' + low_param, high_param = parameter_root + '/hsv_low', parameter_root + '/hsv_high' + + rospy.logwarn("Blocking -- waiting for parameters {} and {}".format(low_param, high_param)) + + wait_for_param(low_param) + wait_for_param(high_param) + low = rospy.get_param(low_param) + high = rospy.get_param(high_param) + + rospy.loginfo("Got {} and {}".format(low_param, high_param)) + return np.array([low, high]).transpose() def make_image_msg(cv_image, encoding='bgr8'): '''Take a cv image, and produce a ROS image message''' - bridge = CvBridge() + bridge = CvBridge() image_message = bridge.cv2_to_imgmsg(cv_image, encoding) return image_message @@ -28,9 +47,9 @@ class Image_Publisher(object): def __init__(self, topic, encoding="bgr8", queue_size=1): '''Create an essentially normal publisher, that will publish images without conversion hassle''' self.im_pub = rospy.Publisher(topic, Image, queue_size=queue_size) - self.bridge = CvBridge() + self.bridge = CvBridge() self.encoding = encoding - + def publish(self, cv_image): try: image_message = self.bridge.cv2_to_imgmsg(cv_image, self.encoding) From 8a773d39e7164322d51e336ee47c41e6a863e0ba Mon Sep 17 00:00:00 2001 From: Jacob Panikulam Date: Thu, 14 Apr 2016 09:46:22 -0400 Subject: [PATCH 034/267] POOL: Add pool spot-changes --- utils/sub8_ros_tools/sub8_ros_tools/image_helpers.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/utils/sub8_ros_tools/sub8_ros_tools/image_helpers.py b/utils/sub8_ros_tools/sub8_ros_tools/image_helpers.py index 69819255..f04f5057 100644 --- a/utils/sub8_ros_tools/sub8_ros_tools/image_helpers.py +++ 
b/utils/sub8_ros_tools/sub8_ros_tools/image_helpers.py @@ -8,7 +8,7 @@ import numpy as np from cv_bridge import CvBridge, CvBridgeError from sensor_msgs.msg import Image -from sub8_ros_tools import wait_for_param +from sub8_ros_tools.init_helpers import wait_for_param def get_parameter_range(parameter_root): From fd429cd358a63c1d8f5d6b181f30a9aa148d9a13 Mon Sep 17 00:00:00 2001 From: Jacob Panikulam Date: Sat, 16 Apr 2016 17:16:09 -0400 Subject: [PATCH 035/267] VISION: Add camerainfo to visionrequest2d --- .../sub8_ros_tools/sub8_ros_tools/image_helpers.py | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/utils/sub8_ros_tools/sub8_ros_tools/image_helpers.py b/utils/sub8_ros_tools/sub8_ros_tools/image_helpers.py index f04f5057..8582eb0d 100644 --- a/utils/sub8_ros_tools/sub8_ros_tools/image_helpers.py +++ b/utils/sub8_ros_tools/sub8_ros_tools/image_helpers.py @@ -6,8 +6,9 @@ ''' import rospy import numpy as np +from os import path from cv_bridge import CvBridge, CvBridgeError -from sensor_msgs.msg import Image +from sensor_msgs.msg import Image, CameraInfo from sub8_ros_tools.init_helpers import wait_for_param @@ -66,10 +67,20 @@ def __init__(self, topic, callback=None, encoding="bgr8", queue_size=1): This behaves like a conventional subscriber, except handling the additional image conversion ''' self.encoding = encoding + self.camera_info = None self.im_sub = rospy.Subscriber(topic, Image, self.convert, queue_size=queue_size) + + root_topic, image_subtopic = path.split(topic) + self.info_sub = rospy.Subscriber(root_topic + '/camera_info', CameraInfo, self.info_cb, queue_size=queue_size) + self.bridge = CvBridge() self.callback = callback + def info_cb(self, msg): + """The path trick here is a hack""" + self.info_sub.unregister() + self.camera_info = msg + def convert(self, data): try: image = self.bridge.imgmsg_to_cv2(data, desired_encoding=self.encoding) From a3723899d9bd308c19144980d22eca0c10c11194 Mon Sep 17 00:00:00 2001 From: Matthew Langford Date: Sun, 24 Apr 2016 18:41:52 -0400 Subject: [PATCH 036/267] VISION: Add wait_for_camera_info method to Image Subscriber. --- .../sub8_ros_tools/image_helpers.py | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/utils/sub8_ros_tools/sub8_ros_tools/image_helpers.py b/utils/sub8_ros_tools/sub8_ros_tools/image_helpers.py index 8582eb0d..4d2c1518 100644 --- a/utils/sub8_ros_tools/sub8_ros_tools/image_helpers.py +++ b/utils/sub8_ros_tools/sub8_ros_tools/image_helpers.py @@ -76,6 +76,25 @@ def __init__(self, topic, callback=None, encoding="bgr8", queue_size=1): self.bridge = CvBridge() self.callback = callback + def wait_for_camera_info(self, timeout=10): + ''' + Blocks until camera info has been received. + Note: 'timeout' is in seconds. + ''' + rospy.logwarn("Blocking -- waiting at most %d seconds for camera info." % timeout) + + timeout = rospy.Duration(timeout) + start_time = rospy.Time.now() + print rospy.is_shutdown() + while (start_time - rospy.Time.now() <= timeout) and (not rospy.is_shutdown()): + if self.camera_info is not None: + rospy.loginfo("Camera info found!") + return self.camera_info + rospy.sleep(.2) + + rospy.logerr("Camera info not found.") + return None + def info_cb(self, msg): """The path trick here is a hack""" self.info_sub.unregister() From 1354561516609e920005dbd27b42bef1c6a9abf6 Mon Sep 17 00:00:00 2001 From: Matthew Langford Date: Sun, 24 Apr 2016 21:00:22 -0400 Subject: [PATCH 037/267] MARKER: Correct marker estimates when we are level. 
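
A minimal usage sketch of the camera-info wait touched here (the topic name
and handle_image callback are illustrative, not part of this change):

    from sub8_ros_tools import Image_Subscriber

    sub = Image_Subscriber('/camera/image_rect_color', callback=handle_image)
    info = sub.wait_for_camera_info(timeout=10)  # blocks for up to 10 seconds
    if info is not None:
        K = info.K  # 3x3 row-major camera intrinsic matrix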
--- utils/sub8_ros_tools/sub8_ros_tools/image_helpers.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/utils/sub8_ros_tools/sub8_ros_tools/image_helpers.py b/utils/sub8_ros_tools/sub8_ros_tools/image_helpers.py index 4d2c1518..87715209 100644 --- a/utils/sub8_ros_tools/sub8_ros_tools/image_helpers.py +++ b/utils/sub8_ros_tools/sub8_ros_tools/image_helpers.py @@ -85,13 +85,12 @@ def wait_for_camera_info(self, timeout=10): timeout = rospy.Duration(timeout) start_time = rospy.Time.now() - print rospy.is_shutdown() while (start_time - rospy.Time.now() <= timeout) and (not rospy.is_shutdown()): if self.camera_info is not None: rospy.loginfo("Camera info found!") return self.camera_info rospy.sleep(.2) - + rospy.logerr("Camera info not found.") return None From 6aa861308573610c02c56a44602c3b7b8920f72a Mon Sep 17 00:00:00 2001 From: Matthew Langford Date: Sun, 24 Apr 2016 21:01:19 -0400 Subject: [PATCH 038/267] UTILS: Remove height() orientation helper function. --- utils/uf_common/uf_common/orientation_helpers.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/utils/uf_common/uf_common/orientation_helpers.py b/utils/uf_common/uf_common/orientation_helpers.py index dcd74b4f..a0ae4b97 100644 --- a/utils/uf_common/uf_common/orientation_helpers.py +++ b/utils/uf_common/uf_common/orientation_helpers.py @@ -159,11 +159,12 @@ def body_up_vector(self): def set_position(self, position): return type(self)(self.frame_id, position, self.orientation) - def height(self, height): - '''TODO: - Make this distance from bottom - ''' - return self.set_position([self.position[0], self.position[1], height]) + # Removed since the Sub can't go out of the water + # def height(self, height): + # '''TODO: + # Make this distance from bottom + # ''' + # return self.set_position([self.position[0], self.position[1], height]) def depth(self, depth): return self.set_position([self.position[0], self.position[1], -depth]) From 0dff4d5a8154d5cc85e2eccf72f73b3727e21cd2 Mon Sep 17 00:00:00 2001 From: Matthew Langford Date: Tue, 26 Apr 2016 04:25:31 -0400 Subject: [PATCH 039/267] SIM: Add torpedo framework. --- utils/sub8_ros_tools/sub8_ros_tools/msg_helpers.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/utils/sub8_ros_tools/sub8_ros_tools/msg_helpers.py b/utils/sub8_ros_tools/sub8_ros_tools/msg_helpers.py index f9325963..c28b952d 100644 --- a/utils/sub8_ros_tools/sub8_ros_tools/msg_helpers.py +++ b/utils/sub8_ros_tools/sub8_ros_tools/msg_helpers.py @@ -98,6 +98,12 @@ def numpy_to_quaternion(np_quaternion): return geometry_msgs.Quaternion(*np_quaternion) +def numpy_to_twist(linear_vel, angular_vel): + '''TODO: Unit-test + ''' + return geometry_msgs.Twist(linear=geometry_msgs.Vector3(*linear_vel), angular=geometry_msgs.Vector3(*angular_vel)) + + def numpy_matrix_to_quaternion(np_matrix): '''Given a 3x3 rotation matrix, return its geometry_msgs Quaternion''' assert np_matrix.shape == (3, 3), "Must submit 3x3 rotation matrix" From 3edb1dd8e617d481a675ed9721188dd9ad9b8b2d Mon Sep 17 00:00:00 2001 From: Matthew Langford Date: Wed, 27 Apr 2016 03:29:58 -0400 Subject: [PATCH 040/267] UTILS: Swap order of input parameters to match ros styles better. 
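
Both pose helpers now take the translation first, matching the
(translation, rotation) ordering used elsewhere in ROS (e.g. what tf's
lookupTransform returns). A minimal sketch of the new call convention:

    import numpy as np
    from sub8_ros_tools import numpy_quat_pair_to_pose

    t = np.array([1.0, 2.0, 0.0])
    q = np.array([0.0, 0.0, 0.0, 1.0])  # identity quaternion, [x, y, z, w]
    pose = numpy_quat_pair_to_pose(t, q)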
--- utils/sub8_ros_tools/sub8_ros_tools/msg_helpers.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/utils/sub8_ros_tools/sub8_ros_tools/msg_helpers.py b/utils/sub8_ros_tools/sub8_ros_tools/msg_helpers.py index c28b952d..29fca81f 100644 --- a/utils/sub8_ros_tools/sub8_ros_tools/msg_helpers.py +++ b/utils/sub8_ros_tools/sub8_ros_tools/msg_helpers.py @@ -113,14 +113,14 @@ def numpy_matrix_to_quaternion(np_matrix): return geometry_msgs.Quaternion(*np_quaternion) -def numpy_pair_to_pose(np_rotation_matrix, np_translation): +def numpy_pair_to_pose(np_translation, np_rotation_matrix): '''Convert a R, t pair to a geometry_msgs Pose''' orientation = numpy_matrix_to_quaternion(np_rotation_matrix) position = numpy_to_point(np_translation) return geometry_msgs.Pose(position=position, orientation=orientation) -def numpy_quat_pair_to_pose(np_quaternion, np_translation): +def numpy_quat_pair_to_pose(np_translation, np_quaternion): orientation = numpy_to_quaternion(np_quaternion) position = numpy_to_point(np_translation) return geometry_msgs.Pose(position=position, orientation=orientation) From 6bc18165ad479b37272e40941b13d77bcebba6a0 Mon Sep 17 00:00:00 2001 From: Jacob Panikulam Date: Wed, 27 Apr 2016 00:03:41 -0400 Subject: [PATCH 041/267] MISC: Move orientation_helpers to pose_editor.py --- .../uf_common/orientation_helpers.py | 368 ------------------ 1 file changed, 368 deletions(-) delete mode 100644 utils/uf_common/uf_common/orientation_helpers.py diff --git a/utils/uf_common/uf_common/orientation_helpers.py b/utils/uf_common/uf_common/orientation_helpers.py deleted file mode 100644 index a0ae4b97..00000000 --- a/utils/uf_common/uf_common/orientation_helpers.py +++ /dev/null @@ -1,368 +0,0 @@ -from __future__ import division -import warnings - -import numpy as np -import rospy -from tf import transformations -from nav_msgs.msg import Odometry -from uf_common.msg import PoseTwistStamped, PoseTwist, MoveToGoal -from std_msgs.msg import Header -from geometry_msgs.msg import Pose as Pose, Quaternion, Point, Vector3, Twist -from sub8_ros_tools import rosmsg_to_numpy - - -UP = np.array([0.0, 0.0, 1.0], np.float64) -EAST, NORTH, WEST, SOUTH = [transformations.quaternion_about_axis(np.pi / 2 * i, UP) for i in xrange(4)] - - -def normalized(x): - x = np.array(x) - if max(map(abs, x)) == 0: - warnings.warn('Normalizing zero-length vector to random unit vector') - x = np.random.standard_normal(x.shape) - x = x / max(map(abs, x)) - x = x / np.linalg.norm(x) - return x - - -def get_perpendicular(a, b=None): - a = np.array(a) - if max(map(abs, a)) == 0: - if b is not None: - return get_perpendicular(b) - return normalized(np.random.standard_normal(a.shape)) - - if b is None: - b = np.random.standard_normal(a.shape) - b = np.array(b) - x = np.cross(a, b) - if max(map(abs, x)) == 0: - return get_perpendicular(a) - return normalized(x) - - -def quat_to_rotvec(q): - if q[3] < 0: - q = -q - q = transformations.unit_vector(q) - angle = np.arccos(q[3]) * 2 - axis = normalized(q[0:3]) - return axis * angle - - -def rotvec_to_quat(rotvec): - return transformations.quaternion_about_axis(np.linalg.norm(rotvec), rotvec) - - -def triad((a1, a2), (b1, b2)): - # returns quaternion that rotates b1 to a1 and b2 near a2 - # can get orientation by passing in (global, local) - aa = get_perpendicular(a1, a2) - A = np.array([normalized(a1), aa, normalized(np.cross(a1, aa))]) - bb = get_perpendicular(b1, b2) - B = np.array([normalized(b1), bb, normalized(np.cross(b1, bb))]) - rot = A.T.dot(B) - return 
transformations.quaternion_from_matrix( - [(a, b, c, 0) for a, b, c in rot] + - [(0, 0, 0, 1)]) - - -def test_triad(): - q = transformations.random_quaternion() - - a = np.random.standard_normal(3) - b = np.random.standard_normal(3) - - m = transformations.quaternion_matrix(q)[:3, :3] - q_ = triad((m.dot(a), m.dot(b)), (a, b)) - - assert np.linalg.norm(quat_to_rotvec( - transformations.quaternion_multiply( - q, - transformations.quaternion_inverse(q_), - ) - )) < 1e-6 - - -def look_at(forward, upish=UP): - # assumes standard forward-left-up body coordinate system - return triad((forward, upish), ([1, 0, 0], UP)) - - -def look_at_without_pitching(forwardish, up=UP): - # assumes standard forward-left-up body coordinate system - return triad((up, forwardish), (UP, [1, 0, 0])) - - -def look_at_camera(forward, upish=UP): - # assumes camera right-down-forward coordinate system - return triad((forward, upish), (UP, [0, -1, 0])) - - -def safe_wait_for_message(topic, topic_type): - while True: - try: - return rospy.wait_for_message(topic, topic_type, .5) - except rospy.exceptions.ROSException, e: - if 'timeout' not in e.message: - raise - print topic, 'wait_for_message timed out!' - - -class PoseEditor(object): - @classmethod - def from_Odometry_topic(cls, topic='/odom'): - return cls.from_Odometry(safe_wait_for_message(topic, Odometry)) - - @classmethod - def from_Odometry(cls, msg): - return cls.from_Pose(msg.header.frame_id, msg.pose.pose) - - @classmethod - def from_PoseTwistStamped_topic(cls, topic): - return cls.from_PoseTwistStamped(safe_wait_for_message(topic, PoseTwistStamped)) - - @classmethod - def from_PoseTwistStamped(cls, msg): - return cls.from_Pose(msg.header.frame_id, msg.posetwist.pose) - - @classmethod - def from_Pose(cls, frame_id, msg): - return cls(frame_id, rosmsg_to_numpy(msg.position), rosmsg_to_numpy(msg.orientation)) - - def __init__(self, frame_id, position, orientation): - self.frame_id = frame_id - self.position = position - self.orientation = orientation - - self.frame_id = frame_id - self.position = position - self.orientation = orientation - - @property - def _rot(self): - return transformations.quaternion_matrix(self.orientation)[:3, :3] - - @property - def forward_vector(self): - return self._rot.dot([1, 0, 0]) - - @property - def left_vector(self): - return self._rot.dot([0, 1, 0]) - - @property - def body_up_vector(self): - return self._rot.dot(UP) - - # Position - def set_position(self, position): - return type(self)(self.frame_id, position, self.orientation) - - # Removed since the Sub can't go out of the water - # def height(self, height): - # '''TODO: - # Make this distance from bottom - # ''' - # return self.set_position([self.position[0], self.position[1], height]) - - def depth(self, depth): - return self.set_position([self.position[0], self.position[1], -depth]) - - def relative(self, rel_pos): - return self.set_position(self.position + self._rot.dot(rel_pos)) - - def strafe_relative(self, rel_pos_2d): - rel_pos_3d = np.append(rel_pos_2d, 0.0) - return self.set_position(self.position + self._rot.dot(rel_pos_3d)) - - def forward(self, distance): - return self.relative([+distance, 0, 0]) - - def backward(self, distance): - return self.relative([-distance, 0, 0]) - - def left(self, distance): - return self.relative([0, +distance, 0]) - - def right(self, distance): - return self.relative([0, -distance, 0]) - - def body_up(self, distance): - return self.relative([0, 0, +distance]) - - def body_down(self, distance): - return self.relative([0, 0, -distance]) - 
- def absolute(self, abs_pos): - return type(self)(self.frame_id, self.position + abs_pos, self.orientation) - - def east(self, distance): - return self.absolute([+distance, 0, 0]) - - def west(self, distance): - return self.absolute([-distance, 0, 0]) - - def north(self, distance): - return self.absolute([0, +distance, 0]) - - def south(self, distance): - return self.absolute([0, -distance, 0]) - - def up(self, distance): - return self.absolute([0, 0, +distance]) - - def down(self, distance): - return self.absolute([0, 0, -distance]) - - # Orientation - def set_orientation(self, orientation): - return type(self)(self.frame_id, self.position, orientation) - - def look_at_rel(self, rel_point): - return self.set_orientation(look_at(rel_point)) - - def look_at(self, point): - return self.look_at_rel(point - self.position) - - def look_at_rel_without_pitching(self, rel_point): - return self.set_orientation(look_at_without_pitching(rel_point)) - - def look_at_without_pitching(self, point): - return self.look_at_rel_without_pitching(point - self.position) - - def point_vec_towards(self, body_vec, towards_point): - return self.point_vec_towards_rel(body_vec, towards_point - self.pos) - - def point_vec_towards_rel(self, body_vec, towards_rel_point): - return self.set_orientation(triad((towards_rel_point, UP), (body_vec, UP))) - - def turn_vec_towards(self, body_vec, towards_point): - return self.turn_vec_towards_rel(body_vec, towards_point - self.pos) - - def turn_vec_towards_rel(self, body_vec, towards_rel_point): - return self.set_orientation(triad((UP, towards_rel_point), (UP, body_vec))) - - def yaw_left(self, angle): - return self.set_orientation(transformations.quaternion_multiply( - transformations.quaternion_about_axis(angle, UP), - self.orientation, - )) - - def yaw_right(self, angle): - return self.yaw_left(-angle) - - def yaw_left_deg(self, angle_degrees): - return self.yaw_left(np.radians(angle_degrees)) - - def yaw_right_deg(self, angle_degrees): - return self.yaw_right(np.radians(angle_degrees)) - - turn_left = yaw_left - turn_right = yaw_right - turn_left_deg = yaw_left_deg - turn_right_deg = yaw_right_deg - - def heading(self, heading): - return self.set_orientation( - transformations.quaternion_about_axis(heading, UP) - ) - - def heading_deg(self, heading_deg): - return self.heading(np.radians(heading_deg)) - - def roll_right(self, angle): - return self.set_orientation(transformations.quaternion_multiply( - self.orientation, - transformations.quaternion_about_axis(angle, [1, 0, 0]), - )) - - def roll_left(self, angle): - return self.roll_right(-angle) - - def roll_right_deg(self, angle_degrees): - return self.roll_right(np.radians(angle_degrees)) - - def roll_left_deg(self, angle_degrees): - return self.roll_left(np.radians(angle_degrees)) - - def zero_roll(self): - return self.set_orientation(look_at(self.forward_vector)) - - def pitch_down(self, angle): - return self.set_orientation(transformations.quaternion_multiply( - transformations.quaternion_about_axis(angle, self.zero_roll().left_vector), - self.orientation, - )) - - def pitch_up(self, angle): - return self.pitch_down(-angle) - - def pitch_down_deg(self, angle_degrees): - return self.pitch_down(np.radians(angle_degrees)) - - def pitch_up_deg(self, angle_degrees): - return self.pitch_up(np.radians(angle_degrees)) - - def zero_roll_and_pitch(self): - return self.set_orientation(look_at_without_pitching(self.forward_vector)) - - def as_Pose(self): - return Pose( - position=Point(*self.position), - 
orientation=Quaternion(*self.orientation), - ) - - def as_PoseTwist(self, linear=[0, 0, 0], angular=[0, 0, 0]): - return PoseTwist( - pose=self.as_Pose(), - twist=Twist( - linear=Vector3(*linear), - angular=Vector3(*angular), - ), - ) - - def as_PoseTwistStamped(self, linear=[0, 0, 0], angular=[0, 0, 0]): - return PoseTwistStamped( - header=Header( - frame_id=self.frame_id, - ), - posetwist=self.as_PoseTwist(linear, angular), - ) - - def as_MoveToGoal(self, linear=[0, 0, 0], angular=[0, 0, 0], **kwargs): - return MoveToGoal( - header=Header( - frame_id=self.frame_id, - ), - posetwist=self.as_PoseTwist(linear, angular), - **kwargs - ) - - # allow implicit usage in place of a PoseTwistStamped - @property - def header(self): - return Header( - frame_id=self.frame_id, - ) - - @property - def posetwist(self): - return self.as_PoseTwist() - # and in place of a MoveToGoal - - @property - def speed(self): - return 0 - - @property - def uncoordinated(self): - return False - - @property - def linear_tolerance(self): - return 0 - - @property - def angular_tolerance(self): - return 0 From 45acfb49c16e2f0c078c1753320c5622c7daad43 Mon Sep 17 00:00:00 2001 From: Jacob Panikulam Date: Wed, 27 Apr 2016 00:06:05 -0400 Subject: [PATCH 042/267] UTILS: Add cache decorator --- .../sub8_ros_tools/sub8_ros_tools/__init__.py | 1 + .../sub8_ros_tools/func_helpers.py | 38 +++++++++++++++++++ 2 files changed, 39 insertions(+) create mode 100644 utils/sub8_ros_tools/sub8_ros_tools/func_helpers.py diff --git a/utils/sub8_ros_tools/sub8_ros_tools/__init__.py b/utils/sub8_ros_tools/sub8_ros_tools/__init__.py index 1a349905..76906122 100644 --- a/utils/sub8_ros_tools/sub8_ros_tools/__init__.py +++ b/utils/sub8_ros_tools/sub8_ros_tools/__init__.py @@ -4,6 +4,7 @@ import msg_helpers import threading_helpers import geometry_helpers +import func_helpers # TODO: Adjust all existing code to not require these to be top-level imports from init_helpers import wait_for_param, wait_for_subscriber diff --git a/utils/sub8_ros_tools/sub8_ros_tools/func_helpers.py b/utils/sub8_ros_tools/sub8_ros_tools/func_helpers.py new file mode 100644 index 00000000..8b2fc4c1 --- /dev/null +++ b/utils/sub8_ros_tools/sub8_ros_tools/func_helpers.py @@ -0,0 +1,38 @@ +class Cache(object): + """No support for **kwargs**""" + def __init__(self, func): + self.call_dict = {} + self.func = func + + def __call__(self, *args): + if args not in self.call_dict.keys(): + result = self.func(*args) + self.call_dict[args] = result + else: + result = self.call_dict[args] + return result + + +@Cache +def add(a, b): + print 'adding', a, b + return a + b + + +@Cache +def gooberstein(data): + print "Being called" + return data + "balls" + + +if __name__ == '__main__': + + print add(1, 2) + print add(1, 2) + + print add(2, 3) + print add(2, 3) + + print gooberstein("hello") + print gooberstein("hello") + print gooberstein("hello") From d720d85c870aedff162a701edd4905cc52fec9c0 Mon Sep 17 00:00:00 2001 From: Jacob Panikulam Date: Wed, 27 Apr 2016 00:25:14 -0400 Subject: [PATCH 043/267] IMAGE_SUB: Fix rare out-of-order issue --- utils/sub8_ros_tools/sub8_ros_tools/image_helpers.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/utils/sub8_ros_tools/sub8_ros_tools/image_helpers.py b/utils/sub8_ros_tools/sub8_ros_tools/image_helpers.py index 87715209..969d3f9e 100644 --- a/utils/sub8_ros_tools/sub8_ros_tools/image_helpers.py +++ b/utils/sub8_ros_tools/sub8_ros_tools/image_helpers.py @@ -47,9 +47,9 @@ def get_image_msg(ros_image, encoding='bgr8'): class 
Image_Publisher(object): def __init__(self, topic, encoding="bgr8", queue_size=1): '''Create an essentially normal publisher, that will publish images without conversion hassle''' - self.im_pub = rospy.Publisher(topic, Image, queue_size=queue_size) self.bridge = CvBridge() self.encoding = encoding + self.im_pub = rospy.Publisher(topic, Image, queue_size=queue_size) def publish(self, cv_image): try: From 7b2c8c94717f07e3d106cf4f895731a2706a30ff Mon Sep 17 00:00:00 2001 From: Jason Nezvadovitz Date: Sat, 30 Apr 2016 10:34:51 -0400 Subject: [PATCH 044/267] MERGE: Lake Day 4/23/16 (#17) * ROS: cleanup * ROS: clean up, waypoint is a pose not a point * LAKE DAY: new gains, and now has option to use external trajectory generator * NEW NODE: launch c3 trajectory generator from sub * INTRODUCE: node for running neural net controller as alternative to MRAC * MRAC TGEN: modified kinematic limits to match lake tuning * DEFAULTS: make external c3 tgen the default * BUG FIX: reference frame issue when using external tgen, and weird list comprehension * BUG FIX: simulator would start with quaternion [0, 0, 0, 0] --- .../navigator_tools/move_helper.py | 19 ++++++++++--------- 1 file changed, 10 insertions(+), 9 deletions(-) diff --git a/utils/navigator_tools/navigator_tools/move_helper.py b/utils/navigator_tools/navigator_tools/move_helper.py index 186070cf..eeb07b35 100755 --- a/utils/navigator_tools/navigator_tools/move_helper.py +++ b/utils/navigator_tools/navigator_tools/move_helper.py @@ -5,18 +5,17 @@ import numpy,math,tf,threading from tf import transformations as trns from nav_msgs.msg import Odometry -from geometry_msgs.msg import Point +from geometry_msgs.msg import PoseStamped, Point from navigator_tools import geometry_helper as gh rospy.init_node('move_helper') class move_helper(object): - # Base class for whatever you are writing + def __init__(self): self.odom = Odometry() - self.pose_pub = rospy.Publisher("/set_desired_pose", Point, queue_size = 1) - + self.pose_pub = rospy.Publisher("/waypoint", PoseStamped, queue_size = 0) rospy.Subscriber("/odom", Odometry, self.odom_cb) rospy.Subscriber("/move_helper", Point, self.move_cb) rospy.spin() @@ -26,7 +25,8 @@ def odom_cb(self, msg): def move_cb(self, msg): - to_send = Point() + to_send = PoseStamped() + to_send.header.frame_id = "/enu" R = trns.quaternion_matrix([self.odom.pose.pose.orientation.x, self.odom.pose.pose.orientation.y, self.odom.pose.pose.orientation.z, self.odom.pose.pose.orientation.w])[:2, :2] theta = gh.quat_to_euler(self.odom.pose.pose.orientation) @@ -34,15 +34,16 @@ def move_cb(self, msg): current = numpy.array([self.odom.pose.pose.position.x, self.odom.pose.pose.position.y, numpy.rad2deg(theta[2])]) shift = numpy.concatenate((R.dot([msg.x, msg.y]), [msg.z])) desired = current + shift + desired_quaternion = trns.quaternion_from_euler(0, 0, numpy.deg2rad(desired[2])) - to_send.x = desired[0] - to_send.y = desired[1] - to_send.z = desired[2] + to_send.pose.position.x = desired[0] + to_send.pose.position.y = desired[1] + to_send.pose.orientation.x, to_send.pose.orientation.y, to_send.pose.orientation.z, to_send.pose.orientation.w = desired_quaternion self.pose_pub.publish(to_send) if __name__ == "__main__": - # + helper = move_helper() From 0a4f443a4a0870c19a75b0b5a53c58751b6d56d9 Mon Sep 17 00:00:00 2001 From: Matthew Langford Date: Sat, 30 Apr 2016 14:50:35 -0400 Subject: [PATCH 045/267] UTILS: Fix numpy_pair_to_pose ordering issue. 
--- utils/sub8_ros_tools/sub8_ros_tools/msg_helpers.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/utils/sub8_ros_tools/sub8_ros_tools/msg_helpers.py b/utils/sub8_ros_tools/sub8_ros_tools/msg_helpers.py index c28b952d..29fca81f 100644 --- a/utils/sub8_ros_tools/sub8_ros_tools/msg_helpers.py +++ b/utils/sub8_ros_tools/sub8_ros_tools/msg_helpers.py @@ -113,14 +113,14 @@ def numpy_matrix_to_quaternion(np_matrix): return geometry_msgs.Quaternion(*np_quaternion) -def numpy_pair_to_pose(np_rotation_matrix, np_translation): +def numpy_pair_to_pose(np_translation, np_rotation_matrix): '''Convert a R, t pair to a geometry_msgs Pose''' orientation = numpy_matrix_to_quaternion(np_rotation_matrix) position = numpy_to_point(np_translation) return geometry_msgs.Pose(position=position, orientation=orientation) -def numpy_quat_pair_to_pose(np_quaternion, np_translation): +def numpy_quat_pair_to_pose(np_translation, np_quaternion): orientation = numpy_to_quaternion(np_quaternion) position = numpy_to_point(np_translation) return geometry_msgs.Pose(position=position, orientation=orientation) From f7b33d9b7747a95bf12f557c116e34171f74a7e7 Mon Sep 17 00:00:00 2001 From: Jacob Panikulam Date: Sun, 1 May 2016 04:31:00 -0400 Subject: [PATCH 046/267] UTILS: Add last_image_time to Image_Subscriber --- utils/sub8_ros_tools/sub8_ros_tools/image_helpers.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/utils/sub8_ros_tools/sub8_ros_tools/image_helpers.py b/utils/sub8_ros_tools/sub8_ros_tools/image_helpers.py index 969d3f9e..6ad3a18d 100644 --- a/utils/sub8_ros_tools/sub8_ros_tools/image_helpers.py +++ b/utils/sub8_ros_tools/sub8_ros_tools/image_helpers.py @@ -66,8 +66,13 @@ def __init__(self, topic, callback=None, encoding="bgr8", queue_size=1): Assumes topic of type "sensor_msgs/Image" This behaves like a conventional subscriber, except handling the additional image conversion ''' + if callback is None: + callback = lambda im: setattr(self, 'last_image', im) + self.encoding = encoding self.camera_info = None + self.last_image_time = None + self.last_image = None self.im_sub = rospy.Subscriber(topic, Image, self.convert, queue_size=queue_size) root_topic, image_subtopic = path.split(topic) @@ -100,6 +105,7 @@ def info_cb(self, msg): self.camera_info = msg def convert(self, data): + self.last_image_time = data.header.stamp try: image = self.bridge.imgmsg_to_cv2(data, desired_encoding=self.encoding) self.callback(image) From 274a4d44bcfeb06f19ad78a26117b40414e24b07 Mon Sep 17 00:00:00 2001 From: Jacob Panikulam Date: Sun, 1 May 2016 08:18:01 -0400 Subject: [PATCH 047/267] PEP8: Go on warpath for pep8ing --- .../sub8_misc_tools/__init__.py | 3 ++- .../sub8_misc_tools/download.py | 2 +- .../sub8_ros_tools/geometry_helpers.py | 6 +++--- .../sub8_ros_tools/init_helpers.py | 5 +---- .../sub8_ros_tools/threading_helpers.py | 2 +- .../test_ros_tools/test_ros_tools.py | 21 +++++++++++-------- 6 files changed, 20 insertions(+), 19 deletions(-) diff --git a/utils/sub8_ros_tools/sub8_misc_tools/__init__.py b/utils/sub8_ros_tools/sub8_misc_tools/__init__.py index d501d40f..e518ed59 100644 --- a/utils/sub8_ros_tools/sub8_misc_tools/__init__.py +++ b/utils/sub8_ros_tools/sub8_misc_tools/__init__.py @@ -1,2 +1,3 @@ +# flake8: noqa from download import download_and_unzip -from download import download \ No newline at end of file +from download import download diff --git a/utils/sub8_ros_tools/sub8_misc_tools/download.py b/utils/sub8_ros_tools/sub8_misc_tools/download.py index 2e262d6a..988d9567 100644 
--- a/utils/sub8_ros_tools/sub8_misc_tools/download.py +++ b/utils/sub8_ros_tools/sub8_misc_tools/download.py @@ -54,4 +54,4 @@ def download(url, output_filename=None): if __name__ == '__main__': sub_model_url = "http://goo.gl/f0ennf?gdriveurl" - download_and_unzip(sub_model_url, '.') \ No newline at end of file + download_and_unzip(sub_model_url, '.') diff --git a/utils/sub8_ros_tools/sub8_ros_tools/geometry_helpers.py b/utils/sub8_ros_tools/sub8_ros_tools/geometry_helpers.py index 20ef99ac..1f533ce5 100644 --- a/utils/sub8_ros_tools/sub8_ros_tools/geometry_helpers.py +++ b/utils/sub8_ros_tools/sub8_ros_tools/geometry_helpers.py @@ -52,9 +52,9 @@ def skew_symmetric_cross(a): ''' assert len(a) == 3, "a must be in R3" skew_symm = np.array([ - [0., -a[2], +a[1]], # noqa - [+a[2], 0., -a[0]], # noqa - [-a[1], +a[0], 0.], # noqa + [+0.00, -a[2], +a[1]], + [+a[2], +0.00, -a[0]], + [-a[1], +a[0], +0.00], ], dtype=np.float32) return skew_symm diff --git a/utils/sub8_ros_tools/sub8_ros_tools/init_helpers.py b/utils/sub8_ros_tools/sub8_ros_tools/init_helpers.py index c2af89b7..75441670 100644 --- a/utils/sub8_ros_tools/sub8_ros_tools/init_helpers.py +++ b/utils/sub8_ros_tools/sub8_ros_tools/init_helpers.py @@ -34,9 +34,6 @@ def wait_for_subscriber(node_name, topic, timeout=5.0): ''' end_time = time.time() + timeout - resolved_topic = rospy.resolve_name(topic) - resolved_node = rospy.resolve_name(node_name) - # Wait for time-out or ros-shutdown while (time.time() < end_time) and (not rospy.is_shutdown()): subscribed = rostest.is_subscriber( @@ -54,4 +51,4 @@ def wait_for_subscriber(node_name, topic, timeout=5.0): rospy.resolve_name(topic), rospy.resolve_name(node_name) ) - return success \ No newline at end of file + return success diff --git a/utils/sub8_ros_tools/sub8_ros_tools/threading_helpers.py b/utils/sub8_ros_tools/sub8_ros_tools/threading_helpers.py index 5d51e760..255298d4 100644 --- a/utils/sub8_ros_tools/sub8_ros_tools/threading_helpers.py +++ b/utils/sub8_ros_tools/sub8_ros_tools/threading_helpers.py @@ -12,7 +12,7 @@ def my_function(a, b, c): ''' def lock_thread(function_to_lock): - '''thread_lock(function) -> locked function + '''thread_lock(function) -> locked function Thread locking decorator If you use this as a decorator for a function, it will apply a threading lock during the execution of that function, Which guarantees that no ROS callbacks can change the state of data while it is executing. 
This
diff --git a/utils/sub8_ros_tools/test_ros_tools/test_ros_tools.py b/utils/sub8_ros_tools/test_ros_tools/test_ros_tools.py
index 05ce0b87..6ca2f41b 100644
--- a/utils/sub8_ros_tools/test_ros_tools/test_ros_tools.py
+++ b/utils/sub8_ros_tools/test_ros_tools/test_ros_tools.py
@@ -48,8 +48,10 @@ def test_make_wrench_stamped(self):
         torque = np.random.random(3) * 10
         wrench_msg = make_wrench_stamped(force, torque, frame='/enu')
 
-        msg_force = rosmsg_to_numpy(wrench_msg.wrench.force)
-        msg_torque = rosmsg_to_numpy(wrench_msg.wrench.torque)
+        msg_force = rosmsg_to_numpy(wrench_msg.wrench.force)  # noqa
+        msg_torque = rosmsg_to_numpy(wrench_msg.wrench.torque)  # noqa
+        self.assertIsNotNone(msg_force)
+        self.assertIsNotNone(msg_torque)
 
     def test_make_image_msg(self):
         '''Test that make ros image message doesn't fail'''
@@ -67,8 +69,6 @@ def test_get_image_msg(self):
 
     def test_thread_lock(self):
         '''Test that the thread lock decorator correctly locks, in the correct order'''
-        ran = False
-
         class FakeLock(object):
             def __init__(self):
                 self.entry = False
@@ -81,6 +81,7 @@ def __exit__(self, *args):
                 self.exit = True
 
         fake_lock = FakeLock()
+
         @thread_lock(fake_lock)
         def lock_test(a):
             return (fake_lock.entry is True) and (fake_lock.exit is False)
@@ -114,10 +115,12 @@ def test_make_rotation(self):
         p_rotated = R.dot(p)
 
         # Test that the matrix actually aligns p with q
-        np.testing.assert_allclose([0.0, 0.0, 0.0], np.cross(p_rotated, q), atol=1e-5,
-                                   err_msg="The generated rotation matrix did not align the input vectors, {} to {}".format(
-                                       p, q)
-                                   )
+        np.testing.assert_allclose(
+            [0.0, 0.0, 0.0], np.cross(p_rotated, q), atol=1e-5,
+            err_msg="The generated rotation matrix did not align the input vectors, {} to {}".format(
+                p, q
+            )
+        )
         self.assertGreater(np.dot(p_rotated, q), 0.0, msg="The rotation did wacky inversion")
 
     def test_normalize_vector(self):
@@ -136,4 +139,4 @@ def test_normalize_vector(self):
 
 if __name__ == '__main__':
     suite = unittest.TestLoader().loadTestsFromTestCase(TestROSTools)
-    unittest.TextTestRunner(verbosity=2).run(suite) \ No newline at end of file
+    unittest.TextTestRunner(verbosity=2).run(suite)
From f8991e37939f0e6a784cdde938b31bb1099077ef Mon Sep 17 00:00:00 2001
From: Matthew Langford
Date: Sun, 1 May 2016 08:52:37 -0400
Subject: [PATCH 048/267] SIM: Address #121 comments.

---
 .../sub8_ros_tools/geometry_helpers.py       | 15 +++++++++++++++
 1 file changed, 15 insertions(+)

diff --git a/utils/sub8_ros_tools/sub8_ros_tools/geometry_helpers.py b/utils/sub8_ros_tools/sub8_ros_tools/geometry_helpers.py
index 1f533ce5..56b31b0b 100644
--- a/utils/sub8_ros_tools/sub8_ros_tools/geometry_helpers.py
+++ b/utils/sub8_ros_tools/sub8_ros_tools/geometry_helpers.py
@@ -1,5 +1,20 @@
 from __future__ import division
 import numpy as np
+import tf
+
+
+def rotate_vect_by_quat(v, q):
+    '''
+    Rotate a vector by a quaternion.
+    v' = q*vq
+
+    q should be [x,y,z,w] (standard ros convention)
+    '''
+    cq = np.array([-q[0], -q[1], -q[2], q[3]])
+    cq_v = tf.transformations.quaternion_multiply(cq, v)
+    v = tf.transformations.quaternion_multiply(cq_v, q)
+    v[1:] *= -1  # Only seemed to work when I flipped this around, is there a problem with the math here?
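+    # Note: quaternion_multiply expects v as a 4-element pure quaternion
+    # [x, y, z, 0]. The standard sandwich product is v' = q * v * q^-1 with q
+    # on the left; computing (q^-1 * v) * q as above yields the inverse
+    # rotation, which is likely what the sign flip is compensating for.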
+ return np.array(v)[:3] def make_rotation(vector_a, vector_b): From 22a5fb3a86110c5f7ab948142b9edf1840755777 Mon Sep 17 00:00:00 2001 From: Jacob Panikulam Date: Mon, 2 May 2016 05:50:45 -0400 Subject: [PATCH 049/267] UTILS: Add super simple quaternion_matrix helper --- utils/sub8_ros_tools/sub8_ros_tools/geometry_helpers.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/utils/sub8_ros_tools/sub8_ros_tools/geometry_helpers.py b/utils/sub8_ros_tools/sub8_ros_tools/geometry_helpers.py index 56b31b0b..06c4ff0f 100644 --- a/utils/sub8_ros_tools/sub8_ros_tools/geometry_helpers.py +++ b/utils/sub8_ros_tools/sub8_ros_tools/geometry_helpers.py @@ -1,5 +1,6 @@ from __future__ import division import numpy as np +from tf import transformations import tf @@ -112,3 +113,8 @@ def clip_norm(vector, lower_bound, upper_bound): else: v_new = (vector * upper_bound) / norm return v_new + + +def quaternion_matrix(q): + mat_h = transformations.quaternion_matrix(q) + return mat_h[:3, :3] / mat_h[3, 3] From 90a58853e59407edc47f12265d49830f89ac30cf Mon Sep 17 00:00:00 2001 From: Matthew Langford Date: Tue, 24 May 2016 23:35:34 -0400 Subject: [PATCH 050/267] VISION: Fix timeout that didn't work. --- utils/sub8_ros_tools/sub8_ros_tools/image_helpers.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/utils/sub8_ros_tools/sub8_ros_tools/image_helpers.py b/utils/sub8_ros_tools/sub8_ros_tools/image_helpers.py index 6ad3a18d..c7b2d93b 100644 --- a/utils/sub8_ros_tools/sub8_ros_tools/image_helpers.py +++ b/utils/sub8_ros_tools/sub8_ros_tools/image_helpers.py @@ -90,14 +90,14 @@ def wait_for_camera_info(self, timeout=10): timeout = rospy.Duration(timeout) start_time = rospy.Time.now() - while (start_time - rospy.Time.now() <= timeout) and (not rospy.is_shutdown()): + while (rospy.Time.now() - start_time < timeout) and (not rospy.is_shutdown()): if self.camera_info is not None: rospy.loginfo("Camera info found!") return self.camera_info rospy.sleep(.2) rospy.logerr("Camera info not found.") - return None + raise Exception("Camera info not found.") def info_cb(self, msg): """The path trick here is a hack""" From b930f1aed15c9ac4840c2ab66b4a755f530a7036 Mon Sep 17 00:00:00 2001 From: Santiago Date: Fri, 3 Jun 2016 20:02:52 -0400 Subject: [PATCH 051/267] SONAR: add pinger code location code, change interface to driver --- drivers/sub8_sonar/CMakeLists.txt | 16 ++ drivers/sub8_sonar/README.md | 17 +++ drivers/sub8_sonar/nodes/sonar_driver.py | 184 +++++++++++++++++++++++ drivers/sub8_sonar/package.xml | 18 +++ 4 files changed, 235 insertions(+) create mode 100644 drivers/sub8_sonar/CMakeLists.txt create mode 100644 drivers/sub8_sonar/README.md create mode 100755 drivers/sub8_sonar/nodes/sonar_driver.py create mode 100644 drivers/sub8_sonar/package.xml diff --git a/drivers/sub8_sonar/CMakeLists.txt b/drivers/sub8_sonar/CMakeLists.txt new file mode 100644 index 00000000..0a063ede --- /dev/null +++ b/drivers/sub8_sonar/CMakeLists.txt @@ -0,0 +1,16 @@ +cmake_minimum_required(VERSION 2.8.3) +project(sub8_sonar) + +find_package(catkin REQUIRED COMPONENTS + rospy +) +catkin_package( +# INCLUDE_DIRS include +# LIBRARIES sub8_sonar +# CATKIN_DEPENDS rospy +# DEPENDS system_lib +) + +include_directories( + ${catkin_INCLUDE_DIRS} +) diff --git a/drivers/sub8_sonar/README.md b/drivers/sub8_sonar/README.md new file mode 100644 index 00000000..31b6b67f --- /dev/null +++ b/drivers/sub8_sonar/README.md @@ -0,0 +1,17 @@ +# Sonar Driver + +This script interfaces with Jake's sonar board and provides a ROS service 
that returns the location and time of emission of the last heard pinger pulse. + +### How to run and use +To start the driver, run: + + roslaunch sub8_sonar sonar.launch + +*Note: In /launch/sonar.launch, make sure that port and baud are set correctly, and that the hydrophones' coordinates in the sonar frame are accurate.* + +In order to ask for hydrophone information: + + rosservice call /sonar/get_pinger_pulse + +*Note: You may have to press tab twice at the end to get a response, since I haven't been able to test it.* +The service should respond with the x, y, z, and t of the emission of the last heard pinger pulse. diff --git a/drivers/sub8_sonar/nodes/sonar_driver.py b/drivers/sub8_sonar/nodes/sonar_driver.py new file mode 100755 index 00000000..59fb50a8 --- /dev/null +++ b/drivers/sub8_sonar/nodes/sonar_driver.py @@ -0,0 +1,184 @@ +#!/usr/bin/env python +import rospy +import rosparam + +import numpy as np + +from sub8_msgs.srv import Sonar, SonarResponse +from sub8_ros_tools import thread_lock, make_header +from sub8_alarm import AlarmBroadcaster + +import threading +import serial +import binascii +import struct +import time +import crc16 + +lock = threading.Lock() +HEADER = 0xAA + + +class Sub8Sonar(): + ''' + Smart sensor that provides high level ROS code with the location of pinger pulses detected with + Jake Easterling's Hydrophone board. + * Adapted from the actuator driver. + + TODO: Add a function to try and reconnect to the serial port if we lose connection. + TODO: Express pulse location in map frame + ''' + def __init__(self, hydrophone_locations, port, baud=9600): + rospy.init_node("sonar") + + alarm_broadcaster = AlarmBroadcaster() + self.disconnection_alarm = alarm_broadcaster.add_alarm( + name='sonar_disconnect', + action_required=True, + severity=0 + ) + self.packet_error_alarm = alarm_broadcaster.add_alarm( + name='sonar_packet_error', + action_required=False, + severity=2 + ) + self.locate_pulse_error_alarm = alarm_broadcaster.add_alarm( + name='sonar_pulse_locating_error', + action_required=False, + severity=2 + ) + + self.ser = serial.Serial(port=port, baudrate=baud, timeout=None) + self.ser.flushInput() + + self.hydrophone_locations = hydrophone_locations + self.sonar_sensor = EchoLocator(hydrophone_locations, wave_propagation_speed=1484) # speed in m/s + + rospy.Service('~/sonar/get_pinger_pulse', Sonar, self.request_data) + + rospy.spin() + + def listener(self): + ''' + Parse the response of the board. + ''' + print "Listening..." + + # We've found a packet now disect it. + response = self.ser.read(19) + rospy.loginfo("Received: %s" % response) + if self.check_CRC(response): + print "Found!" + # The checksum matches the data so split the response into each piece of data. 
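+            # Packet layout, inferred from the 19-byte read above: a 1-byte
+            # header, four 32-bit floats, and a 2-byte CRC-16 checksum.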
+        # For info on what these letters mean: https://docs.python.org/2/library/struct.html#format-characters
+        data = struct.unpack('<BffffH', response)
+        return np.array([data[1], data[2], data[3], data[4]])
diff --git a/drivers/sub8_sonar/package.xml b/drivers/sub8_sonar/package.xml
new file mode 100644
--- /dev/null
+++ b/drivers/sub8_sonar/package.xml
@@ -0,0 +1,18 @@
+<?xml version="1.0"?>
+<package>
+  <name>sub8_sonar</name>
+  <version>1.0.0</version>
+  <description>The sub8_sonar package</description>
+
+  <maintainer>Matthew Langford</maintainer>
+  <maintainer>David Soto</maintainer>
+
+  <license>MIT</license>
+
+  <buildtool_depend>catkin</buildtool_depend>
+  <build_depend>rospy</build_depend>
+  <run_depend>rospy</run_depend>
+
+  <export>
+  </export>
+</package>
From c55a351213ff5dbc12969a6b054ae271cf9a6c16 Mon Sep 17 00:00:00 2001
From: Santiago
Date: Mon, 6 Jun 2016 01:09:31 -0400
Subject: [PATCH 052/267] SONAR: fix echolocation algorithm, now working with
 simulated pulses in most cases, add function to test on data from files

---
 drivers/sub8_sonar/README.md             |  3 +-
 drivers/sub8_sonar/nodes/sonar_driver.py | 94 +++++++++++++++++-------
 2 files changed, 70 insertions(+), 27 deletions(-)

diff --git a/drivers/sub8_sonar/README.md b/drivers/sub8_sonar/README.md
index 31b6b67f..7e808471 100644
--- a/drivers/sub8_sonar/README.md
+++ b/drivers/sub8_sonar/README.md
@@ -11,7 +11,6 @@ To start the driver, run:
 
 In order to ask for hydrophone information:
 
-    rosservice call /sonar/get_pinger_pulse
+    rosservice call /sonar/get_pinger_pulse *double_tab*
 
-*Note: You may have to press tab twice at the end to get a response, since I haven't been able to test it.*
 The service should respond with the x, y, z, and t of the emission of the last heard pinger pulse.
diff --git a/drivers/sub8_sonar/nodes/sonar_driver.py b/drivers/sub8_sonar/nodes/sonar_driver.py
index 59fb50a8..8ae49e54 100755
--- a/drivers/sub8_sonar/nodes/sonar_driver.py
+++ b/drivers/sub8_sonar/nodes/sonar_driver.py
@@ -3,6 +3,7 @@
 import rosparam
 
 import numpy as np
+from scipy import optimize
 
 from sub8_msgs.srv import Sonar, SonarResponse
 from sub8_ros_tools import thread_lock, make_header
@@ -42,14 +43,13 @@ def __init__(self, hydrophone_locations, port, baud=9600):
             action_required=False,
             severity=2
         )
-        self.locate_pulse_error_alarm = alarm_broadcaster.add_alarm(
-            name='sonar_pulse_locating_error',
-            action_required=False,
-            severity=2
-        )
 
-        self.ser = serial.Serial(port=port, baudrate=baud, timeout=None)
-        self.ser.flushInput()
+        try:
+            self.ser = serial.Serial(port=port, baudrate=baud, timeout=None)
+            self.ser.flushInput()
+        except Exception, e:
+            print "\x1b[31mSonar serial connection error:\n\t", e, "\x1b[0m"
+            return None
 
         self.hydrophone_locations = hydrophone_locations
         self.sonar_sensor = EchoLocator(hydrophone_locations, wave_propagation_speed=1484)  # speed in m/s
@@ -71,9 +71,9 @@ def listener(self):
             print "Found!"
             # The checksum matches the data so split the response into each piece of data.
             # For info on what these letters mean: https://docs.python.org/2/library/struct.html#format-characters
-            data = struct.unpack('<BffffH', response)
-            return np.array([data[1], data[2], data[3], data[4]])
+            data = struct.unpack('>BffffH', response)
+            return np.array([data[4], data[1], data[2], data[3] ])
         else:
             self.packet_error_alarm.raise_alarm(
                 problem_description="Sonar board checksum error.",
@@ -88,17 +88,17 @@ def request_data(self, srv):
         '''
         A polling packet consists of only a header and checksum (CRC-16):
           HEADER     CHECKSUM
-        [ 0xAA |  0x50 | 0xF5 ]
+        [ 0x41 |       ]
         '''
         self.ser.flushInput()
 
-        message = struct.pack('B', HEADER)
+        message = struct.pack('B', 0x41)
         message += self.CRC(message)
 
         rospy.loginfo("Writing: %s" % binascii.hexlify(message))
         try:
-            self.ser.write(message)
+            self.ser.write('A')
             print "Sent!"
         except:  # Except only serial errors in the future.
             self.disconnection_alarm.raise_alarm(
@@ -112,7 +112,7 @@ def CRC(self, message):
        # You may have to change the checksum type.
        # Check the crc16 module online to see how to do that.
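        # crc16.crc16xmodem seeded with 0xFFFF computes the CRC-16/CCITT-FALSE
        # variant; the seed must match whatever the hydrophone board firmware uses.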
crc = crc16.crc16xmodem(message, 0xFFFF) - return struct.pack('H', crc) + return struct.pack('>H', crc) def check_CRC(self, message): ''' @@ -124,7 +124,7 @@ def check_CRC(self, message): crc = crc16.crc16xmodem(raw_message, 0xFFFF) # If the two match the message was correct - if crc == struct.unpack('H', msg_checksum)[0]: + if crc == struct.unpack('>H', msg_checksum)[0]: return True else: return False @@ -135,25 +135,44 @@ class EchoLocator(object): the pulse by individual sensors. ''' def __init__(self, hydrophone_locations, wave_propagation_speed): # speed in m/s - self.hydrophone_locations = np.array() + self.hydrophone_locations = np.array([1, 1, 1]) # just for apending, will not be included self.wave_propagation_speed = wave_propagation_speed for key in hydrophone_locations: sensor_location = np.array([hydrophone_locations[key]['x'], hydrophone_locations[key]['y'], hydrophone_locations[key]['z']]) - self.hydrophone_locations = np.vstack(self.hydrophone_locations, sensor_location) + self.hydrophone_locations = np.vstack((self.hydrophone_locations, sensor_location)) + self.hydrophone_locations = self.hydrophone_locations[1:] + + alarm_broadcaster = AlarmBroadcaster() + self.locate_pulse_error_alarm = alarm_broadcaster.add_alarm( + name='sonar_pulse_locating_error', + action_required=False, + severity=2 + ) def getPulseLocation(self, timestamps): ''' Returns a ros message with the location and time of emission of a pinger pulse. ''' - assert timestamps.size == self.hydrophone_locations.size[0] + assert timestamps.size == self.hydrophone_locations.shape[0] self.timestamps = timestamps - init_guess = np.array([0,0,0,0]) + print self.timestamps + init_guess = np.array([0, 0, 0]) + for idx in range(1,4): + if(timestamps[idx] > 0): + init_guess = init_guess - self.hydrophone_locations[idx] + else: + init_guess = init_guess + self.hydrophone_locations[idx] + print "Init guess:", init_guess opt = {'disp': True} opt_method = 'Nelder-Mead' result = optimize.minimize(self._getCost, init_guess, method=opt_method, options=opt) pulse_location = result.x[:3] if(result.success): - resp_data = HydroListenResponse() + resp_data = SonarResponse() + resp_data.x = result.x[0] + resp_data.y = result.x[1] + resp_data.z = result.x[2] + resp_data.t = -np.sqrt(np.sum(np.square(result.x))) / self.wave_propagation_speed return resp_data else: self.locate_pulse_error_alarm.raise_alarm( @@ -165,20 +184,45 @@ def getPulseLocation(self, timestamps): ) return None + def _getCost(self, potential_pulse): ''' Compares the timestamps that would have been generated by $(potential_pulse) with the actual observed timestamps. Close matches generate low cost values. 
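
For example, the same time-difference-of-arrival cost can be written as a self-contained function (a sketch assuming positions in meters, timestamps taken relative to the reference hydrophone, and wave speed c in m/s):

    import numpy as np

    def tdoa_cost(pulse, hydrophone_locations, timestamps, c=1484.0):
        '''Sum of squared residuals between predicted and observed
        differential times of arrival, hydrophone 0 as reference.'''
        ref_tof = np.linalg.norm(pulse - hydrophone_locations[0]) / c
        residuals = [np.linalg.norm(pulse - h) / c - ref_tof - t
                     for h, t in zip(hydrophone_locations[1:], timestamps[1:])]
        return np.sum(np.square(residuals))
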
''' cost = 0 - for row in xrange(self.hydrophone_locations.shape[0]): - distance = np.sqrt(np.sum(np.square(potential_pulse[:3] - self.hydrophone_locations[row,:]))) + dist_to_ref = np.sqrt(np.sum(np.square(potential_pulse - self.hydrophone_locations[0]))) + ref_time_of_flight = dist_to_ref / self.wave_propagation_speed + for row in xrange(1, self.hydrophone_locations.shape[0]): + distance = np.sqrt(np.sum(np.square(potential_pulse - self.hydrophone_locations[row]))) pulse_time_of_flight = distance / self.wave_propagation_speed - cost = cost + self.timestamps[row] - potential_pulse[3] - pulse_time_of_flight + expected_tstamp = pulse_time_of_flight - ref_time_of_flight + cost = cost + np.square(expected_tstamp - self.timestamps[row]) + # print "Cost:", cost, "Pulse:", potential_pulse return cost +def testFile(filename): + ''' + Runs the trilateration algorithm on timestamps written to a file in the following format: + [ ref_tstamp tstamp1 tstamp2 tstamp3 ] + lines that do not begin with '[' are ignored + ''' + hydrophone_locations = rospy.get_param('~/sonar_driver/hydrophones') #DBG + locator = EchoLocator(hydrophone_locations, 1484) + with open(filename, "r") as data_file: + for line in data_file: + if line[0] == '[': + words =line.split() + timestamps = [] + for word in words: + if word[0] != "[" and word[0] != "]": + timestamps += [eval(word)] + print locator.getPulseLocation(np.array(timestamps)), "\n" + + if __name__ == "__main__": - d = Sub8Sonar(rospy.get_param('~/sonar_driver/port'), - rospy.get_param('~/sonar_driver/baud'), - rospy.get_param('~/sonar_driver/hydrophones')) + # d = Sub8Sonar(rospy.get_param('~/sonar_driver/hydrophones'), + # rospy.get_param('~/sonar_driver/port'), + # rospy.get_param('~/sonar_driver/baud')) + testFile("/home/santiago/bags/SonarTestData.txt") \ No newline at end of file From f9096765eaf53cc8a142cf4d3194f111cd43df0a Mon Sep 17 00:00:00 2001 From: Jacob Panikulam Date: Fri, 10 Jun 2016 20:06:21 -0400 Subject: [PATCH 053/267] ML: Add actual_wrench publisher - Publish the actual actuated wrench (/wrench is unbounded and generally insane during normal operation) - Add a mission for gathering motion data --- utils/sub8_ros_tools/sub8_ros_tools/msg_helpers.py | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/utils/sub8_ros_tools/sub8_ros_tools/msg_helpers.py b/utils/sub8_ros_tools/sub8_ros_tools/msg_helpers.py index 29fca81f..2897a31a 100644 --- a/utils/sub8_ros_tools/sub8_ros_tools/msg_helpers.py +++ b/utils/sub8_ros_tools/sub8_ros_tools/msg_helpers.py @@ -90,6 +90,12 @@ def odometry_to_numpy(odom): return pose, twist, pose_covariance, twist_covariance +def wrench_to_numpy(wrench): + force = rosmsg_to_numpy(wrench.force) + torque = rosmsg_to_numpy(wrench.torque) + return force, torque + + def numpy_to_point(np_vector): return geometry_msgs.Point(*np_vector) @@ -104,6 +110,13 @@ def numpy_to_twist(linear_vel, angular_vel): return geometry_msgs.Twist(linear=geometry_msgs.Vector3(*linear_vel), angular=geometry_msgs.Vector3(*angular_vel)) +def numpy_to_wrench(forcetorque): + return geometry_msgs.Wrench( + force=geometry_msgs.Vector3(*forcetorque[:3]), + torque=geometry_msgs.Vector3(*forcetorque[3:]) + ) + + def numpy_matrix_to_quaternion(np_matrix): '''Given a 3x3 rotation matrix, return its geometry_msgs Quaternion''' assert np_matrix.shape == (3, 3), "Must submit 3x3 rotation matrix" @@ -143,7 +156,6 @@ def make_wrench_stamped(force, torque, frame='/body'): '''Make a WrenchStamped message without all the fuss Frame defaults to 
body ''' - wrench = geometry_msgs.WrenchStamped( header=make_header(frame), wrench=geometry_msgs.Wrench( From d92a9327c8de226a24c0a8f21ff34d3d0eebad8b Mon Sep 17 00:00:00 2001 From: DSsoto Date: Sat, 18 Jun 2016 00:17:25 -0400 Subject: [PATCH 054/267] SONAR: create node to continuously call sonar service --- drivers/sub8_sonar/nodes/realtime_plotter.py | 26 ++++++++++++++++++ drivers/sub8_sonar/nodes/sonar_driver.py | 28 +++++++++++--------- 2 files changed, 41 insertions(+), 13 deletions(-) create mode 100755 drivers/sub8_sonar/nodes/realtime_plotter.py diff --git a/drivers/sub8_sonar/nodes/realtime_plotter.py b/drivers/sub8_sonar/nodes/realtime_plotter.py new file mode 100755 index 00000000..258e48ef --- /dev/null +++ b/drivers/sub8_sonar/nodes/realtime_plotter.py @@ -0,0 +1,26 @@ +#!/usr/bin/env python +import rospy +import rosparam + +import numpy as np +from scipy import optimize + +from sub8_msgs.srv import Sonar, SonarResponse + +import matplotlib.pyplot as plt +from mpl_toolkits.mplot3d import Axes3D +fig = plt.figure() +ax = fig.add_subplot(111, projection='3d') + +rospy.wait_for_service('~/sonar/get_pinger_pulse') +sonar = rospy.ServiceProxy('~/sonar/get_pinger_pulse', Sonar) +print sonar, "serv proxy established" +i = 0 +while(True): + i += 1 + print "i:", i + pinger_pose = sonar() + print pinger_pose + ax.scatter(pinger_pose.x, pinger_pose.y, pinger_pose.z) + plt.draw() + plt.pause(0.5) \ No newline at end of file diff --git a/drivers/sub8_sonar/nodes/sonar_driver.py b/drivers/sub8_sonar/nodes/sonar_driver.py index 8ae49e54..0b219cbe 100755 --- a/drivers/sub8_sonar/nodes/sonar_driver.py +++ b/drivers/sub8_sonar/nodes/sonar_driver.py @@ -45,7 +45,7 @@ def __init__(self, hydrophone_locations, port, baud=9600): ) try: - self.ser = serial.Serial(port=port, baudrate=baud, timeout=None) + self.ser = serial.Serial(port=port, baudrate=baud, timeout=.5) self.ser.flushInput() except Exception, e: print "\x1b[31mSonar serial connection error:\n\t", e, "\x1b[0m" @@ -55,7 +55,9 @@ def __init__(self, hydrophone_locations, port, baud=9600): self.sonar_sensor = EchoLocator(hydrophone_locations, wave_propagation_speed=1484) # speed in m/s rospy.Service('~/sonar/get_pinger_pulse', Sonar, self.request_data) - + while not rospy.is_shutdown(): + self.request_data(None) + time.sleep(.5) rospy.spin() def listener(self): @@ -67,6 +69,9 @@ def listener(self): # We've found a packet now disect it. response = self.ser.read(19) rospy.loginfo("Received: %s" % response) + + if response == "": + return if self.check_CRC(response): print "Found!" # The checksum matches the data so split the response into each piece of data. @@ -83,7 +88,7 @@ def listener(self): ) return None - @thread_lock(lock) + #@thread_lock(lock) def request_data(self, srv): ''' A polling packet consists of only a header and checksum (CRC-16): @@ -91,11 +96,7 @@ def request_data(self, srv): [ 0x41 | ] ''' self.ser.flushInput() - - message = struct.pack('B', 0x41) - message += self.CRC(message) - - rospy.loginfo("Writing: %s" % binascii.hexlify(message)) + self.ser.flushOutput() try: self.ser.write('A') @@ -106,7 +107,8 @@ def request_data(self, srv): ) return False - return self.sonar_sensor.getPulseLocation(self.listener()) + + return self.listener()#self.sonar_sensor.getPulseLocation(self.listener()) def CRC(self, message): # You may have to change the checksum type. 
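
A usage sketch for the wrench helpers added to msg_helpers above, assuming they are exported alongside the other helpers and that the 6-vector is ordered [fx, fy, fz, tx, ty, tz]:

    import numpy as np
    from sub8_ros_tools.msg_helpers import numpy_to_wrench, wrench_to_numpy

    wrench = numpy_to_wrench(np.array([0.0, 0.0, -9.8, 0.0, 0.0, 0.1]))
    force, torque = wrench_to_numpy(wrench)  # round-trips to two 3-vectors
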
@@ -222,7 +224,7 @@ def testFile(filename): if __name__ == "__main__": - # d = Sub8Sonar(rospy.get_param('~/sonar_driver/hydrophones'), - # rospy.get_param('~/sonar_driver/port'), - # rospy.get_param('~/sonar_driver/baud')) - testFile("/home/santiago/bags/SonarTestData.txt") \ No newline at end of file + d = Sub8Sonar(rospy.get_param('~/sonar_driver/hydrophones'), + rospy.get_param('~/sonar_driver/port'), + rospy.get_param('~/sonar_driver/baud')) + # testFile("/home/santiago/bags/SonarTestData.txt") \ No newline at end of file From c2c2852ef3554be7afe560fd35974dbf9940637d Mon Sep 17 00:00:00 2001 From: DSsoto Date: Sun, 19 Jun 2016 02:12:46 -0400 Subject: [PATCH 055/267] SONAR: improve plotter node --- drivers/sub8_sonar/nodes/plotter.py | 31 +++++++++++++++++++++++++++++ 1 file changed, 31 insertions(+) create mode 100755 drivers/sub8_sonar/nodes/plotter.py diff --git a/drivers/sub8_sonar/nodes/plotter.py b/drivers/sub8_sonar/nodes/plotter.py new file mode 100755 index 00000000..45791aef --- /dev/null +++ b/drivers/sub8_sonar/nodes/plotter.py @@ -0,0 +1,31 @@ +#!/usr/bin/env python +import rospy +import rosparam + +import numpy as np +from scipy import optimize + +from sub8_msgs.srv import Sonar, SonarResponse + +import matplotlib.pyplot as plt +from mpl_toolkits.mplot3d import Axes3D +fig = plt.figure() +ax = fig.add_subplot(111, projection='3d') + +print "Waiting for sonar service" +rospy.wait_for_service('~/sonar/get_pinger_pulse') +sonar = rospy.ServiceProxy('~/sonar/get_pinger_pulse', Sonar) +print "sonar serv proxy created" +#plt.xlim(-1.0, 1.0) +#plt.ylim(-1.0, 1.0) + +try: + while(True): + pinger_pose = sonar() + print "x:", str(pinger_pose.x).rjust(15), "y:", str(pinger_pose.y).rjust(15), "z:", str(pinger_pose.z).rjust(15) + ax.scatter(pinger_pose.x, pinger_pose.y, pinger_pose.z) + plt.draw() + plt.pause(0.1) +except KeyboardInterrupt: + plt.close('all') + print "\nshutting down plotter" From 3791069a7553615d2b39d45cb056614ca2795e29 Mon Sep 17 00:00:00 2001 From: DSsoto Date: Sun, 19 Jun 2016 02:17:56 -0400 Subject: [PATCH 056/267] SONAR: change algorithm for Echolocation --- drivers/sub8_sonar/nodes/sonar_driver.py | 96 +++++++++++++----------- 1 file changed, 54 insertions(+), 42 deletions(-) diff --git a/drivers/sub8_sonar/nodes/sonar_driver.py b/drivers/sub8_sonar/nodes/sonar_driver.py index 0b219cbe..139800c3 100755 --- a/drivers/sub8_sonar/nodes/sonar_driver.py +++ b/drivers/sub8_sonar/nodes/sonar_driver.py @@ -19,6 +19,9 @@ lock = threading.Lock() HEADER = 0xAA +import sys +CURSOR_UP_ONE = '\x1b[1A' +ERASE_LINE = '\x1b[2K' class Sub8Sonar(): ''' @@ -45,19 +48,16 @@ def __init__(self, hydrophone_locations, port, baud=9600): ) try: - self.ser = serial.Serial(port=port, baudrate=baud, timeout=.5) + self.ser = serial.Serial(port=port, baudrate=baud, timeout=None) self.ser.flushInput() except Exception, e: print "\x1b[31mSonar serial connection error:\n\t", e, "\x1b[0m" return None self.hydrophone_locations = hydrophone_locations - self.sonar_sensor = EchoLocator(hydrophone_locations, wave_propagation_speed=1484) # speed in m/s + self.sonar_sensor = EchoLocator(hydrophone_locations, c=1484) # speed of sound in m/s rospy.Service('~/sonar/get_pinger_pulse', Sonar, self.request_data) - while not rospy.is_shutdown(): - self.request_data(None) - time.sleep(.5) rospy.spin() def listener(self): @@ -68,12 +68,9 @@ def listener(self): # We've found a packet now disect it. 
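
This hunk reverts the serial timeout to blocking reads; whenever a timeout is in effect (as in the previous patch's `timeout=.5`), `ser.read(19)` can return fewer than 19 bytes, so a short read must be rejected before it reaches `struct.unpack`. A sketch of that guard, with illustrative names:

    def read_packet(ser, size=19):
        '''Read one fixed-size packet; return None on timeout or short read.'''
        response = ser.read(size)
        if len(response) != size:
            return None
        return response
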
response = self.ser.read(19) - rospy.loginfo("Received: %s" % response) - - if response == "": - return + # rospy.loginfo("Received: %s" % response) #uncomment for debugging if self.check_CRC(response): - print "Found!" + print "Heard response!\n" # The checksum matches the data so split the response into each piece of data. # For info on what these letters mean: https://docs.python.org/2/library/struct.html#format-characters data = struct.unpack('>BffffH', response) @@ -88,7 +85,7 @@ def listener(self): ) return None - #@thread_lock(lock) + @thread_lock(lock) def request_data(self, srv): ''' A polling packet consists of only a header and checksum (CRC-16): @@ -96,19 +93,21 @@ def request_data(self, srv): [ 0x41 | ] ''' self.ser.flushInput() - self.ser.flushOutput() + + message = struct.pack('B', 0x41) + message += self.CRC(message) + + # rospy.loginfo("Writing: %s" % binascii.hexlify(message)) # uncomment for debugging try: self.ser.write('A') - print "Sent!" + print "Sent request..." except: # Except only serial errors in the future. self.disconnection_alarm.raise_alarm( problem_description="Sonar board serial connection has been terminated." ) return False - - - return self.listener()#self.sonar_sensor.getPulseLocation(self.listener()) + return self.sonar_sensor.getPulseLocation(self.listener()) def CRC(self, message): # You may have to change the checksum type. @@ -135,10 +134,11 @@ class EchoLocator(object): ''' Identifies the origin of a pulse in time and space given stamps of the time of detection of the pulse by individual sensors. + c is the wave propagation speed in the medium of operation ''' - def __init__(self, hydrophone_locations, wave_propagation_speed): # speed in m/s + def __init__(self, hydrophone_locations, c): # speed in m/s self.hydrophone_locations = np.array([1, 1, 1]) # just for apending, will not be included - self.wave_propagation_speed = wave_propagation_speed + self.c = c for key in hydrophone_locations: sensor_location = np.array([hydrophone_locations[key]['x'], hydrophone_locations[key]['y'], hydrophone_locations[key]['z']]) self.hydrophone_locations = np.vstack((self.hydrophone_locations, sensor_location)) @@ -157,26 +157,27 @@ def getPulseLocation(self, timestamps): ''' assert timestamps.size == self.hydrophone_locations.shape[0] self.timestamps = timestamps + delete_last_lines(4) print self.timestamps init_guess = np.array([0, 0, 0]) - for idx in range(1,4): - if(timestamps[idx] > 0): - init_guess = init_guess - self.hydrophone_locations[idx] - else: - init_guess = init_guess + self.hydrophone_locations[idx] - print "Init guess:", init_guess - opt = {'disp': True} + # for idx in range(1,4): + # if(timestamps[idx] > 0): + # init_guess = init_guess - self.hydrophone_locations[idx] + # else: + # init_guess = init_guess + self.hydrophone_locations[idx] + # print "Init guess:", init_guess + opt = {'disp': 1} opt_method = 'Nelder-Mead' result = optimize.minimize(self._getCost, init_guess, method=opt_method, options=opt) - pulse_location = result.x[:3] + resp_data = SonarResponse() if(result.success): - resp_data = SonarResponse() resp_data.x = result.x[0] resp_data.y = result.x[1] resp_data.z = result.x[2] - resp_data.t = -np.sqrt(np.sum(np.square(result.x))) / self.wave_propagation_speed - return resp_data else: + resp_data.x = 0 + resp_data.y = 0 + resp_data.z = 0 self.locate_pulse_error_alarm.raise_alarm( problem_description=("SciPy optimize, using method '" + opt_method + "', failed to converge on a pinger pulse location."), @@ -184,7 +185,7 @@ def 
getPulseLocation(self, timestamps): 'fault_info': {'data': result.message} } ) - return None + return resp_data def _getCost(self, potential_pulse): @@ -193,17 +194,23 @@ def _getCost(self, potential_pulse): actual observed timestamps. Close matches generate low cost values. ''' cost = 0 - dist_to_ref = np.sqrt(np.sum(np.square(potential_pulse - self.hydrophone_locations[0]))) - ref_time_of_flight = dist_to_ref / self.wave_propagation_speed - for row in xrange(1, self.hydrophone_locations.shape[0]): - distance = np.sqrt(np.sum(np.square(potential_pulse - self.hydrophone_locations[row]))) - pulse_time_of_flight = distance / self.wave_propagation_speed - expected_tstamp = pulse_time_of_flight - ref_time_of_flight - cost = cost + np.square(expected_tstamp - self.timestamps[row]) - # print "Cost:", cost, "Pulse:", potential_pulse + t = self.timestamps + x0 = self.hydrophone_locations[0,0] + y0 = self.hydrophone_locations[0,1] + z0 = self.hydrophone_locations[0,2] + x = potential_pulse[0] + y = potential_pulse[1] + z = potential_pulse[2] + d0 = np.sqrt((x0 - x)**2 + (y0 - x)**2 + (z0 - x)**2) + print "hydrophone info cols:", self.hydrophone_locations.shape[1] + for i in xrange(1, self.hydrophone_locations.shape[0]): + xi = self.hydrophone_locations[i,0] + yi = self.hydrophone_locations[i,1] + zi = self.hydrophone_locations[i,2] + di = np.sqrt((xi - x)**2 + (yi - x)**2 + (zi - x)**2) + cost = cost + (di - d0 - self.c * t[i])**2 return cost - def testFile(filename): ''' Runs the trilateration algorithm on timestamps written to a file in the following format: @@ -222,9 +229,14 @@ def testFile(filename): timestamps += [eval(word)] print locator.getPulseLocation(np.array(timestamps)), "\n" +def delete_last_lines(n=1): + for _ in range(n): + sys.stdout.write(CURSOR_UP_ONE) + sys.stdout.write(ERASE_LINE) + if __name__ == "__main__": d = Sub8Sonar(rospy.get_param('~/sonar_driver/hydrophones'), - rospy.get_param('~/sonar_driver/port'), - rospy.get_param('~/sonar_driver/baud')) - # testFile("/home/santiago/bags/SonarTestData.txt") \ No newline at end of file + "/dev/serial/by-id/usb-FTDI_FT232R_USB_UART_AH02X4IE-if00-port0", + 19200) + #testFile("/home/santiago/bags/SonarTestData.txt") \ No newline at end of file From 2c8d51ce76f21fc261f43c868e123d38fdd31c91 Mon Sep 17 00:00:00 2001 From: DSsoto Date: Wed, 22 Jun 2016 11:29:13 -0400 Subject: [PATCH 057/267] SONAR: add testing node with launch file --- drivers/sub8_sonar/launch/test.launch | 10 +++++ drivers/sub8_sonar/nodes/sonar_driver.py | 42 ++++++++--------- drivers/sub8_sonar/nodes/sonar_test.py | 57 ++++++++++++++++++++++++ 3 files changed, 88 insertions(+), 21 deletions(-) create mode 100644 drivers/sub8_sonar/launch/test.launch create mode 100755 drivers/sub8_sonar/nodes/sonar_test.py diff --git a/drivers/sub8_sonar/launch/test.launch b/drivers/sub8_sonar/launch/test.launch new file mode 100644 index 00000000..1d57065a --- /dev/null +++ b/drivers/sub8_sonar/launch/test.launch @@ -0,0 +1,10 @@ + + + + { hydro0: {x: 0, y: 0, z: 0}, + hydro1: {x: -0.0254, y: 0, z: 0.0254}, + hydro2: {x: 0.0254, y: 0, z: 0}, + hydro3: {x: 0, y: -0.0254, z: 0} } + + + \ No newline at end of file diff --git a/drivers/sub8_sonar/nodes/sonar_driver.py b/drivers/sub8_sonar/nodes/sonar_driver.py index 139800c3..d72ec3f8 100755 --- a/drivers/sub8_sonar/nodes/sonar_driver.py +++ b/drivers/sub8_sonar/nodes/sonar_driver.py @@ -70,12 +70,13 @@ def listener(self): response = self.ser.read(19) # rospy.loginfo("Received: %s" % response) #uncomment for debugging if 
self.check_CRC(response): - print "Heard response!\n" + delete_last_lines(2) # Heard response! # The checksum matches the data so split the response into each piece of data. # For info on what these letters mean: https://docs.python.org/2/library/struct.html#format-characters data = struct.unpack('>BffffH', response) - - return np.array([data[4], data[1], data[2], data[3] ]) + timestamps = np.array([data[4], data[1], data[2], data[3] ]) + print timestamps + return timestamps else: self.packet_error_alarm.raise_alarm( problem_description="Sonar board checksum error.", @@ -136,6 +137,7 @@ class EchoLocator(object): the pulse by individual sensors. c is the wave propagation speed in the medium of operation ''' + # hydrophone locations should be the dict returned by rospy.get_param('~//hydrophones def __init__(self, hydrophone_locations, c): # speed in m/s self.hydrophone_locations = np.array([1, 1, 1]) # just for apending, will not be included self.c = c @@ -157,18 +159,11 @@ def getPulseLocation(self, timestamps): ''' assert timestamps.size == self.hydrophone_locations.shape[0] self.timestamps = timestamps - delete_last_lines(4) - print self.timestamps - init_guess = np.array([0, 0, 0]) - # for idx in range(1,4): - # if(timestamps[idx] > 0): - # init_guess = init_guess - self.hydrophone_locations[idx] - # else: - # init_guess = init_guess + self.hydrophone_locations[idx] - # print "Init guess:", init_guess + init_guess = np.array([1, 1, 1]) opt = {'disp': 1} - opt_method = 'Nelder-Mead' - result = optimize.minimize(self._getCost, init_guess, method=opt_method, options=opt) + opt_method = 'Powell' + result = optimize.minimize(self._getCost, init_guess, method=opt_method, options=opt, tol=1e-15) + print result.message resp_data = SonarResponse() if(result.success): resp_data.x = result.x[0] @@ -187,11 +182,16 @@ def getPulseLocation(self, timestamps): ) return resp_data - def _getCost(self, potential_pulse): ''' - Compares the timestamps that would have been generated by $(potential_pulse) with the - actual observed timestamps. Close matches generate low cost values. + Compares the difference in observed and theoretical difference in time of arrival + between the hydrophones and the reference hydrophone for potential source origins. + + Note: when there are 4 timestamps (not including reference), this cost is convex + SciPy Optimize will converge on the correct source origin. 
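
For example, the optimizer settings used by getPulseLocation above can be exercised on a stand-in cost (a sketch; cost_function here is a dummy convex bowl, not the driver's method):

    import numpy as np
    from scipy import optimize

    def cost_function(p):  # stand-in for self._getCost
        return np.sum(np.square(p - np.array([3.0, -2.0, -5.0])))

    result = optimize.minimize(cost_function, np.array([1, 1, 1]),
                               method='Powell', tol=1e-15, options={'disp': 1})
    if result.success:
        x, y, z = result.x  # recovers [3, -2, -5]
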
+ With only 3 time stamps, minimization methods will not convrge to the correct + result, however, repeating the process for the source in the same location, + all of the results from minimizing this cost lie on a single 3D line ''' cost = 0 t = self.timestamps @@ -202,18 +202,18 @@ def _getCost(self, potential_pulse): y = potential_pulse[1] z = potential_pulse[2] d0 = np.sqrt((x0 - x)**2 + (y0 - x)**2 + (z0 - x)**2) - print "hydrophone info cols:", self.hydrophone_locations.shape[1] for i in xrange(1, self.hydrophone_locations.shape[0]): xi = self.hydrophone_locations[i,0] yi = self.hydrophone_locations[i,1] zi = self.hydrophone_locations[i,2] di = np.sqrt((xi - x)**2 + (yi - x)**2 + (zi - x)**2) - cost = cost + (di - d0 - self.c * t[i])**2 + hydro_i_cost = (di - d0 - self.c * t[i])**2 + cost = cost + hydro_i_cost return cost def testFile(filename): ''' - Runs the trilateration algorithm on timestamps written to a file in the following format: + Runs the multilateration algorithm on timestamps written to a file in the following format: [ ref_tstamp tstamp1 tstamp2 tstamp3 ] lines that do not begin with '[' are ignored ''' @@ -239,4 +239,4 @@ def delete_last_lines(n=1): d = Sub8Sonar(rospy.get_param('~/sonar_driver/hydrophones'), "/dev/serial/by-id/usb-FTDI_FT232R_USB_UART_AH02X4IE-if00-port0", 19200) - #testFile("/home/santiago/bags/SonarTestData.txt") \ No newline at end of file + # testFile("/home/santiago/bags/SonarTestData.txt") \ No newline at end of file diff --git a/drivers/sub8_sonar/nodes/sonar_test.py b/drivers/sub8_sonar/nodes/sonar_test.py new file mode 100755 index 00000000..b86c65f7 --- /dev/null +++ b/drivers/sub8_sonar/nodes/sonar_test.py @@ -0,0 +1,57 @@ +#!/usr/bin/env python +import numpy as np +import rospy +import rosparam +import random +from sonar_driver import EchoLocator + +class SonarSim(object): + def __init__(self, hydrophone_locations, wave_propagation_speed_mps): + self.c = wave_propagation_speed_mps + self.hydrophone_locations = np.array([0, 0, 0]) + for key in hydrophone_locations: + sensor_location = np.array([hydrophone_locations[key]['x'], hydrophone_locations[key]['y'], hydrophone_locations[key]['z']]) + self.hydrophone_locations = np.vstack((self.hydrophone_locations, sensor_location)) + self.hydrophone_locations = self.hydrophone_locations[1:] + + def listen(self, pulse): + timestamps = [] + for idx in range(4): + src_range = np.sqrt(sum(np.square(pulse.position() - self.hydrophone_locations[idx]))) + timestamps += [pulse.t + src_range / self.c] + return np.array(timestamps) + +class Pulse(object): + def __init__(self, x, y, z, t): + self.x = x + self.y = y + self.z = z + self.t = t + + def position(self): + return np.array([self.x, self.y, self.z]) + + def __repr__(self): + return "Pulse:\t" + "x: " + str(self.x) + " y: " + str(self.y) + " z: " + str(self.z) + + +if __name__ == '__main__': + def print_red(obj): + print '\x1b[31m' + obj.__repr__() + '\x1b[0m' + + hydrophone_locations = rospy.get_param('~/sonar_test/hydrophones') + sonar = SonarSim(hydrophone_locations, 1484) + locator = EchoLocator(hydrophone_locations, 1484) + # pulses will be generated with x, y, z in range [-pulse_range, pulse_range + 1] + pulse_range = 10 + rand_args = [-pulse_range, pulse_range + 1] + for i in range(10): + pulse = Pulse(random.randrange(*rand_args), + random.randrange(*rand_args), + random.randrange(*rand_args), 0) + tstamps = sonar.listen(pulse) + tstamps = tstamps - tstamps[0] + print_red(pulse) + print "Simulated timestamps:\n\t", tstamps + response = 
locator.getPulseLocation(np.array(tstamps)) + print "Reconstructed Pulse:\n\t" + "x: " + str(response.x) + " y: " + str(response.y) + " z: " + str(response.z) \ No newline at end of file From 7e529864f98d69d18929e9ced86b83b365228ee7 Mon Sep 17 00:00:00 2001 From: DSsoto Date: Sun, 26 Jun 2016 01:18:26 -0400 Subject: [PATCH 058/267] SONAR: address PR163 comments --- drivers/sub8_sonar/nodes/plotter.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/drivers/sub8_sonar/nodes/plotter.py b/drivers/sub8_sonar/nodes/plotter.py index 45791aef..d1144ab2 100755 --- a/drivers/sub8_sonar/nodes/plotter.py +++ b/drivers/sub8_sonar/nodes/plotter.py @@ -20,12 +20,12 @@ #plt.ylim(-1.0, 1.0) try: - while(True): - pinger_pose = sonar() - print "x:", str(pinger_pose.x).rjust(15), "y:", str(pinger_pose.y).rjust(15), "z:", str(pinger_pose.z).rjust(15) - ax.scatter(pinger_pose.x, pinger_pose.y, pinger_pose.z) - plt.draw() - plt.pause(0.1) + while(True): + pinger_pose = sonar() + print "x:", str(pinger_pose.x).rjust(15), "y:", str(pinger_pose.y).rjust(15), "z:", str(pinger_pose.z).rjust(15) + ax.scatter(pinger_pose.x, pinger_pose.y, pinger_pose.z) + plt.draw() + plt.pause(0.1) except KeyboardInterrupt: - plt.close('all') - print "\nshutting down plotter" + plt.close('all') + print "\nshutting down plotter" From bd5583cb304888996c354c78a59b94a2eed93ebd Mon Sep 17 00:00:00 2001 From: mattlangford Date: Thu, 23 Jun 2016 22:42:17 -0400 Subject: [PATCH 059/267] SONAR: Exposed sonar_driver to outside world. --- drivers/sub8_sonar/CMakeLists.txt | 3 +++ drivers/sub8_sonar/setup.py | 11 ++++++++ drivers/sub8_sonar/sub8_sonar/__init__.py | 1 + drivers/sub8_sonar/sub8_sonar/plotter.py | 25 +++++++++++++++++++ .../{nodes => sub8_sonar}/sonar_driver.py | 3 +-- 5 files changed, 41 insertions(+), 2 deletions(-) create mode 100644 drivers/sub8_sonar/setup.py create mode 100644 drivers/sub8_sonar/sub8_sonar/__init__.py create mode 100755 drivers/sub8_sonar/sub8_sonar/plotter.py rename drivers/sub8_sonar/{nodes => sub8_sonar}/sonar_driver.py (99%) diff --git a/drivers/sub8_sonar/CMakeLists.txt b/drivers/sub8_sonar/CMakeLists.txt index 0a063ede..15603797 100644 --- a/drivers/sub8_sonar/CMakeLists.txt +++ b/drivers/sub8_sonar/CMakeLists.txt @@ -4,6 +4,9 @@ project(sub8_sonar) find_package(catkin REQUIRED COMPONENTS rospy ) + +catkin_python_setup() + catkin_package( # INCLUDE_DIRS include # LIBRARIES sub8_sonar diff --git a/drivers/sub8_sonar/setup.py b/drivers/sub8_sonar/setup.py new file mode 100644 index 00000000..997ca783 --- /dev/null +++ b/drivers/sub8_sonar/setup.py @@ -0,0 +1,11 @@ +# ! 
DO NOT MANUALLY INVOKE THIS setup.py, USE CATKIN INSTEAD + +from distutils.core import setup +from catkin_pkg.python_setup import generate_distutils_setup + +# fetch values from package.xml +setup_args = generate_distutils_setup( + packages=['sub8_sonar'], +) + +setup(**setup_args) diff --git a/drivers/sub8_sonar/sub8_sonar/__init__.py b/drivers/sub8_sonar/sub8_sonar/__init__.py new file mode 100644 index 00000000..772682e0 --- /dev/null +++ b/drivers/sub8_sonar/sub8_sonar/__init__.py @@ -0,0 +1 @@ +from sonar_driver import EchoLocator \ No newline at end of file diff --git a/drivers/sub8_sonar/sub8_sonar/plotter.py b/drivers/sub8_sonar/sub8_sonar/plotter.py new file mode 100755 index 00000000..89b5c2d7 --- /dev/null +++ b/drivers/sub8_sonar/sub8_sonar/plotter.py @@ -0,0 +1,25 @@ +import rospy +import rosparam + +import numpy as np +from scipy import optimize + +from sub8_msgs.srv import Sonar, SonarResponse + +import matplotlib.pyplot as plt +from mpl_toolkits.mplot3d import Axes3D +fig = plt.figure() +ax = fig.add_subplot(111, projection='3d') + +rospy.wait_for_service('~/sonar/get_pinger_pulse') +sonar = rospy.ServiceProxy('~/sonar/get_pinger_pulse', Sonar) +print sonar, "serv proxy established" +i = 0 +while(True): + i += 1 + print "i:", i + pinger_pose = sonar() + print pinger_pose + ax.scatter(pinger_pose.x, pinger_pose.y, pinger_pose.z) + plt.draw() + plt.pause(0.1) diff --git a/drivers/sub8_sonar/nodes/sonar_driver.py b/drivers/sub8_sonar/sub8_sonar/sonar_driver.py similarity index 99% rename from drivers/sub8_sonar/nodes/sonar_driver.py rename to drivers/sub8_sonar/sub8_sonar/sonar_driver.py index d72ec3f8..8b459c75 100755 --- a/drivers/sub8_sonar/nodes/sonar_driver.py +++ b/drivers/sub8_sonar/sub8_sonar/sonar_driver.py @@ -17,7 +17,6 @@ import crc16 lock = threading.Lock() -HEADER = 0xAA import sys CURSOR_UP_ONE = '\x1b[1A' @@ -239,4 +238,4 @@ def delete_last_lines(n=1): d = Sub8Sonar(rospy.get_param('~/sonar_driver/hydrophones'), "/dev/serial/by-id/usb-FTDI_FT232R_USB_UART_AH02X4IE-if00-port0", 19200) - # testFile("/home/santiago/bags/SonarTestData.txt") \ No newline at end of file + # testFile("/home/santiago/bags/SonarTestData.txt") From 61f43c6283252c092584c12d647698ed6a99c83f Mon Sep 17 00:00:00 2001 From: mattlangford Date: Tue, 28 Jun 2016 21:00:34 -0400 Subject: [PATCH 060/267] SIM: Update pinger sim to match current driver. 
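
The previous patch exposes the driver as an importable Python package; a minimal sketch of what that enables, run inside a ROS environment, with the hydrophone layout taken from test.launch earlier in this series:

    from sub8_sonar import EchoLocator

    hydrophone_locations = {
        'hydro0': {'x': 0.0,     'y': 0.0,     'z': 0.0},
        'hydro1': {'x': -0.0254, 'y': 0.0,     'z': 0.0254},
        'hydro2': {'x': 0.0254,  'y': 0.0,     'z': 0.0},
        'hydro3': {'x': 0.0,     'y': -0.0254, 'z': 0.0},
    }
    locator = EchoLocator(hydrophone_locations, 1484)  # c in m/s
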
--- drivers/sub8_sonar/nodes/plotter.py | 2 +- drivers/sub8_sonar/sub8_sonar/plotter.py | 25 ------------------------ 2 files changed, 1 insertion(+), 26 deletions(-) mode change 100755 => 100644 drivers/sub8_sonar/nodes/plotter.py delete mode 100755 drivers/sub8_sonar/sub8_sonar/plotter.py diff --git a/drivers/sub8_sonar/nodes/plotter.py b/drivers/sub8_sonar/nodes/plotter.py old mode 100755 new mode 100644 index d1144ab2..084d245c --- a/drivers/sub8_sonar/nodes/plotter.py +++ b/drivers/sub8_sonar/nodes/plotter.py @@ -28,4 +28,4 @@ plt.pause(0.1) except KeyboardInterrupt: plt.close('all') - print "\nshutting down plotter" + print "\nshutting down plotter" \ No newline at end of file diff --git a/drivers/sub8_sonar/sub8_sonar/plotter.py b/drivers/sub8_sonar/sub8_sonar/plotter.py deleted file mode 100755 index 89b5c2d7..00000000 --- a/drivers/sub8_sonar/sub8_sonar/plotter.py +++ /dev/null @@ -1,25 +0,0 @@ -import rospy -import rosparam - -import numpy as np -from scipy import optimize - -from sub8_msgs.srv import Sonar, SonarResponse - -import matplotlib.pyplot as plt -from mpl_toolkits.mplot3d import Axes3D -fig = plt.figure() -ax = fig.add_subplot(111, projection='3d') - -rospy.wait_for_service('~/sonar/get_pinger_pulse') -sonar = rospy.ServiceProxy('~/sonar/get_pinger_pulse', Sonar) -print sonar, "serv proxy established" -i = 0 -while(True): - i += 1 - print "i:", i - pinger_pose = sonar() - print pinger_pose - ax.scatter(pinger_pose.x, pinger_pose.y, pinger_pose.z) - plt.draw() - plt.pause(0.1) From 42a614a93d0f99140436207fd438f6a9860a2575 Mon Sep 17 00:00:00 2001 From: DSsoto Date: Wed, 29 Jun 2016 15:55:53 -0400 Subject: [PATCH 061/267] SONAR: change multilateration approach from least-squares to Bancroft's Algorithm --- drivers/sub8_sonar/nodes/plotter.py | 2 - drivers/sub8_sonar/nodes/realtime_plotter.py | 26 -- drivers/sub8_sonar/sub8_sonar/sonar_driver.py | 175 +++++++------ .../sub8_sonar/sonar_driver_least_squares.py | 241 ++++++++++++++++++ 4 files changed, 331 insertions(+), 113 deletions(-) mode change 100644 => 100755 drivers/sub8_sonar/nodes/plotter.py delete mode 100755 drivers/sub8_sonar/nodes/realtime_plotter.py create mode 100755 drivers/sub8_sonar/sub8_sonar/sonar_driver_least_squares.py diff --git a/drivers/sub8_sonar/nodes/plotter.py b/drivers/sub8_sonar/nodes/plotter.py old mode 100644 new mode 100755 index 084d245c..f81499d8 --- a/drivers/sub8_sonar/nodes/plotter.py +++ b/drivers/sub8_sonar/nodes/plotter.py @@ -16,8 +16,6 @@ rospy.wait_for_service('~/sonar/get_pinger_pulse') sonar = rospy.ServiceProxy('~/sonar/get_pinger_pulse', Sonar) print "sonar serv proxy created" -#plt.xlim(-1.0, 1.0) -#plt.ylim(-1.0, 1.0) try: while(True): diff --git a/drivers/sub8_sonar/nodes/realtime_plotter.py b/drivers/sub8_sonar/nodes/realtime_plotter.py deleted file mode 100755 index 258e48ef..00000000 --- a/drivers/sub8_sonar/nodes/realtime_plotter.py +++ /dev/null @@ -1,26 +0,0 @@ -#!/usr/bin/env python -import rospy -import rosparam - -import numpy as np -from scipy import optimize - -from sub8_msgs.srv import Sonar, SonarResponse - -import matplotlib.pyplot as plt -from mpl_toolkits.mplot3d import Axes3D -fig = plt.figure() -ax = fig.add_subplot(111, projection='3d') - -rospy.wait_for_service('~/sonar/get_pinger_pulse') -sonar = rospy.ServiceProxy('~/sonar/get_pinger_pulse', Sonar) -print sonar, "serv proxy established" -i = 0 -while(True): - i += 1 - print "i:", i - pinger_pose = sonar() - print pinger_pose - ax.scatter(pinger_pose.x, pinger_pose.y, pinger_pose.z) - plt.draw() - 
plt.pause(0.5) \ No newline at end of file diff --git a/drivers/sub8_sonar/sub8_sonar/sonar_driver.py b/drivers/sub8_sonar/sub8_sonar/sonar_driver.py index 8b459c75..543dc039 100755 --- a/drivers/sub8_sonar/sub8_sonar/sonar_driver.py +++ b/drivers/sub8_sonar/sub8_sonar/sonar_driver.py @@ -1,9 +1,11 @@ -#!/usr/bin/env python -import rospy -import rosparam +#!/usr/bin/python +from __future__ import division +import math import numpy as np -from scipy import optimize + +import rospy +import rosparam from sub8_msgs.srv import Sonar, SonarResponse from sub8_ros_tools import thread_lock, make_header @@ -73,7 +75,7 @@ def listener(self): # The checksum matches the data so split the response into each piece of data. # For info on what these letters mean: https://docs.python.org/2/library/struct.html#format-characters data = struct.unpack('>BffffH', response) - timestamps = np.array([data[4], data[1], data[2], data[3] ]) + timestamps = [data[4], data[1], data[2], data[3] ] print timestamps return timestamps else: @@ -140,10 +142,10 @@ class EchoLocator(object): def __init__(self, hydrophone_locations, c): # speed in m/s self.hydrophone_locations = np.array([1, 1, 1]) # just for apending, will not be included self.c = c + self.hydrophone_location = [] for key in hydrophone_locations: sensor_location = np.array([hydrophone_locations[key]['x'], hydrophone_locations[key]['y'], hydrophone_locations[key]['z']]) - self.hydrophone_locations = np.vstack((self.hydrophone_locations, sensor_location)) - self.hydrophone_locations = self.hydrophone_locations[1:] + self.hydrophone_locations += sensor_location alarm_broadcaster = AlarmBroadcaster() self.locate_pulse_error_alarm = alarm_broadcaster.add_alarm( @@ -156,86 +158,89 @@ def getPulseLocation(self, timestamps): ''' Returns a ros message with the location and time of emission of a pinger pulse. ''' - assert timestamps.size == self.hydrophone_locations.shape[0] - self.timestamps = timestamps - init_guess = np.array([1, 1, 1]) - opt = {'disp': 1} - opt_method = 'Powell' - result = optimize.minimize(self._getCost, init_guess, method=opt_method, options=opt, tol=1e-15) - print result.message - resp_data = SonarResponse() - if(result.success): - resp_data.x = result.x[0] - resp_data.y = result.x[1] - resp_data.z = result.x[2] - else: - resp_data.x = 0 - resp_data.y = 0 - resp_data.z = 0 - self.locate_pulse_error_alarm.raise_alarm( - problem_description=("SciPy optimize, using method '" + opt_method - + "', failed to converge on a pinger pulse location."), - parameters={ - 'fault_info': {'data': result.message} - } - ) - return resp_data - - def _getCost(self, potential_pulse): - ''' - Compares the difference in observed and theoretical difference in time of arrival - between the hydrophones and the reference hydrophone for potential source origins. - - Note: when there are 4 timestamps (not including reference), this cost is convex - SciPy Optimize will converge on the correct source origin. 
- With only 3 time stamps, minimization methods will not convrge to the correct - result, however, repeating the process for the source in the same location, - all of the results from minimizing this cost lie on a single 3D line - ''' - cost = 0 - t = self.timestamps - x0 = self.hydrophone_locations[0,0] - y0 = self.hydrophone_locations[0,1] - z0 = self.hydrophone_locations[0,2] - x = potential_pulse[0] - y = potential_pulse[1] - z = potential_pulse[2] - d0 = np.sqrt((x0 - x)**2 + (y0 - x)**2 + (z0 - x)**2) - for i in xrange(1, self.hydrophone_locations.shape[0]): - xi = self.hydrophone_locations[i,0] - yi = self.hydrophone_locations[i,1] - zi = self.hydrophone_locations[i,2] - di = np.sqrt((xi - x)**2 + (yi - x)**2 + (zi - x)**2) - hydro_i_cost = (di - d0 - self.c * t[i])**2 - cost = cost + hydro_i_cost - return cost - -def testFile(filename): - ''' - Runs the multilateration algorithm on timestamps written to a file in the following format: - [ ref_tstamp tstamp1 tstamp2 tstamp3 ] - lines that do not begin with '[' are ignored - ''' - hydrophone_locations = rospy.get_param('~/sonar_driver/hydrophones') #DBG - locator = EchoLocator(hydrophone_locations, 1484) - with open(filename, "r") as data_file: - for line in data_file: - if line[0] == '[': - words =line.split() - timestamps = [] - for word in words: - if word[0] != "[" and word[0] != "]": - timestamps += [eval(word)] - print locator.getPulseLocation(np.array(timestamps)), "\n" - -def delete_last_lines(n=1): - for _ in range(n): - sys.stdout.write(CURSOR_UP_ONE) - sys.stdout.write(ERASE_LINE) - + res = estimate_pos(reception_times, positions) + source = [x for x in res if x[2] < 0] # Assume that the source is below us + source = source[0] + response = SonarResponse() + response.x = source[0] + response.y = source[1] + response.z = source[2] + return response + +def quadratic(a, b, c): + discriminant = b*b - 4*a*c + if discriminant >= 0: + first_times_a = (-b+math.copysign(math.sqrt(discriminant), -b))/2 + return [first_times_a/a, c/first_times_a] + else: + return [] + +def estimate_pos(reception_times, positions): + assert len(positions) == len(reception_times) + + N = len(reception_times) + assert N >= 4 + + L = lambda a, b: a[0]*b[0] + a[1]*b[1] + a[2]*b[2] - a[3]*b[3] + + def get_B(delta): + B = np.zeros((N, 4)) + for i in xrange(N): + B[i] = np.concatenate([positions[i], [-reception_times[i]]]) + delta + return B + + delta = min([.1*np.random.randn(4) for i in xrange(10)], key=lambda delta: np.linalg.cond(get_B(delta))) + + B = get_B(delta) + a = np.array([0.5 * L(B[i], B[i]) for i in xrange(N)]) + e = np.ones(N) + + Bpe = np.linalg.lstsq(B, e)[0] + Bpa = np.linalg.lstsq(B, a)[0] + + Lambdas = quadratic( + L(Bpe, Bpe), + 2*(L(Bpa, Bpe) - 1), + L(Bpa, Bpa)) + if not Lambdas: return [] + + res = [] + for Lambda in Lambdas: + u = Bpa + Lambda * Bpe + position = u[:3] - delta[:3] + time = u[3] + delta[3] + + if any(reception_times[i] < time for i in xrange(N)): continue + + res.append(position) + + return res if __name__ == "__main__": d = Sub8Sonar(rospy.get_param('~/sonar_driver/hydrophones'), "/dev/serial/by-id/usb-FTDI_FT232R_USB_UART_AH02X4IE-if00-port0", 19200) - # testFile("/home/santiago/bags/SonarTestData.txt") + + +# @apply +# def main(): +# np.random.seed(1) + +# r = 0.0254 +# positions = map(np.array, [(0, +r, 0), (-r, 0, 0), (r, 0, 0), (0, -r, 0)]) + +# def valid(correct, solution): +# return np.linalg.norm(correct - solution) < 1e-2 + +# while True: +# test_pos = 10*np.random.randn(3) +# test_time = 10*np.random.randn() 
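
The commented-out check being sketched in this block amounts to a unit test for estimate_pos; a condensed runnable version follows (unit wave speed, so reception times carry distance units, as in the original; generic geometry assumed, a degenerate random draw can still fail):

    import numpy as np

    def self_test(trials=20, seed=1):
        np.random.seed(seed)
        r = 0.0254
        positions = [np.array(p) for p in [(0, r, 0), (-r, 0, 0), (r, 0, 0), (0, -r, 0)]]
        for _ in range(trials):
            test_pos = 10 * np.random.randn(3)
            test_time = 10 * np.random.randn()
            reception_times = [test_time + np.linalg.norm(p - test_pos) for p in positions]
            res = estimate_pos(reception_times, positions)
            assert any(np.linalg.norm(test_pos - x) < 1e-2 for x in res)
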
+ +# reception_times = [test_time + np.linalg.norm(p - test_pos) for p in positions] +# print 'problem:', test_pos, test_time +# res = estimate_pos(reception_times, positions) +# assert res +# good = any(valid(test_pos, x) for x in res) +# print 'result:', res +# assert good +# print \ No newline at end of file diff --git a/drivers/sub8_sonar/sub8_sonar/sonar_driver_least_squares.py b/drivers/sub8_sonar/sub8_sonar/sonar_driver_least_squares.py new file mode 100755 index 00000000..8b459c75 --- /dev/null +++ b/drivers/sub8_sonar/sub8_sonar/sonar_driver_least_squares.py @@ -0,0 +1,241 @@ +#!/usr/bin/env python +import rospy +import rosparam + +import numpy as np +from scipy import optimize + +from sub8_msgs.srv import Sonar, SonarResponse +from sub8_ros_tools import thread_lock, make_header +from sub8_alarm import AlarmBroadcaster + +import threading +import serial +import binascii +import struct +import time +import crc16 + +lock = threading.Lock() + +import sys +CURSOR_UP_ONE = '\x1b[1A' +ERASE_LINE = '\x1b[2K' + +class Sub8Sonar(): + ''' + Smart sensor that provides high level ROS code with the location of pinger pulses detected with + Jake Easterling's Hydrophone board. + * Adapted from the actuator driver. + + TODO: Add a function to try and reconnect to the serial port if we lose connection. + TODO: Express pulse location in map frame + ''' + def __init__(self, hydrophone_locations, port, baud=9600): + rospy.init_node("sonar") + + alarm_broadcaster = AlarmBroadcaster() + self.disconnection_alarm = alarm_broadcaster.add_alarm( + name='sonar_disconnect', + action_required=True, + severity=0 + ) + self.packet_error_alarm = alarm_broadcaster.add_alarm( + name='sonar_packet_error', + action_required=False, + severity=2 + ) + + try: + self.ser = serial.Serial(port=port, baudrate=baud, timeout=None) + self.ser.flushInput() + except Exception, e: + print "\x1b[31mSonar serial connection error:\n\t", e, "\x1b[0m" + return None + + self.hydrophone_locations = hydrophone_locations + self.sonar_sensor = EchoLocator(hydrophone_locations, c=1484) # speed of sound in m/s + + rospy.Service('~/sonar/get_pinger_pulse', Sonar, self.request_data) + rospy.spin() + + def listener(self): + ''' + Parse the response of the board. + ''' + print "Listening..." + + # We've found a packet now disect it. + response = self.ser.read(19) + # rospy.loginfo("Received: %s" % response) #uncomment for debugging + if self.check_CRC(response): + delete_last_lines(2) # Heard response! + # The checksum matches the data so split the response into each piece of data. + # For info on what these letters mean: https://docs.python.org/2/library/struct.html#format-characters + data = struct.unpack('>BffffH', response) + timestamps = np.array([data[4], data[1], data[2], data[3] ]) + print timestamps + return timestamps + else: + self.packet_error_alarm.raise_alarm( + problem_description="Sonar board checksum error.", + parameters={ + 'fault_info': {'data': response} + } + ) + return None + + @thread_lock(lock) + def request_data(self, srv): + ''' + A polling packet consists of only a header and checksum (CRC-16): + HEADER CHECKSUM + [ 0x41 | ] + ''' + self.ser.flushInput() + + message = struct.pack('B', 0x41) + message += self.CRC(message) + + # rospy.loginfo("Writing: %s" % binascii.hexlify(message)) # uncomment for debugging + + try: + self.ser.write('A') + print "Sent request..." + except: # Except only serial errors in the future. 
+ self.disconnection_alarm.raise_alarm( + problem_description="Sonar board serial connection has been terminated." + ) + return False + return self.sonar_sensor.getPulseLocation(self.listener()) + + def CRC(self, message): + # You may have to change the checksum type. + # Check the crc16 module online to see how to do that. + crc = crc16.crc16xmodem(message, 0xFFFF) + return struct.pack('>H', crc) + + def check_CRC(self, message): + ''' + Given a message with a checksum as the last two bytes, this will return True or False + if the checksum matches the given message. + ''' + msg_checksum = message[-2:] + raw_message = message[:-2] + crc = crc16.crc16xmodem(raw_message, 0xFFFF) + + # If the two match the message was correct + if crc == struct.unpack('>H', msg_checksum)[0]: + return True + else: + return False + +class EchoLocator(object): + ''' + Identifies the origin of a pulse in time and space given stamps of the time of detection of + the pulse by individual sensors. + c is the wave propagation speed in the medium of operation + ''' + # hydrophone locations should be the dict returned by rospy.get_param('~//hydrophones + def __init__(self, hydrophone_locations, c): # speed in m/s + self.hydrophone_locations = np.array([1, 1, 1]) # just for apending, will not be included + self.c = c + for key in hydrophone_locations: + sensor_location = np.array([hydrophone_locations[key]['x'], hydrophone_locations[key]['y'], hydrophone_locations[key]['z']]) + self.hydrophone_locations = np.vstack((self.hydrophone_locations, sensor_location)) + self.hydrophone_locations = self.hydrophone_locations[1:] + + alarm_broadcaster = AlarmBroadcaster() + self.locate_pulse_error_alarm = alarm_broadcaster.add_alarm( + name='sonar_pulse_locating_error', + action_required=False, + severity=2 + ) + + def getPulseLocation(self, timestamps): + ''' + Returns a ros message with the location and time of emission of a pinger pulse. + ''' + assert timestamps.size == self.hydrophone_locations.shape[0] + self.timestamps = timestamps + init_guess = np.array([1, 1, 1]) + opt = {'disp': 1} + opt_method = 'Powell' + result = optimize.minimize(self._getCost, init_guess, method=opt_method, options=opt, tol=1e-15) + print result.message + resp_data = SonarResponse() + if(result.success): + resp_data.x = result.x[0] + resp_data.y = result.x[1] + resp_data.z = result.x[2] + else: + resp_data.x = 0 + resp_data.y = 0 + resp_data.z = 0 + self.locate_pulse_error_alarm.raise_alarm( + problem_description=("SciPy optimize, using method '" + opt_method + + "', failed to converge on a pinger pulse location."), + parameters={ + 'fault_info': {'data': result.message} + } + ) + return resp_data + + def _getCost(self, potential_pulse): + ''' + Compares the difference in observed and theoretical difference in time of arrival + between the hydrophones and the reference hydrophone for potential source origins. + + Note: when there are 4 timestamps (not including reference), this cost is convex + SciPy Optimize will converge on the correct source origin. 
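
For example, timestamps for a known source can be synthesized to check convergence (a sketch mirroring sonar_test.py earlier in this series; hydrophone layout from test.launch, units in meters):

    import numpy as np

    c = 1484.0
    source = np.array([5.0, -3.0, -2.0])
    hydrophones = np.array([[0.0, 0.0, 0.0], [-0.0254, 0.0, 0.0254],
                            [0.0254, 0.0, 0.0], [0.0, -0.0254, 0.0]])
    arrivals = np.linalg.norm(hydrophones - source, axis=1) / c
    timestamps = arrivals - arrivals[0]  # differential, hydro0 as reference
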
+ With only 3 time stamps, minimization methods will not convrge to the correct + result, however, repeating the process for the source in the same location, + all of the results from minimizing this cost lie on a single 3D line + ''' + cost = 0 + t = self.timestamps + x0 = self.hydrophone_locations[0,0] + y0 = self.hydrophone_locations[0,1] + z0 = self.hydrophone_locations[0,2] + x = potential_pulse[0] + y = potential_pulse[1] + z = potential_pulse[2] + d0 = np.sqrt((x0 - x)**2 + (y0 - x)**2 + (z0 - x)**2) + for i in xrange(1, self.hydrophone_locations.shape[0]): + xi = self.hydrophone_locations[i,0] + yi = self.hydrophone_locations[i,1] + zi = self.hydrophone_locations[i,2] + di = np.sqrt((xi - x)**2 + (yi - x)**2 + (zi - x)**2) + hydro_i_cost = (di - d0 - self.c * t[i])**2 + cost = cost + hydro_i_cost + return cost + +def testFile(filename): + ''' + Runs the multilateration algorithm on timestamps written to a file in the following format: + [ ref_tstamp tstamp1 tstamp2 tstamp3 ] + lines that do not begin with '[' are ignored + ''' + hydrophone_locations = rospy.get_param('~/sonar_driver/hydrophones') #DBG + locator = EchoLocator(hydrophone_locations, 1484) + with open(filename, "r") as data_file: + for line in data_file: + if line[0] == '[': + words =line.split() + timestamps = [] + for word in words: + if word[0] != "[" and word[0] != "]": + timestamps += [eval(word)] + print locator.getPulseLocation(np.array(timestamps)), "\n" + +def delete_last_lines(n=1): + for _ in range(n): + sys.stdout.write(CURSOR_UP_ONE) + sys.stdout.write(ERASE_LINE) + + +if __name__ == "__main__": + d = Sub8Sonar(rospy.get_param('~/sonar_driver/hydrophones'), + "/dev/serial/by-id/usb-FTDI_FT232R_USB_UART_AH02X4IE-if00-port0", + 19200) + # testFile("/home/santiago/bags/SonarTestData.txt") From b472abfa8185d911d4f1d68ef220a2a6ced333f9 Mon Sep 17 00:00:00 2001 From: DSsoto Date: Thu, 30 Jun 2016 23:48:24 -0400 Subject: [PATCH 062/267] SONAR: fix issues with plotting pulses real-time --- drivers/sub8_sonar/sub8_sonar/sonar_driver.py | 25 +++++++++++++------ 1 file changed, 17 insertions(+), 8 deletions(-) diff --git a/drivers/sub8_sonar/sub8_sonar/sonar_driver.py b/drivers/sub8_sonar/sub8_sonar/sonar_driver.py index 543dc039..54a77eff 100755 --- a/drivers/sub8_sonar/sub8_sonar/sonar_driver.py +++ b/drivers/sub8_sonar/sub8_sonar/sonar_driver.py @@ -59,6 +59,7 @@ def __init__(self, hydrophone_locations, port, baud=9600): self.sonar_sensor = EchoLocator(hydrophone_locations, c=1484) # speed of sound in m/s rospy.Service('~/sonar/get_pinger_pulse', Sonar, self.request_data) + print "Sub8 sonar driver initialized\nMultilateration algorithm: bancroft" rospy.spin() def listener(self): @@ -140,12 +141,11 @@ class EchoLocator(object): ''' # hydrophone locations should be the dict returned by rospy.get_param('~//hydrophones def __init__(self, hydrophone_locations, c): # speed in m/s - self.hydrophone_locations = np.array([1, 1, 1]) # just for apending, will not be included self.c = c - self.hydrophone_location = [] + self.hydrophone_locations = [] for key in hydrophone_locations: sensor_location = np.array([hydrophone_locations[key]['x'], hydrophone_locations[key]['y'], hydrophone_locations[key]['z']]) - self.hydrophone_locations += sensor_location + self.hydrophone_locations += [sensor_location] alarm_broadcaster = AlarmBroadcaster() self.locate_pulse_error_alarm = alarm_broadcaster.add_alarm( @@ -158,9 +158,15 @@ def getPulseLocation(self, timestamps): ''' Returns a ros message with the location and time of emission of 
a pinger pulse. ''' - res = estimate_pos(reception_times, positions) - source = [x for x in res if x[2] < 0] # Assume that the source is below us - source = source[0] + res = estimate_pos(timestamps, self.hydrophone_locations) + print res + if len(res) == 1: + source = res[0] + elif len(res) == 2: + source = [x for x in res if x[2] < 0] # Assume that the source is below us + else: + source = [None, None, None] + print source response = SonarResponse() response.x = source[0] response.y = source[1] @@ -209,13 +215,16 @@ def get_B(delta): u = Bpa + Lambda * Bpe position = u[:3] - delta[:3] time = u[3] + delta[3] - if any(reception_times[i] < time for i in xrange(N)): continue - res.append(position) return res +def delete_last_lines(n=1): + for _ in range(n): + sys.stdout.write(CURSOR_UP_ONE) + sys.stdout.write(ERASE_LINE) + if __name__ == "__main__": d = Sub8Sonar(rospy.get_param('~/sonar_driver/hydrophones'), "/dev/serial/by-id/usb-FTDI_FT232R_USB_UART_AH02X4IE-if00-port0", From c7747cbbdb7fe9bb3ca50bf17af943c271187956 Mon Sep 17 00:00:00 2001 From: DSsoto Date: Fri, 1 Jul 2016 09:29:40 -0400 Subject: [PATCH 063/267] SONAR: express hydrophone coordinates in millimeters for greater numerical precision --- drivers/sub8_sonar/launch/test.launch | 17 +++++++++++++---- 1 file changed, 13 insertions(+), 4 deletions(-) diff --git a/drivers/sub8_sonar/launch/test.launch b/drivers/sub8_sonar/launch/test.launch index 1d57065a..ee8abe7b 100644 --- a/drivers/sub8_sonar/launch/test.launch +++ b/drivers/sub8_sonar/launch/test.launch @@ -1,10 +1,19 @@ + - { hydro0: {x: 0, y: 0, z: 0}, - hydro1: {x: -0.0254, y: 0, z: 0.0254}, - hydro2: {x: 0.0254, y: 0, z: 0}, - hydro3: {x: 0, y: -0.0254, z: 0} } + + { hydro0: {x: 0, y: 0, z: 0}, + hydro1: {x: -25.4, y: 0, z: 25.4}, + hydro2: {x: 25.4, y: 0, z: 0}, + hydro3: {x: 0, y: -25.4, z: 0} } + + \ No newline at end of file From 26ddf00e710e02aa749f959cd4661d1c068ce985 Mon Sep 17 00:00:00 2001 From: DSsoto Date: Fri, 1 Jul 2016 09:32:33 -0400 Subject: [PATCH 064/267] SONAR: rename old least squares multilateration driver --- drivers/sub8_sonar/sub8_sonar/__init__.py | 3 ++- .../sub8_sonar/sub8_sonar/sonar_driver_least_squares.py | 7 ++++--- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/drivers/sub8_sonar/sub8_sonar/__init__.py b/drivers/sub8_sonar/sub8_sonar/__init__.py index 772682e0..e57f8437 100644 --- a/drivers/sub8_sonar/sub8_sonar/__init__.py +++ b/drivers/sub8_sonar/sub8_sonar/__init__.py @@ -1 +1,2 @@ -from sonar_driver import EchoLocator \ No newline at end of file +from sonar_driver import EchoLocator +from sonar_driver_least_squares import EchoLocatorLS \ No newline at end of file diff --git a/drivers/sub8_sonar/sub8_sonar/sonar_driver_least_squares.py b/drivers/sub8_sonar/sub8_sonar/sonar_driver_least_squares.py index 8b459c75..b4f3f35f 100755 --- a/drivers/sub8_sonar/sub8_sonar/sonar_driver_least_squares.py +++ b/drivers/sub8_sonar/sub8_sonar/sonar_driver_least_squares.py @@ -54,7 +54,7 @@ def __init__(self, hydrophone_locations, port, baud=9600): return None self.hydrophone_locations = hydrophone_locations - self.sonar_sensor = EchoLocator(hydrophone_locations, c=1484) # speed of sound in m/s + self.sonar_sensor = EchoLocatorLS(hydrophone_locations, c=1484) # speed of sound in m/s rospy.Service('~/sonar/get_pinger_pulse', Sonar, self.request_data) rospy.spin() @@ -130,7 +130,7 @@ def check_CRC(self, message): else: return False -class EchoLocator(object): +class EchoLocatorLS(object): ''' Identifies the origin of a pulse in time and 
space given stamps of the time of detection of the pulse by individual sensors. @@ -151,6 +151,7 @@ def __init__(self, hydrophone_locations, c): # speed in m/s action_required=False, severity=2 ) + print "David: least-squares" def getPulseLocation(self, timestamps): ''' @@ -217,7 +218,7 @@ def testFile(filename): lines that do not begin with '[' are ignored ''' hydrophone_locations = rospy.get_param('~/sonar_driver/hydrophones') #DBG - locator = EchoLocator(hydrophone_locations, 1484) + locator = EchoLocatorLS(hydrophone_locations, 1484) with open(filename, "r") as data_file: for line in data_file: if line[0] == '[': From 46610ab1aa2290f972fa9e6636c9c525a771aa1b Mon Sep 17 00:00:00 2001 From: DSsoto Date: Fri, 1 Jul 2016 09:38:38 -0400 Subject: [PATCH 065/267] SONAR: adapt multilateration algorithm for varying wave speeds/mediums --- drivers/sub8_sonar/nodes/sonar_test.py | 13 +- drivers/sub8_sonar/sub8_sonar/sonar_driver.py | 137 ++++++++---------- 2 files changed, 69 insertions(+), 81 deletions(-) diff --git a/drivers/sub8_sonar/nodes/sonar_test.py b/drivers/sub8_sonar/nodes/sonar_test.py index b86c65f7..b6cb2b16 100755 --- a/drivers/sub8_sonar/nodes/sonar_test.py +++ b/drivers/sub8_sonar/nodes/sonar_test.py @@ -3,7 +3,9 @@ import rospy import rosparam import random -from sonar_driver import EchoLocator +from sub8_sonar import EchoLocator + +c = 1484 class SonarSim(object): def __init__(self, hydrophone_locations, wave_propagation_speed_mps): @@ -40,10 +42,10 @@ def print_red(obj): print '\x1b[31m' + obj.__repr__() + '\x1b[0m' hydrophone_locations = rospy.get_param('~/sonar_test/hydrophones') - sonar = SonarSim(hydrophone_locations, 1484) - locator = EchoLocator(hydrophone_locations, 1484) + sonar = SonarSim(hydrophone_locations, c) + locator = EchoLocator(hydrophone_locations, c) # pulses will be generated with x, y, z in range [-pulse_range, pulse_range + 1] - pulse_range = 10 + pulse_range = 1000000 rand_args = [-pulse_range, pulse_range + 1] for i in range(10): pulse = Pulse(random.randrange(*rand_args), @@ -53,5 +55,4 @@ def print_red(obj): tstamps = tstamps - tstamps[0] print_red(pulse) print "Simulated timestamps:\n\t", tstamps - response = locator.getPulseLocation(np.array(tstamps)) - print "Reconstructed Pulse:\n\t" + "x: " + str(response.x) + " y: " + str(response.y) + " z: " + str(response.z) \ No newline at end of file + response = locator.getPulseLocation(np.array(tstamps)) \ No newline at end of file diff --git a/drivers/sub8_sonar/sub8_sonar/sonar_driver.py b/drivers/sub8_sonar/sub8_sonar/sonar_driver.py index 54a77eff..e7690893 100755 --- a/drivers/sub8_sonar/sub8_sonar/sonar_driver.py +++ b/drivers/sub8_sonar/sub8_sonar/sonar_driver.py @@ -24,6 +24,7 @@ CURSOR_UP_ONE = '\x1b[1A' ERASE_LINE = '\x1b[2K' + class Sub8Sonar(): ''' Smart sensor that provides high level ROS code with the location of pinger pulses detected with @@ -33,7 +34,7 @@ class Sub8Sonar(): TODO: Add a function to try and reconnect to the serial port if we lose connection. 
    TODO: Express pulse location in map frame
    '''
-    def __init__(self, hydrophone_locations, port, baud=9600):
+    def __init__(self, c, hydrophone_locations, port, baud=9600):
         rospy.init_node("sonar")
 
         alarm_broadcaster = AlarmBroadcaster()
@@ -55,11 +56,12 @@ def __init__(self, hydrophone_locations, port, baud=9600):
             print "\x1b[31mSonar serial connection error:\n\t", e, "\x1b[0m"
             return None
 
+        self.c = c
         self.hydrophone_locations = hydrophone_locations
-        self.sonar_sensor = EchoLocator(hydrophone_locations, c=1484) # speed of sound in m/s
+        self.sonar_sensor = EchoLocator(hydrophone_locations, self.c) # speed of sound in m/s
 
         rospy.Service('~/sonar/get_pinger_pulse', Sonar, self.request_data)
-        print "Sub8 sonar driver initialized\nMultilateration algorithm: bancroft"
+        print "\x1b[32mSub8 sonar driver initialized\x1b[0m"
         rospy.spin()
 
     def listener(self):
@@ -77,7 +79,7 @@ def listener(self):
             # For info on what these letters mean: https://docs.python.org/2/library/struct.html#format-characters
             data = struct.unpack('>BffffH', response)
             timestamps = [data[4], data[1], data[2], data[3] ]
-            print timestamps
+            print "timestamps:", timestamps
             return timestamps
         else:
             self.packet_error_alarm.raise_alarm(
@@ -135,9 +137,9 @@ def check_CRC(self, message):
 
 class EchoLocator(object):
     '''
-    Identifies the origin of a pulse in time and space given stamps of the time of detection of
-    the pulse by individual sensors.
-    c is the wave propagation speed in the medium of operation
+    Identifies the origin location of a pulse given differential times of arrival to the individual
+    sensors. c is the wave speed in the medium of operation.
+    Hydrophone coordinates are expected in millimeters; the result will be given in millimeters.
     '''
     # hydrophone locations should be the dict returned by rospy.get_param('~//hydrophones
     def __init__(self, hydrophone_locations, c): # speed in m/s
@@ -153,26 +155,72 @@ def __init__(self, hydrophone_locations, c): # speed in m/s
             action_required=False,
             severity=2
         )
+        print "\x1b[32mMultilateration algorithm: bancroft"
+        print "Speed of Sound (c):", self.c, "m/s\x1b[0m"
 
     def getPulseLocation(self, timestamps):
         '''
         Returns a ros message with the location and time of emission of a pinger pulse.
''' - res = estimate_pos(timestamps, self.hydrophone_locations) - print res + res = self.estimate_pos(timestamps, self.hydrophone_locations) + # print "bancroft output:", res if len(res) == 1: source = res[0] elif len(res) == 2: source = [x for x in res if x[2] < 0] # Assume that the source is below us + if not source: + source = res[0] + else: + source = source[0] else: - source = [None, None, None] - print source + source = [0, 0, 0] + response = SonarResponse() response.x = source[0] response.y = source[1] response.z = source[2] + print "Reconstructed Pulse:\n\t" + "x: " + str(response.x) + " y: " + str(response.y) + " z: " + str(response.z) return response + def estimate_pos(self, reception_times, positions): + assert len(positions) == len(reception_times) + + N = len(reception_times) + assert N >= 4 + + L = lambda a, b: a[0]*b[0] + a[1]*b[1] + a[2]*b[2] - a[3]*b[3] + + def get_B(delta): + B = np.zeros((N, 4)) + for i in xrange(N): + B[i] = np.concatenate([positions[i]/(self.c), [-reception_times[i]]]) + delta + return B + + delta = min([.1*np.random.randn(4) for i in xrange(10)], key=lambda delta: np.linalg.cond(get_B(delta))) + + B = get_B(delta) + a = np.array([0.5 * L(B[i], B[i]) for i in xrange(N)]) + e = np.ones(N) + + Bpe = np.linalg.lstsq(B, e)[0] + Bpa = np.linalg.lstsq(B, a)[0] + + Lambdas = quadratic( + L(Bpe, Bpe), + 2*(L(Bpa, Bpe) - 1), + L(Bpa, Bpa)) + if not Lambdas: return [] + + res = [] + for Lambda in Lambdas: + u = Bpa + Lambda * Bpe + position = u[:3] - delta[:3] + time = u[3] + delta[3] + if any(reception_times[i] < time for i in xrange(N)): continue + res.append(position*self.c) + + return res + def quadratic(a, b, c): discriminant = b*b - 4*a*c if discriminant >= 0: @@ -181,44 +229,7 @@ def quadratic(a, b, c): else: return [] -def estimate_pos(reception_times, positions): - assert len(positions) == len(reception_times) - - N = len(reception_times) - assert N >= 4 - - L = lambda a, b: a[0]*b[0] + a[1]*b[1] + a[2]*b[2] - a[3]*b[3] - - def get_B(delta): - B = np.zeros((N, 4)) - for i in xrange(N): - B[i] = np.concatenate([positions[i], [-reception_times[i]]]) + delta - return B - - delta = min([.1*np.random.randn(4) for i in xrange(10)], key=lambda delta: np.linalg.cond(get_B(delta))) - - B = get_B(delta) - a = np.array([0.5 * L(B[i], B[i]) for i in xrange(N)]) - e = np.ones(N) - - Bpe = np.linalg.lstsq(B, e)[0] - Bpa = np.linalg.lstsq(B, a)[0] - - Lambdas = quadratic( - L(Bpe, Bpe), - 2*(L(Bpa, Bpe) - 1), - L(Bpa, Bpa)) - if not Lambdas: return [] - - res = [] - for Lambda in Lambdas: - u = Bpa + Lambda * Bpe - position = u[:3] - delta[:3] - time = u[3] + delta[3] - if any(reception_times[i] < time for i in xrange(N)): continue - res.append(position) - - return res + def delete_last_lines(n=1): for _ in range(n): @@ -226,30 +237,6 @@ def delete_last_lines(n=1): sys.stdout.write(ERASE_LINE) if __name__ == "__main__": - d = Sub8Sonar(rospy.get_param('~/sonar_driver/hydrophones'), + d = Sub8Sonar(1484, rospy.get_param('~/sonar_driver/hydrophones'), "/dev/serial/by-id/usb-FTDI_FT232R_USB_UART_AH02X4IE-if00-port0", - 19200) - - -# @apply -# def main(): -# np.random.seed(1) - -# r = 0.0254 -# positions = map(np.array, [(0, +r, 0), (-r, 0, 0), (r, 0, 0), (0, -r, 0)]) - -# def valid(correct, solution): -# return np.linalg.norm(correct - solution) < 1e-2 - -# while True: -# test_pos = 10*np.random.randn(3) -# test_time = 10*np.random.randn() - -# reception_times = [test_time + np.linalg.norm(p - test_pos) for p in positions] -# print 'problem:', test_pos, test_time -# res 
= estimate_pos(reception_times, positions) -# assert res -# good = any(valid(test_pos, x) for x in res) -# print 'result:', res -# assert good -# print \ No newline at end of file + 19200) \ No newline at end of file From 5a1c8210604a9fc99e713de1432ed45a85f463e0 Mon Sep 17 00:00:00 2001 From: DSsoto Date: Wed, 29 Jun 2016 15:55:53 -0400 Subject: [PATCH 066/267] SONAR: change multilateration approach from least-squares to Bancroft's Algorithm --- drivers/sub8_sonar/nodes/plotter.py | 2 - drivers/sub8_sonar/nodes/realtime_plotter.py | 26 -- drivers/sub8_sonar/sub8_sonar/sonar_driver.py | 175 +++++++------ .../sub8_sonar/sonar_driver_least_squares.py | 241 ++++++++++++++++++ 4 files changed, 331 insertions(+), 113 deletions(-) mode change 100644 => 100755 drivers/sub8_sonar/nodes/plotter.py delete mode 100755 drivers/sub8_sonar/nodes/realtime_plotter.py create mode 100755 drivers/sub8_sonar/sub8_sonar/sonar_driver_least_squares.py diff --git a/drivers/sub8_sonar/nodes/plotter.py b/drivers/sub8_sonar/nodes/plotter.py old mode 100644 new mode 100755 index 084d245c..f81499d8 --- a/drivers/sub8_sonar/nodes/plotter.py +++ b/drivers/sub8_sonar/nodes/plotter.py @@ -16,8 +16,6 @@ rospy.wait_for_service('~/sonar/get_pinger_pulse') sonar = rospy.ServiceProxy('~/sonar/get_pinger_pulse', Sonar) print "sonar serv proxy created" -#plt.xlim(-1.0, 1.0) -#plt.ylim(-1.0, 1.0) try: while(True): diff --git a/drivers/sub8_sonar/nodes/realtime_plotter.py b/drivers/sub8_sonar/nodes/realtime_plotter.py deleted file mode 100755 index 258e48ef..00000000 --- a/drivers/sub8_sonar/nodes/realtime_plotter.py +++ /dev/null @@ -1,26 +0,0 @@ -#!/usr/bin/env python -import rospy -import rosparam - -import numpy as np -from scipy import optimize - -from sub8_msgs.srv import Sonar, SonarResponse - -import matplotlib.pyplot as plt -from mpl_toolkits.mplot3d import Axes3D -fig = plt.figure() -ax = fig.add_subplot(111, projection='3d') - -rospy.wait_for_service('~/sonar/get_pinger_pulse') -sonar = rospy.ServiceProxy('~/sonar/get_pinger_pulse', Sonar) -print sonar, "serv proxy established" -i = 0 -while(True): - i += 1 - print "i:", i - pinger_pose = sonar() - print pinger_pose - ax.scatter(pinger_pose.x, pinger_pose.y, pinger_pose.z) - plt.draw() - plt.pause(0.5) \ No newline at end of file diff --git a/drivers/sub8_sonar/sub8_sonar/sonar_driver.py b/drivers/sub8_sonar/sub8_sonar/sonar_driver.py index 8b459c75..543dc039 100755 --- a/drivers/sub8_sonar/sub8_sonar/sonar_driver.py +++ b/drivers/sub8_sonar/sub8_sonar/sonar_driver.py @@ -1,9 +1,11 @@ -#!/usr/bin/env python -import rospy -import rosparam +#!/usr/bin/python +from __future__ import division +import math import numpy as np -from scipy import optimize + +import rospy +import rosparam from sub8_msgs.srv import Sonar, SonarResponse from sub8_ros_tools import thread_lock, make_header @@ -73,7 +75,7 @@ def listener(self): # The checksum matches the data so split the response into each piece of data. 
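For reference, the 19-byte response being read here is one header byte, four big-endian floats, and a trailing CRC-16; a self-contained sketch of the parse-and-verify step, using the same struct and crc16 calls as the driver (the function name is illustrative):

    import struct
    import crc16

    def parse_sonar_packet(response):
        '''Return the four timestamps, or None if the CRC-16 check fails.'''
        assert len(response) == 19  # 1 header byte + 4 floats + 2 CRC bytes
        payload, checksum = response[:-2], response[-2:]
        if crc16.crc16xmodem(payload, 0xFFFF) != struct.unpack('>H', checksum)[0]:
            return None  # corrupted packet
        data = struct.unpack('>BffffH', response)
        # data[0] is the header byte; data[4] is the reference hydrophone's timestamp
        return [data[4], data[1], data[2], data[3]]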
# For info on what these letters mean: https://docs.python.org/2/library/struct.html#format-characters data = struct.unpack('>BffffH', response) - timestamps = np.array([data[4], data[1], data[2], data[3] ]) + timestamps = [data[4], data[1], data[2], data[3] ] print timestamps return timestamps else: @@ -140,10 +142,10 @@ class EchoLocator(object): def __init__(self, hydrophone_locations, c): # speed in m/s self.hydrophone_locations = np.array([1, 1, 1]) # just for apending, will not be included self.c = c + self.hydrophone_location = [] for key in hydrophone_locations: sensor_location = np.array([hydrophone_locations[key]['x'], hydrophone_locations[key]['y'], hydrophone_locations[key]['z']]) - self.hydrophone_locations = np.vstack((self.hydrophone_locations, sensor_location)) - self.hydrophone_locations = self.hydrophone_locations[1:] + self.hydrophone_locations += sensor_location alarm_broadcaster = AlarmBroadcaster() self.locate_pulse_error_alarm = alarm_broadcaster.add_alarm( @@ -156,86 +158,89 @@ def getPulseLocation(self, timestamps): ''' Returns a ros message with the location and time of emission of a pinger pulse. ''' - assert timestamps.size == self.hydrophone_locations.shape[0] - self.timestamps = timestamps - init_guess = np.array([1, 1, 1]) - opt = {'disp': 1} - opt_method = 'Powell' - result = optimize.minimize(self._getCost, init_guess, method=opt_method, options=opt, tol=1e-15) - print result.message - resp_data = SonarResponse() - if(result.success): - resp_data.x = result.x[0] - resp_data.y = result.x[1] - resp_data.z = result.x[2] - else: - resp_data.x = 0 - resp_data.y = 0 - resp_data.z = 0 - self.locate_pulse_error_alarm.raise_alarm( - problem_description=("SciPy optimize, using method '" + opt_method - + "', failed to converge on a pinger pulse location."), - parameters={ - 'fault_info': {'data': result.message} - } - ) - return resp_data - - def _getCost(self, potential_pulse): - ''' - Compares the difference in observed and theoretical difference in time of arrival - between the hydrophones and the reference hydrophone for potential source origins. - - Note: when there are 4 timestamps (not including reference), this cost is convex - SciPy Optimize will converge on the correct source origin. 
- With only 3 time stamps, minimization methods will not convrge to the correct - result, however, repeating the process for the source in the same location, - all of the results from minimizing this cost lie on a single 3D line - ''' - cost = 0 - t = self.timestamps - x0 = self.hydrophone_locations[0,0] - y0 = self.hydrophone_locations[0,1] - z0 = self.hydrophone_locations[0,2] - x = potential_pulse[0] - y = potential_pulse[1] - z = potential_pulse[2] - d0 = np.sqrt((x0 - x)**2 + (y0 - x)**2 + (z0 - x)**2) - for i in xrange(1, self.hydrophone_locations.shape[0]): - xi = self.hydrophone_locations[i,0] - yi = self.hydrophone_locations[i,1] - zi = self.hydrophone_locations[i,2] - di = np.sqrt((xi - x)**2 + (yi - x)**2 + (zi - x)**2) - hydro_i_cost = (di - d0 - self.c * t[i])**2 - cost = cost + hydro_i_cost - return cost - -def testFile(filename): - ''' - Runs the multilateration algorithm on timestamps written to a file in the following format: - [ ref_tstamp tstamp1 tstamp2 tstamp3 ] - lines that do not begin with '[' are ignored - ''' - hydrophone_locations = rospy.get_param('~/sonar_driver/hydrophones') #DBG - locator = EchoLocator(hydrophone_locations, 1484) - with open(filename, "r") as data_file: - for line in data_file: - if line[0] == '[': - words =line.split() - timestamps = [] - for word in words: - if word[0] != "[" and word[0] != "]": - timestamps += [eval(word)] - print locator.getPulseLocation(np.array(timestamps)), "\n" - -def delete_last_lines(n=1): - for _ in range(n): - sys.stdout.write(CURSOR_UP_ONE) - sys.stdout.write(ERASE_LINE) - + res = estimate_pos(reception_times, positions) + source = [x for x in res if x[2] < 0] # Assume that the source is below us + source = source[0] + response = SonarResponse() + response.x = source[0] + response.y = source[1] + response.z = source[2] + return response + +def quadratic(a, b, c): + discriminant = b*b - 4*a*c + if discriminant >= 0: + first_times_a = (-b+math.copysign(math.sqrt(discriminant), -b))/2 + return [first_times_a/a, c/first_times_a] + else: + return [] + +def estimate_pos(reception_times, positions): + assert len(positions) == len(reception_times) + + N = len(reception_times) + assert N >= 4 + + L = lambda a, b: a[0]*b[0] + a[1]*b[1] + a[2]*b[2] - a[3]*b[3] + + def get_B(delta): + B = np.zeros((N, 4)) + for i in xrange(N): + B[i] = np.concatenate([positions[i], [-reception_times[i]]]) + delta + return B + + delta = min([.1*np.random.randn(4) for i in xrange(10)], key=lambda delta: np.linalg.cond(get_B(delta))) + + B = get_B(delta) + a = np.array([0.5 * L(B[i], B[i]) for i in xrange(N)]) + e = np.ones(N) + + Bpe = np.linalg.lstsq(B, e)[0] + Bpa = np.linalg.lstsq(B, a)[0] + + Lambdas = quadratic( + L(Bpe, Bpe), + 2*(L(Bpa, Bpe) - 1), + L(Bpa, Bpa)) + if not Lambdas: return [] + + res = [] + for Lambda in Lambdas: + u = Bpa + Lambda * Bpe + position = u[:3] - delta[:3] + time = u[3] + delta[3] + + if any(reception_times[i] < time for i in xrange(N)): continue + + res.append(position) + + return res if __name__ == "__main__": d = Sub8Sonar(rospy.get_param('~/sonar_driver/hydrophones'), "/dev/serial/by-id/usb-FTDI_FT232R_USB_UART_AH02X4IE-if00-port0", 19200) - # testFile("/home/santiago/bags/SonarTestData.txt") + + +# @apply +# def main(): +# np.random.seed(1) + +# r = 0.0254 +# positions = map(np.array, [(0, +r, 0), (-r, 0, 0), (r, 0, 0), (0, -r, 0)]) + +# def valid(correct, solution): +# return np.linalg.norm(correct - solution) < 1e-2 + +# while True: +# test_pos = 10*np.random.randn(3) +# test_time = 10*np.random.randn() 
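The commented-out block being carried along here is a self-test of estimate_pos; an uncommented sketch of the same check, mirroring its receiver layout and its convention that the wave speed is 1 (so times and distances share units):

    import numpy as np

    def bancroft_self_test():
        np.random.seed(1)
        r = 0.0254
        positions = map(np.array, [(0, +r, 0), (-r, 0, 0), (r, 0, 0), (0, -r, 0)])
        test_pos = 10 * np.random.randn(3)
        test_time = 10 * np.random.randn()
        # Reception time = emission time + distance (wave speed taken as 1)
        reception_times = [test_time + np.linalg.norm(p - test_pos) for p in positions]
        res = estimate_pos(reception_times, positions)
        # One of the (at most two) candidate solutions should match the true source
        assert any(np.linalg.norm(test_pos - x) < 1e-2 for x in res)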
+ +# reception_times = [test_time + np.linalg.norm(p - test_pos) for p in positions] +# print 'problem:', test_pos, test_time +# res = estimate_pos(reception_times, positions) +# assert res +# good = any(valid(test_pos, x) for x in res) +# print 'result:', res +# assert good +# print \ No newline at end of file diff --git a/drivers/sub8_sonar/sub8_sonar/sonar_driver_least_squares.py b/drivers/sub8_sonar/sub8_sonar/sonar_driver_least_squares.py new file mode 100755 index 00000000..8b459c75 --- /dev/null +++ b/drivers/sub8_sonar/sub8_sonar/sonar_driver_least_squares.py @@ -0,0 +1,241 @@ +#!/usr/bin/env python +import rospy +import rosparam + +import numpy as np +from scipy import optimize + +from sub8_msgs.srv import Sonar, SonarResponse +from sub8_ros_tools import thread_lock, make_header +from sub8_alarm import AlarmBroadcaster + +import threading +import serial +import binascii +import struct +import time +import crc16 + +lock = threading.Lock() + +import sys +CURSOR_UP_ONE = '\x1b[1A' +ERASE_LINE = '\x1b[2K' + +class Sub8Sonar(): + ''' + Smart sensor that provides high level ROS code with the location of pinger pulses detected with + Jake Easterling's Hydrophone board. + * Adapted from the actuator driver. + + TODO: Add a function to try and reconnect to the serial port if we lose connection. + TODO: Express pulse location in map frame + ''' + def __init__(self, hydrophone_locations, port, baud=9600): + rospy.init_node("sonar") + + alarm_broadcaster = AlarmBroadcaster() + self.disconnection_alarm = alarm_broadcaster.add_alarm( + name='sonar_disconnect', + action_required=True, + severity=0 + ) + self.packet_error_alarm = alarm_broadcaster.add_alarm( + name='sonar_packet_error', + action_required=False, + severity=2 + ) + + try: + self.ser = serial.Serial(port=port, baudrate=baud, timeout=None) + self.ser.flushInput() + except Exception, e: + print "\x1b[31mSonar serial connection error:\n\t", e, "\x1b[0m" + return None + + self.hydrophone_locations = hydrophone_locations + self.sonar_sensor = EchoLocator(hydrophone_locations, c=1484) # speed of sound in m/s + + rospy.Service('~/sonar/get_pinger_pulse', Sonar, self.request_data) + rospy.spin() + + def listener(self): + ''' + Parse the response of the board. + ''' + print "Listening..." + + # We've found a packet now disect it. + response = self.ser.read(19) + # rospy.loginfo("Received: %s" % response) #uncomment for debugging + if self.check_CRC(response): + delete_last_lines(2) # Heard response! + # The checksum matches the data so split the response into each piece of data. + # For info on what these letters mean: https://docs.python.org/2/library/struct.html#format-characters + data = struct.unpack('>BffffH', response) + timestamps = np.array([data[4], data[1], data[2], data[3] ]) + print timestamps + return timestamps + else: + self.packet_error_alarm.raise_alarm( + problem_description="Sonar board checksum error.", + parameters={ + 'fault_info': {'data': response} + } + ) + return None + + @thread_lock(lock) + def request_data(self, srv): + ''' + A polling packet consists of only a header and checksum (CRC-16): + HEADER CHECKSUM + [ 0x41 | ] + ''' + self.ser.flushInput() + + message = struct.pack('B', 0x41) + message += self.CRC(message) + + # rospy.loginfo("Writing: %s" % binascii.hexlify(message)) # uncomment for debugging + + try: + self.ser.write('A') + print "Sent request..." + except: # Except only serial errors in the future. 
+ self.disconnection_alarm.raise_alarm( + problem_description="Sonar board serial connection has been terminated." + ) + return False + return self.sonar_sensor.getPulseLocation(self.listener()) + + def CRC(self, message): + # You may have to change the checksum type. + # Check the crc16 module online to see how to do that. + crc = crc16.crc16xmodem(message, 0xFFFF) + return struct.pack('>H', crc) + + def check_CRC(self, message): + ''' + Given a message with a checksum as the last two bytes, this will return True or False + if the checksum matches the given message. + ''' + msg_checksum = message[-2:] + raw_message = message[:-2] + crc = crc16.crc16xmodem(raw_message, 0xFFFF) + + # If the two match the message was correct + if crc == struct.unpack('>H', msg_checksum)[0]: + return True + else: + return False + +class EchoLocator(object): + ''' + Identifies the origin of a pulse in time and space given stamps of the time of detection of + the pulse by individual sensors. + c is the wave propagation speed in the medium of operation + ''' + # hydrophone locations should be the dict returned by rospy.get_param('~//hydrophones + def __init__(self, hydrophone_locations, c): # speed in m/s + self.hydrophone_locations = np.array([1, 1, 1]) # just for apending, will not be included + self.c = c + for key in hydrophone_locations: + sensor_location = np.array([hydrophone_locations[key]['x'], hydrophone_locations[key]['y'], hydrophone_locations[key]['z']]) + self.hydrophone_locations = np.vstack((self.hydrophone_locations, sensor_location)) + self.hydrophone_locations = self.hydrophone_locations[1:] + + alarm_broadcaster = AlarmBroadcaster() + self.locate_pulse_error_alarm = alarm_broadcaster.add_alarm( + name='sonar_pulse_locating_error', + action_required=False, + severity=2 + ) + + def getPulseLocation(self, timestamps): + ''' + Returns a ros message with the location and time of emission of a pinger pulse. + ''' + assert timestamps.size == self.hydrophone_locations.shape[0] + self.timestamps = timestamps + init_guess = np.array([1, 1, 1]) + opt = {'disp': 1} + opt_method = 'Powell' + result = optimize.minimize(self._getCost, init_guess, method=opt_method, options=opt, tol=1e-15) + print result.message + resp_data = SonarResponse() + if(result.success): + resp_data.x = result.x[0] + resp_data.y = result.x[1] + resp_data.z = result.x[2] + else: + resp_data.x = 0 + resp_data.y = 0 + resp_data.z = 0 + self.locate_pulse_error_alarm.raise_alarm( + problem_description=("SciPy optimize, using method '" + opt_method + + "', failed to converge on a pinger pulse location."), + parameters={ + 'fault_info': {'data': result.message} + } + ) + return resp_data + + def _getCost(self, potential_pulse): + ''' + Compares the difference in observed and theoretical difference in time of arrival + between the hydrophones and the reference hydrophone for potential source origins. + + Note: when there are 4 timestamps (not including reference), this cost is convex + SciPy Optimize will converge on the correct source origin. 
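That convexity claim is easy to sanity-check in isolation; below is a sketch with five receivers (positions invented for illustration), a known source, and the same Powell call the driver uses. Note that the distances here use all three coordinates, whereas _getCost below reuses x in its y- and z-terms, which looks like a typo:

    import numpy as np
    from scipy import optimize

    def tdoa_cost(pulse, hydrophones, timestamps, c):
        '''Zero exactly when `pulse` reproduces the observed arrival-time differences.'''
        d0 = np.linalg.norm(hydrophones[0] - pulse)
        cost = 0
        for i in xrange(1, len(hydrophones)):
            di = np.linalg.norm(hydrophones[i] - pulse)
            cost += (di - d0 - c * timestamps[i]) ** 2
        return cost

    c = 1484.0  # m/s
    hydrophones = [np.array(p) for p in [(0, 0, 0), (-0.0254, 0, 0.0254), (0.0254, 0, 0),
                                         (0, -0.0254, 0), (0, 0, -0.0254)]]
    source = np.array([5.0, -3.0, -10.0])
    d0 = np.linalg.norm(hydrophones[0] - source)
    timestamps = [(np.linalg.norm(h - source) - d0) / c for h in hydrophones]
    result = optimize.minimize(lambda p: tdoa_cost(p, hydrophones, timestamps, c),
                               np.array([1.0, 1.0, 1.0]), method='Powell', tol=1e-15)
    print result.x  # should be close to `source`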
+ With only 3 time stamps, minimization methods will not convrge to the correct + result, however, repeating the process for the source in the same location, + all of the results from minimizing this cost lie on a single 3D line + ''' + cost = 0 + t = self.timestamps + x0 = self.hydrophone_locations[0,0] + y0 = self.hydrophone_locations[0,1] + z0 = self.hydrophone_locations[0,2] + x = potential_pulse[0] + y = potential_pulse[1] + z = potential_pulse[2] + d0 = np.sqrt((x0 - x)**2 + (y0 - x)**2 + (z0 - x)**2) + for i in xrange(1, self.hydrophone_locations.shape[0]): + xi = self.hydrophone_locations[i,0] + yi = self.hydrophone_locations[i,1] + zi = self.hydrophone_locations[i,2] + di = np.sqrt((xi - x)**2 + (yi - x)**2 + (zi - x)**2) + hydro_i_cost = (di - d0 - self.c * t[i])**2 + cost = cost + hydro_i_cost + return cost + +def testFile(filename): + ''' + Runs the multilateration algorithm on timestamps written to a file in the following format: + [ ref_tstamp tstamp1 tstamp2 tstamp3 ] + lines that do not begin with '[' are ignored + ''' + hydrophone_locations = rospy.get_param('~/sonar_driver/hydrophones') #DBG + locator = EchoLocator(hydrophone_locations, 1484) + with open(filename, "r") as data_file: + for line in data_file: + if line[0] == '[': + words =line.split() + timestamps = [] + for word in words: + if word[0] != "[" and word[0] != "]": + timestamps += [eval(word)] + print locator.getPulseLocation(np.array(timestamps)), "\n" + +def delete_last_lines(n=1): + for _ in range(n): + sys.stdout.write(CURSOR_UP_ONE) + sys.stdout.write(ERASE_LINE) + + +if __name__ == "__main__": + d = Sub8Sonar(rospy.get_param('~/sonar_driver/hydrophones'), + "/dev/serial/by-id/usb-FTDI_FT232R_USB_UART_AH02X4IE-if00-port0", + 19200) + # testFile("/home/santiago/bags/SonarTestData.txt") From fbec4cb15b48c258794b60632d4524f6c1d8c08d Mon Sep 17 00:00:00 2001 From: DSsoto Date: Thu, 30 Jun 2016 23:48:24 -0400 Subject: [PATCH 067/267] SONAR: fix issues with plotting pulses real-time --- drivers/sub8_sonar/sub8_sonar/sonar_driver.py | 25 +++++++++++++------ 1 file changed, 17 insertions(+), 8 deletions(-) diff --git a/drivers/sub8_sonar/sub8_sonar/sonar_driver.py b/drivers/sub8_sonar/sub8_sonar/sonar_driver.py index 543dc039..54a77eff 100755 --- a/drivers/sub8_sonar/sub8_sonar/sonar_driver.py +++ b/drivers/sub8_sonar/sub8_sonar/sonar_driver.py @@ -59,6 +59,7 @@ def __init__(self, hydrophone_locations, port, baud=9600): self.sonar_sensor = EchoLocator(hydrophone_locations, c=1484) # speed of sound in m/s rospy.Service('~/sonar/get_pinger_pulse', Sonar, self.request_data) + print "Sub8 sonar driver initialized\nMultilateration algorithm: bancroft" rospy.spin() def listener(self): @@ -140,12 +141,11 @@ class EchoLocator(object): ''' # hydrophone locations should be the dict returned by rospy.get_param('~//hydrophones def __init__(self, hydrophone_locations, c): # speed in m/s - self.hydrophone_locations = np.array([1, 1, 1]) # just for apending, will not be included self.c = c - self.hydrophone_location = [] + self.hydrophone_locations = [] for key in hydrophone_locations: sensor_location = np.array([hydrophone_locations[key]['x'], hydrophone_locations[key]['y'], hydrophone_locations[key]['z']]) - self.hydrophone_locations += sensor_location + self.hydrophone_locations += [sensor_location] alarm_broadcaster = AlarmBroadcaster() self.locate_pulse_error_alarm = alarm_broadcaster.add_alarm( @@ -158,9 +158,15 @@ def getPulseLocation(self, timestamps): ''' Returns a ros message with the location and time of emission of 
a pinger pulse. ''' - res = estimate_pos(reception_times, positions) - source = [x for x in res if x[2] < 0] # Assume that the source is below us - source = source[0] + res = estimate_pos(timestamps, self.hydrophone_locations) + print res + if len(res) == 1: + source = res[0] + elif len(res) == 2: + source = [x for x in res if x[2] < 0] # Assume that the source is below us + else: + source = [None, None, None] + print source response = SonarResponse() response.x = source[0] response.y = source[1] @@ -209,13 +215,16 @@ def get_B(delta): u = Bpa + Lambda * Bpe position = u[:3] - delta[:3] time = u[3] + delta[3] - if any(reception_times[i] < time for i in xrange(N)): continue - res.append(position) return res +def delete_last_lines(n=1): + for _ in range(n): + sys.stdout.write(CURSOR_UP_ONE) + sys.stdout.write(ERASE_LINE) + if __name__ == "__main__": d = Sub8Sonar(rospy.get_param('~/sonar_driver/hydrophones'), "/dev/serial/by-id/usb-FTDI_FT232R_USB_UART_AH02X4IE-if00-port0", From 998295366efb33705dec54d0c9315c48c5e3d6d2 Mon Sep 17 00:00:00 2001 From: DSsoto Date: Fri, 1 Jul 2016 09:29:40 -0400 Subject: [PATCH 068/267] SONAR: express hydrophone coordinates in millimeters for greater numerical precision --- drivers/sub8_sonar/launch/test.launch | 17 +++++++++++++---- 1 file changed, 13 insertions(+), 4 deletions(-) diff --git a/drivers/sub8_sonar/launch/test.launch b/drivers/sub8_sonar/launch/test.launch index 1d57065a..ee8abe7b 100644 --- a/drivers/sub8_sonar/launch/test.launch +++ b/drivers/sub8_sonar/launch/test.launch @@ -1,10 +1,19 @@ + - { hydro0: {x: 0, y: 0, z: 0}, - hydro1: {x: -0.0254, y: 0, z: 0.0254}, - hydro2: {x: 0.0254, y: 0, z: 0}, - hydro3: {x: 0, y: -0.0254, z: 0} } + + { hydro0: {x: 0, y: 0, z: 0}, + hydro1: {x: -25.4, y: 0, z: 25.4}, + hydro2: {x: 25.4, y: 0, z: 0}, + hydro3: {x: 0, y: -25.4, z: 0} } + + \ No newline at end of file From 52765b3d6be36bee4345dda1d65c6d2dbabb85b8 Mon Sep 17 00:00:00 2001 From: DSsoto Date: Fri, 1 Jul 2016 09:32:33 -0400 Subject: [PATCH 069/267] SONAR: rename old least squares multilateration driver --- drivers/sub8_sonar/sub8_sonar/__init__.py | 3 ++- .../sub8_sonar/sub8_sonar/sonar_driver_least_squares.py | 7 ++++--- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/drivers/sub8_sonar/sub8_sonar/__init__.py b/drivers/sub8_sonar/sub8_sonar/__init__.py index 772682e0..e57f8437 100644 --- a/drivers/sub8_sonar/sub8_sonar/__init__.py +++ b/drivers/sub8_sonar/sub8_sonar/__init__.py @@ -1 +1,2 @@ -from sonar_driver import EchoLocator \ No newline at end of file +from sonar_driver import EchoLocator +from sonar_driver_least_squares import EchoLocatorLS \ No newline at end of file diff --git a/drivers/sub8_sonar/sub8_sonar/sonar_driver_least_squares.py b/drivers/sub8_sonar/sub8_sonar/sonar_driver_least_squares.py index 8b459c75..b4f3f35f 100755 --- a/drivers/sub8_sonar/sub8_sonar/sonar_driver_least_squares.py +++ b/drivers/sub8_sonar/sub8_sonar/sonar_driver_least_squares.py @@ -54,7 +54,7 @@ def __init__(self, hydrophone_locations, port, baud=9600): return None self.hydrophone_locations = hydrophone_locations - self.sonar_sensor = EchoLocator(hydrophone_locations, c=1484) # speed of sound in m/s + self.sonar_sensor = EchoLocatorLS(hydrophone_locations, c=1484) # speed of sound in m/s rospy.Service('~/sonar/get_pinger_pulse', Sonar, self.request_data) rospy.spin() @@ -130,7 +130,7 @@ def check_CRC(self, message): else: return False -class EchoLocator(object): +class EchoLocatorLS(object): ''' Identifies the origin of a pulse in time and 
space given stamps of the time of detection of the pulse by individual sensors. @@ -151,6 +151,7 @@ def __init__(self, hydrophone_locations, c): # speed in m/s action_required=False, severity=2 ) + print "David: least-squares" def getPulseLocation(self, timestamps): ''' @@ -217,7 +218,7 @@ def testFile(filename): lines that do not begin with '[' are ignored ''' hydrophone_locations = rospy.get_param('~/sonar_driver/hydrophones') #DBG - locator = EchoLocator(hydrophone_locations, 1484) + locator = EchoLocatorLS(hydrophone_locations, 1484) with open(filename, "r") as data_file: for line in data_file: if line[0] == '[': From 7103b43408c0d83e6f706fd4ec7dc3d780c12cd0 Mon Sep 17 00:00:00 2001 From: DSsoto Date: Fri, 1 Jul 2016 09:38:38 -0400 Subject: [PATCH 070/267] SONAR: adapt multilateration algorithm for varying wave speeds/mediums --- drivers/sub8_sonar/nodes/sonar_test.py | 13 +- drivers/sub8_sonar/sub8_sonar/sonar_driver.py | 137 ++++++++---------- 2 files changed, 69 insertions(+), 81 deletions(-) diff --git a/drivers/sub8_sonar/nodes/sonar_test.py b/drivers/sub8_sonar/nodes/sonar_test.py index b86c65f7..b6cb2b16 100755 --- a/drivers/sub8_sonar/nodes/sonar_test.py +++ b/drivers/sub8_sonar/nodes/sonar_test.py @@ -3,7 +3,9 @@ import rospy import rosparam import random -from sonar_driver import EchoLocator +from sub8_sonar import EchoLocator + +c = 1484 class SonarSim(object): def __init__(self, hydrophone_locations, wave_propagation_speed_mps): @@ -40,10 +42,10 @@ def print_red(obj): print '\x1b[31m' + obj.__repr__() + '\x1b[0m' hydrophone_locations = rospy.get_param('~/sonar_test/hydrophones') - sonar = SonarSim(hydrophone_locations, 1484) - locator = EchoLocator(hydrophone_locations, 1484) + sonar = SonarSim(hydrophone_locations, c) + locator = EchoLocator(hydrophone_locations, c) # pulses will be generated with x, y, z in range [-pulse_range, pulse_range + 1] - pulse_range = 10 + pulse_range = 1000000 rand_args = [-pulse_range, pulse_range + 1] for i in range(10): pulse = Pulse(random.randrange(*rand_args), @@ -53,5 +55,4 @@ def print_red(obj): tstamps = tstamps - tstamps[0] print_red(pulse) print "Simulated timestamps:\n\t", tstamps - response = locator.getPulseLocation(np.array(tstamps)) - print "Reconstructed Pulse:\n\t" + "x: " + str(response.x) + " y: " + str(response.y) + " z: " + str(response.z) \ No newline at end of file + response = locator.getPulseLocation(np.array(tstamps)) \ No newline at end of file diff --git a/drivers/sub8_sonar/sub8_sonar/sonar_driver.py b/drivers/sub8_sonar/sub8_sonar/sonar_driver.py index 54a77eff..e7690893 100755 --- a/drivers/sub8_sonar/sub8_sonar/sonar_driver.py +++ b/drivers/sub8_sonar/sub8_sonar/sonar_driver.py @@ -24,6 +24,7 @@ CURSOR_UP_ONE = '\x1b[1A' ERASE_LINE = '\x1b[2K' + class Sub8Sonar(): ''' Smart sensor that provides high level ROS code with the location of pinger pulses detected with @@ -33,7 +34,7 @@ class Sub8Sonar(): TODO: Add a function to try and reconnect to the serial port if we lose connection. 
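This is the patch that stops hard-coding the wave speed, so the same localizer can serve different media; for instance, with the constructor this patch gives EchoLocator (the speeds are standard reference values in m/s):

    hydrophone_locations = rospy.get_param('~/sonar_driver/hydrophones')
    water_locator = EchoLocator(hydrophone_locations, c=1484)  # sound in water
    air_locator = EchoLocator(hydrophone_locations, c=343)     # sound in air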
    TODO: Express pulse location in map frame
    '''
-    def __init__(self, hydrophone_locations, port, baud=9600):
+    def __init__(self, c, hydrophone_locations, port, baud=9600):
         rospy.init_node("sonar")
 
         alarm_broadcaster = AlarmBroadcaster()
@@ -55,11 +56,12 @@ def __init__(self, hydrophone_locations, port, baud=9600):
             print "\x1b[31mSonar serial connection error:\n\t", e, "\x1b[0m"
             return None
 
+        self.c = c
         self.hydrophone_locations = hydrophone_locations
-        self.sonar_sensor = EchoLocator(hydrophone_locations, c=1484) # speed of sound in m/s
+        self.sonar_sensor = EchoLocator(hydrophone_locations, self.c) # speed of sound in m/s
 
         rospy.Service('~/sonar/get_pinger_pulse', Sonar, self.request_data)
-        print "Sub8 sonar driver initialized\nMultilateration algorithm: bancroft"
+        print "\x1b[32mSub8 sonar driver initialized\x1b[0m"
         rospy.spin()
 
     def listener(self):
@@ -77,7 +79,7 @@ def listener(self):
             # For info on what these letters mean: https://docs.python.org/2/library/struct.html#format-characters
             data = struct.unpack('>BffffH', response)
             timestamps = [data[4], data[1], data[2], data[3] ]
-            print timestamps
+            print "timestamps:", timestamps
             return timestamps
         else:
             self.packet_error_alarm.raise_alarm(
@@ -135,9 +137,9 @@ def check_CRC(self, message):
 
 class EchoLocator(object):
     '''
-    Identifies the origin of a pulse in time and space given stamps of the time of detection of
-    the pulse by individual sensors.
-    c is the wave propagation speed in the medium of operation
+    Identifies the origin location of a pulse given differential times of arrival to the individual
+    sensors. c is the wave speed in the medium of operation.
+    Hydrophone coordinates are expected in millimeters; the result will be given in millimeters.
    '''
     # hydrophone locations should be the dict returned by rospy.get_param('~//hydrophones
     def __init__(self, hydrophone_locations, c): # speed in m/s
@@ -153,26 +155,72 @@ def __init__(self, hydrophone_locations, c): # speed in m/s
             action_required=False,
             severity=2
         )
+        print "\x1b[32mMultilateration algorithm: bancroft"
+        print "Speed of Sound (c):", self.c, "m/s\x1b[0m"
 
     def getPulseLocation(self, timestamps):
         '''
         Returns a ros message with the location and time of emission of a pinger pulse.
''' - res = estimate_pos(timestamps, self.hydrophone_locations) - print res + res = self.estimate_pos(timestamps, self.hydrophone_locations) + # print "bancroft output:", res if len(res) == 1: source = res[0] elif len(res) == 2: source = [x for x in res if x[2] < 0] # Assume that the source is below us + if not source: + source = res[0] + else: + source = source[0] else: - source = [None, None, None] - print source + source = [0, 0, 0] + response = SonarResponse() response.x = source[0] response.y = source[1] response.z = source[2] + print "Reconstructed Pulse:\n\t" + "x: " + str(response.x) + " y: " + str(response.y) + " z: " + str(response.z) return response + def estimate_pos(self, reception_times, positions): + assert len(positions) == len(reception_times) + + N = len(reception_times) + assert N >= 4 + + L = lambda a, b: a[0]*b[0] + a[1]*b[1] + a[2]*b[2] - a[3]*b[3] + + def get_B(delta): + B = np.zeros((N, 4)) + for i in xrange(N): + B[i] = np.concatenate([positions[i]/(self.c), [-reception_times[i]]]) + delta + return B + + delta = min([.1*np.random.randn(4) for i in xrange(10)], key=lambda delta: np.linalg.cond(get_B(delta))) + + B = get_B(delta) + a = np.array([0.5 * L(B[i], B[i]) for i in xrange(N)]) + e = np.ones(N) + + Bpe = np.linalg.lstsq(B, e)[0] + Bpa = np.linalg.lstsq(B, a)[0] + + Lambdas = quadratic( + L(Bpe, Bpe), + 2*(L(Bpa, Bpe) - 1), + L(Bpa, Bpa)) + if not Lambdas: return [] + + res = [] + for Lambda in Lambdas: + u = Bpa + Lambda * Bpe + position = u[:3] - delta[:3] + time = u[3] + delta[3] + if any(reception_times[i] < time for i in xrange(N)): continue + res.append(position*self.c) + + return res + def quadratic(a, b, c): discriminant = b*b - 4*a*c if discriminant >= 0: @@ -181,44 +229,7 @@ def quadratic(a, b, c): else: return [] -def estimate_pos(reception_times, positions): - assert len(positions) == len(reception_times) - - N = len(reception_times) - assert N >= 4 - - L = lambda a, b: a[0]*b[0] + a[1]*b[1] + a[2]*b[2] - a[3]*b[3] - - def get_B(delta): - B = np.zeros((N, 4)) - for i in xrange(N): - B[i] = np.concatenate([positions[i], [-reception_times[i]]]) + delta - return B - - delta = min([.1*np.random.randn(4) for i in xrange(10)], key=lambda delta: np.linalg.cond(get_B(delta))) - - B = get_B(delta) - a = np.array([0.5 * L(B[i], B[i]) for i in xrange(N)]) - e = np.ones(N) - - Bpe = np.linalg.lstsq(B, e)[0] - Bpa = np.linalg.lstsq(B, a)[0] - - Lambdas = quadratic( - L(Bpe, Bpe), - 2*(L(Bpa, Bpe) - 1), - L(Bpa, Bpa)) - if not Lambdas: return [] - - res = [] - for Lambda in Lambdas: - u = Bpa + Lambda * Bpe - position = u[:3] - delta[:3] - time = u[3] + delta[3] - if any(reception_times[i] < time for i in xrange(N)): continue - res.append(position) - - return res + def delete_last_lines(n=1): for _ in range(n): @@ -226,30 +237,6 @@ def delete_last_lines(n=1): sys.stdout.write(ERASE_LINE) if __name__ == "__main__": - d = Sub8Sonar(rospy.get_param('~/sonar_driver/hydrophones'), + d = Sub8Sonar(1484, rospy.get_param('~/sonar_driver/hydrophones'), "/dev/serial/by-id/usb-FTDI_FT232R_USB_UART_AH02X4IE-if00-port0", - 19200) - - -# @apply -# def main(): -# np.random.seed(1) - -# r = 0.0254 -# positions = map(np.array, [(0, +r, 0), (-r, 0, 0), (r, 0, 0), (0, -r, 0)]) - -# def valid(correct, solution): -# return np.linalg.norm(correct - solution) < 1e-2 - -# while True: -# test_pos = 10*np.random.randn(3) -# test_time = 10*np.random.randn() - -# reception_times = [test_time + np.linalg.norm(p - test_pos) for p in positions] -# print 'problem:', test_pos, test_time -# res 
= estimate_pos(reception_times, positions) -# assert res -# good = any(valid(test_pos, x) for x in res) -# print 'result:', res -# assert good -# print \ No newline at end of file + 19200) \ No newline at end of file From 226d450d7daf7a2580473ce728e42285d7840d3c Mon Sep 17 00:00:00 2001 From: DSsoto Date: Mon, 4 Jul 2016 02:58:39 -0400 Subject: [PATCH 071/267] SONAR: delete separate least-squares sonar driver and integrate it with new one --- drivers/sub8_sonar/sub8_sonar/__init__.py | 2 +- .../sub8_sonar/sonar_driver_least_squares.py | 242 ------------------ 2 files changed, 1 insertion(+), 243 deletions(-) delete mode 100755 drivers/sub8_sonar/sub8_sonar/sonar_driver_least_squares.py diff --git a/drivers/sub8_sonar/sub8_sonar/__init__.py b/drivers/sub8_sonar/sub8_sonar/__init__.py index e57f8437..c9803fdd 100644 --- a/drivers/sub8_sonar/sub8_sonar/__init__.py +++ b/drivers/sub8_sonar/sub8_sonar/__init__.py @@ -1,2 +1,2 @@ from sonar_driver import EchoLocator -from sonar_driver_least_squares import EchoLocatorLS \ No newline at end of file +from sonar_driver import EchoLocatorLS \ No newline at end of file diff --git a/drivers/sub8_sonar/sub8_sonar/sonar_driver_least_squares.py b/drivers/sub8_sonar/sub8_sonar/sonar_driver_least_squares.py deleted file mode 100755 index b4f3f35f..00000000 --- a/drivers/sub8_sonar/sub8_sonar/sonar_driver_least_squares.py +++ /dev/null @@ -1,242 +0,0 @@ -#!/usr/bin/env python -import rospy -import rosparam - -import numpy as np -from scipy import optimize - -from sub8_msgs.srv import Sonar, SonarResponse -from sub8_ros_tools import thread_lock, make_header -from sub8_alarm import AlarmBroadcaster - -import threading -import serial -import binascii -import struct -import time -import crc16 - -lock = threading.Lock() - -import sys -CURSOR_UP_ONE = '\x1b[1A' -ERASE_LINE = '\x1b[2K' - -class Sub8Sonar(): - ''' - Smart sensor that provides high level ROS code with the location of pinger pulses detected with - Jake Easterling's Hydrophone board. - * Adapted from the actuator driver. - - TODO: Add a function to try and reconnect to the serial port if we lose connection. - TODO: Express pulse location in map frame - ''' - def __init__(self, hydrophone_locations, port, baud=9600): - rospy.init_node("sonar") - - alarm_broadcaster = AlarmBroadcaster() - self.disconnection_alarm = alarm_broadcaster.add_alarm( - name='sonar_disconnect', - action_required=True, - severity=0 - ) - self.packet_error_alarm = alarm_broadcaster.add_alarm( - name='sonar_packet_error', - action_required=False, - severity=2 - ) - - try: - self.ser = serial.Serial(port=port, baudrate=baud, timeout=None) - self.ser.flushInput() - except Exception, e: - print "\x1b[31mSonar serial connection error:\n\t", e, "\x1b[0m" - return None - - self.hydrophone_locations = hydrophone_locations - self.sonar_sensor = EchoLocatorLS(hydrophone_locations, c=1484) # speed of sound in m/s - - rospy.Service('~/sonar/get_pinger_pulse', Sonar, self.request_data) - rospy.spin() - - def listener(self): - ''' - Parse the response of the board. - ''' - print "Listening..." - - # We've found a packet now disect it. - response = self.ser.read(19) - # rospy.loginfo("Received: %s" % response) #uncomment for debugging - if self.check_CRC(response): - delete_last_lines(2) # Heard response! - # The checksum matches the data so split the response into each piece of data. 
- # For info on what these letters mean: https://docs.python.org/2/library/struct.html#format-characters - data = struct.unpack('>BffffH', response) - timestamps = np.array([data[4], data[1], data[2], data[3] ]) - print timestamps - return timestamps - else: - self.packet_error_alarm.raise_alarm( - problem_description="Sonar board checksum error.", - parameters={ - 'fault_info': {'data': response} - } - ) - return None - - @thread_lock(lock) - def request_data(self, srv): - ''' - A polling packet consists of only a header and checksum (CRC-16): - HEADER CHECKSUM - [ 0x41 | ] - ''' - self.ser.flushInput() - - message = struct.pack('B', 0x41) - message += self.CRC(message) - - # rospy.loginfo("Writing: %s" % binascii.hexlify(message)) # uncomment for debugging - - try: - self.ser.write('A') - print "Sent request..." - except: # Except only serial errors in the future. - self.disconnection_alarm.raise_alarm( - problem_description="Sonar board serial connection has been terminated." - ) - return False - return self.sonar_sensor.getPulseLocation(self.listener()) - - def CRC(self, message): - # You may have to change the checksum type. - # Check the crc16 module online to see how to do that. - crc = crc16.crc16xmodem(message, 0xFFFF) - return struct.pack('>H', crc) - - def check_CRC(self, message): - ''' - Given a message with a checksum as the last two bytes, this will return True or False - if the checksum matches the given message. - ''' - msg_checksum = message[-2:] - raw_message = message[:-2] - crc = crc16.crc16xmodem(raw_message, 0xFFFF) - - # If the two match the message was correct - if crc == struct.unpack('>H', msg_checksum)[0]: - return True - else: - return False - -class EchoLocatorLS(object): - ''' - Identifies the origin of a pulse in time and space given stamps of the time of detection of - the pulse by individual sensors. - c is the wave propagation speed in the medium of operation - ''' - # hydrophone locations should be the dict returned by rospy.get_param('~//hydrophones - def __init__(self, hydrophone_locations, c): # speed in m/s - self.hydrophone_locations = np.array([1, 1, 1]) # just for apending, will not be included - self.c = c - for key in hydrophone_locations: - sensor_location = np.array([hydrophone_locations[key]['x'], hydrophone_locations[key]['y'], hydrophone_locations[key]['z']]) - self.hydrophone_locations = np.vstack((self.hydrophone_locations, sensor_location)) - self.hydrophone_locations = self.hydrophone_locations[1:] - - alarm_broadcaster = AlarmBroadcaster() - self.locate_pulse_error_alarm = alarm_broadcaster.add_alarm( - name='sonar_pulse_locating_error', - action_required=False, - severity=2 - ) - print "David: least-squares" - - def getPulseLocation(self, timestamps): - ''' - Returns a ros message with the location and time of emission of a pinger pulse. 
- ''' - assert timestamps.size == self.hydrophone_locations.shape[0] - self.timestamps = timestamps - init_guess = np.array([1, 1, 1]) - opt = {'disp': 1} - opt_method = 'Powell' - result = optimize.minimize(self._getCost, init_guess, method=opt_method, options=opt, tol=1e-15) - print result.message - resp_data = SonarResponse() - if(result.success): - resp_data.x = result.x[0] - resp_data.y = result.x[1] - resp_data.z = result.x[2] - else: - resp_data.x = 0 - resp_data.y = 0 - resp_data.z = 0 - self.locate_pulse_error_alarm.raise_alarm( - problem_description=("SciPy optimize, using method '" + opt_method - + "', failed to converge on a pinger pulse location."), - parameters={ - 'fault_info': {'data': result.message} - } - ) - return resp_data - - def _getCost(self, potential_pulse): - ''' - Compares the difference in observed and theoretical difference in time of arrival - between the hydrophones and the reference hydrophone for potential source origins. - - Note: when there are 4 timestamps (not including reference), this cost is convex - SciPy Optimize will converge on the correct source origin. - With only 3 time stamps, minimization methods will not convrge to the correct - result, however, repeating the process for the source in the same location, - all of the results from minimizing this cost lie on a single 3D line - ''' - cost = 0 - t = self.timestamps - x0 = self.hydrophone_locations[0,0] - y0 = self.hydrophone_locations[0,1] - z0 = self.hydrophone_locations[0,2] - x = potential_pulse[0] - y = potential_pulse[1] - z = potential_pulse[2] - d0 = np.sqrt((x0 - x)**2 + (y0 - x)**2 + (z0 - x)**2) - for i in xrange(1, self.hydrophone_locations.shape[0]): - xi = self.hydrophone_locations[i,0] - yi = self.hydrophone_locations[i,1] - zi = self.hydrophone_locations[i,2] - di = np.sqrt((xi - x)**2 + (yi - x)**2 + (zi - x)**2) - hydro_i_cost = (di - d0 - self.c * t[i])**2 - cost = cost + hydro_i_cost - return cost - -def testFile(filename): - ''' - Runs the multilateration algorithm on timestamps written to a file in the following format: - [ ref_tstamp tstamp1 tstamp2 tstamp3 ] - lines that do not begin with '[' are ignored - ''' - hydrophone_locations = rospy.get_param('~/sonar_driver/hydrophones') #DBG - locator = EchoLocatorLS(hydrophone_locations, 1484) - with open(filename, "r") as data_file: - for line in data_file: - if line[0] == '[': - words =line.split() - timestamps = [] - for word in words: - if word[0] != "[" and word[0] != "]": - timestamps += [eval(word)] - print locator.getPulseLocation(np.array(timestamps)), "\n" - -def delete_last_lines(n=1): - for _ in range(n): - sys.stdout.write(CURSOR_UP_ONE) - sys.stdout.write(ERASE_LINE) - - -if __name__ == "__main__": - d = Sub8Sonar(rospy.get_param('~/sonar_driver/hydrophones'), - "/dev/serial/by-id/usb-FTDI_FT232R_USB_UART_AH02X4IE-if00-port0", - 19200) - # testFile("/home/santiago/bags/SonarTestData.txt") From 546182b0c2c58228b2fb07e0498355cf85506998 Mon Sep 17 00:00:00 2001 From: DSsoto Date: Mon, 4 Jul 2016 02:59:29 -0400 Subject: [PATCH 072/267] SONAR: add 'Y' hydrophone configuration --- drivers/sub8_sonar/launch/test.launch | 19 ++++++++++--------- 1 file changed, 10 insertions(+), 9 deletions(-) diff --git a/drivers/sub8_sonar/launch/test.launch b/drivers/sub8_sonar/launch/test.launch index ee8abe7b..4d4a5432 100644 --- a/drivers/sub8_sonar/launch/test.launch +++ b/drivers/sub8_sonar/launch/test.launch @@ -1,19 +1,20 @@ + - - + - + + + { hydro0: {x: 0, y: 0, z: 0}, + hydro1: {x: 0, y: 25.4, z: 0}, + hydro2: {x: -22, y: 
-12.7, z: 0}, + hydro3: {x: 22, y: -12.7, z: 0}} + \ No newline at end of file From 0cc8add9de57b3d83fb1ab1c951bf8a8f4d85881 Mon Sep 17 00:00:00 2001 From: DSsoto Date: Mon, 4 Jul 2016 03:01:57 -0400 Subject: [PATCH 073/267] SONAR: simulate and analyze effects of gaussian noise on timestamps --- drivers/sub8_sonar/nodes/sonar_test.py | 86 +++++++++++++++++++++----- 1 file changed, 69 insertions(+), 17 deletions(-) diff --git a/drivers/sub8_sonar/nodes/sonar_test.py b/drivers/sub8_sonar/nodes/sonar_test.py index b6cb2b16..1b20b909 100755 --- a/drivers/sub8_sonar/nodes/sonar_test.py +++ b/drivers/sub8_sonar/nodes/sonar_test.py @@ -1,11 +1,15 @@ #!/usr/bin/env python +from __future__ import division import numpy as np +import numpy.linalg as la +from scipy import optimize import rospy import rosparam import random -from sub8_sonar import EchoLocator +from sub8_sonar import EchoLocator, EchoLocatorLS +import sys -c = 1484 +c = 1.484 # millimeters/microsecond class SonarSim(object): def __init__(self, hydrophone_locations, wave_propagation_speed_mps): @@ -34,25 +38,73 @@ def position(self): return np.array([self.x, self.y, self.z]) def __repr__(self): - return "Pulse:\t" + "x: " + str(self.x) + " y: " + str(self.y) + " z: " + str(self.z) + return "Pulse:\t" + "x: " + str(self.x) + " y: " + str(self.y) + " z: " \ + + str(self.z) + " (mm)" if __name__ == '__main__': - def print_red(obj): - print '\x1b[31m' + obj.__repr__() + '\x1b[0m' + def print_green(str): + print '\x1b[32m' + str + '\x1b[0m' + + def error(obs, exp): + # Interesting, faster, but not as accurate + # alpha = np.arccos(np.clip(np.dot(obs/la.norm(obs),exp/la.norm(exp)),-1,1))*180/np.pi + alpha = 2*np.arctan2(la.norm(la.norm(exp)*obs-la.norm(obs)*exp),la.norm(la.norm(exp)*obs+la.norm(obs)*exp)) + mag_error = 100 * (la.norm(obs) - la.norm(exp)) / la.norm(exp) + return ('\x1b[31m' if (mag_error == -100) else "") + ("Errors: directional=" + str(alpha) + "deg").ljust(42) \ + + ("magnitude=" + str(mag_error) + "%").ljust(20) + + def delete_last_lines(n=1): + CURSOR_UP_ONE = '\x1b[1A' + ERASE_LINE = '\x1b[2K' + for _ in range(n): + sys.stdout.write(CURSOR_UP_ONE) + sys.stdout.write(ERASE_LINE) hydrophone_locations = rospy.get_param('~/sonar_test/hydrophones') sonar = SonarSim(hydrophone_locations, c) locator = EchoLocator(hydrophone_locations, c) - # pulses will be generated with x, y, z in range [-pulse_range, pulse_range + 1] - pulse_range = 1000000 - rand_args = [-pulse_range, pulse_range + 1] - for i in range(10): - pulse = Pulse(random.randrange(*rand_args), - random.randrange(*rand_args), - random.randrange(*rand_args), 0) - tstamps = sonar.listen(pulse) - tstamps = tstamps - tstamps[0] - print_red(pulse) - print "Simulated timestamps:\n\t", tstamps - response = locator.getPulseLocation(np.array(tstamps)) \ No newline at end of file + + # # Simulate individual pulses (Debugging Jakes Board) + # pulse = Pulse(-5251, -7620, 1470, 0) + # tstamps = sonar.listen(pulse) + # tstamps = tstamps - tstamps[0] + # print_green(pulse.__repr__()) + # print "Perfect timestamps: (microseconds)\n\t", tstamps + # res_msg = locator.getPulseLocation(np.array(tstamps)) + # res = np.array([res_msg.x, res_msg.y, res_msg.z]) + # print "\t\x1b[33m".ljust(22) + error(res, pulse.position()) + "\x1b[0m" + + # pulses will be generated with inside a cube with side-length $(pulse_range) (mm) + for h in range(3,8): + # smallest cube will be a meter wide, largest will be 10 km wide + pulse_range = 10**h # in mm + rand_args = [-pulse_range, pulse_range + 1] + 
num_pulses = 10 + print "\n\x1b[1mGenerating " + str(num_pulses) + " pulses within a " \ + + str(2*pulse_range/1000) + " meters wide cube\x1b[0m\n" + + for i in range(num_pulses): + pulse = Pulse(random.randrange(*rand_args), + random.randrange(*rand_args), + random.randrange(*rand_args), 0) + tstamps = sonar.listen(pulse) + tstamps = tstamps - tstamps[0] + print_green(str(i).ljust(2) + str(pulse)) + print "Perfect timestamps: (microseconds)\n\t", tstamps + res_msg = locator.getPulseLocation(np.array(tstamps)) + delete_last_lines(0) + res = np.array([res_msg.x, res_msg.y, res_msg.z]) + print "\t\x1b[33m".ljust(22) + error(res, pulse.position()) + "\x1b[0m" + print "Progressively adding noise to timestamps..." + + for j in range(-5, 2): + sigma = 10**j + noisy_tstamps = [x + np.random.normal(0, sigma) for x in tstamps] + noisy_tstamps[0] = 0 + print "Noisy timestamps:\n\t", noisy_tstamps + res_msg = locator.getPulseLocation(np.array(noisy_tstamps)) + res = np.array([res_msg.x, res_msg.y, res_msg.z]) + delete_last_lines(4) + print "\t\x1b[33m" + ("sigma: " + str(sigma)).ljust(16) \ + + error(res, pulse.position()) + "\x1b[0m" \ No newline at end of file From e10247a2ab287b0a1c913db9675b687c2c26da16 Mon Sep 17 00:00:00 2001 From: DSsoto Date: Mon, 4 Jul 2016 05:14:17 -0400 Subject: [PATCH 074/267] SONAR: consolidate separate bancroft and least-squares localizers into Multilaterator class --- drivers/sub8_sonar/nodes/sonar_test.py | 24 +-- drivers/sub8_sonar/sub8_sonar/__init__.py | 3 +- drivers/sub8_sonar/sub8_sonar/sonar_driver.py | 150 +++++++++++------- 3 files changed, 107 insertions(+), 70 deletions(-) diff --git a/drivers/sub8_sonar/nodes/sonar_test.py b/drivers/sub8_sonar/nodes/sonar_test.py index 1b20b909..0eed31ef 100755 --- a/drivers/sub8_sonar/nodes/sonar_test.py +++ b/drivers/sub8_sonar/nodes/sonar_test.py @@ -6,12 +6,12 @@ import rospy import rosparam import random -from sub8_sonar import EchoLocator, EchoLocatorLS +from sub8_sonar import Multilaterator import sys c = 1.484 # millimeters/microsecond -class SonarSim(object): +class ReceiverArraySim(object): def __init__(self, hydrophone_locations, wave_propagation_speed_mps): self.c = wave_propagation_speed_mps self.hydrophone_locations = np.array([0, 0, 0]) @@ -54,7 +54,7 @@ def error(obs, exp): return ('\x1b[31m' if (mag_error == -100) else "") + ("Errors: directional=" + str(alpha) + "deg").ljust(42) \ + ("magnitude=" + str(mag_error) + "%").ljust(20) - def delete_last_lines(n=1): + def delete_last_lines(n=0): CURSOR_UP_ONE = '\x1b[1A' ERASE_LINE = '\x1b[2K' for _ in range(n): @@ -62,16 +62,16 @@ def delete_last_lines(n=1): sys.stdout.write(ERASE_LINE) hydrophone_locations = rospy.get_param('~/sonar_test/hydrophones') - sonar = SonarSim(hydrophone_locations, c) - locator = EchoLocator(hydrophone_locations, c) + hydrophone_array = ReceiverArraySim(hydrophone_locations, c) + sonar = Multilaterator(hydrophone_locations, c, 'bancroft') # # Simulate individual pulses (Debugging Jakes Board) # pulse = Pulse(-5251, -7620, 1470, 0) - # tstamps = sonar.listen(pulse) + # tstamps = hydrophone_array.listen(pulse) # tstamps = tstamps - tstamps[0] # print_green(pulse.__repr__()) # print "Perfect timestamps: (microseconds)\n\t", tstamps - # res_msg = locator.getPulseLocation(np.array(tstamps)) + # res_msg = sonar.getPulseLocation(np.array(tstamps)) # res = np.array([res_msg.x, res_msg.y, res_msg.z]) # print "\t\x1b[33m".ljust(22) + error(res, pulse.position()) + "\x1b[0m" @@ -88,12 +88,12 @@ def delete_last_lines(n=1): pulse = 
Pulse(random.randrange(*rand_args), random.randrange(*rand_args), random.randrange(*rand_args), 0) - tstamps = sonar.listen(pulse) + tstamps = hydrophone_array.listen(pulse) tstamps = tstamps - tstamps[0] print_green(str(i).ljust(2) + str(pulse)) print "Perfect timestamps: (microseconds)\n\t", tstamps - res_msg = locator.getPulseLocation(np.array(tstamps)) - delete_last_lines(0) + res_msg = sonar.getPulseLocation(np.array(tstamps)) + delete_last_lines(4) # more concise output res = np.array([res_msg.x, res_msg.y, res_msg.z]) print "\t\x1b[33m".ljust(22) + error(res, pulse.position()) + "\x1b[0m" print "Progressively adding noise to timestamps..." @@ -103,8 +103,8 @@ def delete_last_lines(n=1): noisy_tstamps = [x + np.random.normal(0, sigma) for x in tstamps] noisy_tstamps[0] = 0 print "Noisy timestamps:\n\t", noisy_tstamps - res_msg = locator.getPulseLocation(np.array(noisy_tstamps)) + res_msg = sonar.getPulseLocation(np.array(noisy_tstamps)) res = np.array([res_msg.x, res_msg.y, res_msg.z]) - delete_last_lines(4) + delete_last_lines(4) # more concise output print "\t\x1b[33m" + ("sigma: " + str(sigma)).ljust(16) \ + error(res, pulse.position()) + "\x1b[0m" \ No newline at end of file diff --git a/drivers/sub8_sonar/sub8_sonar/__init__.py b/drivers/sub8_sonar/sub8_sonar/__init__.py index c9803fdd..84fb9960 100644 --- a/drivers/sub8_sonar/sub8_sonar/__init__.py +++ b/drivers/sub8_sonar/sub8_sonar/__init__.py @@ -1,2 +1 @@ -from sonar_driver import EchoLocator -from sonar_driver import EchoLocatorLS \ No newline at end of file +from sonar_driver import Multilaterator \ No newline at end of file diff --git a/drivers/sub8_sonar/sub8_sonar/sonar_driver.py b/drivers/sub8_sonar/sub8_sonar/sonar_driver.py index e7690893..e51e5ced 100755 --- a/drivers/sub8_sonar/sub8_sonar/sonar_driver.py +++ b/drivers/sub8_sonar/sub8_sonar/sonar_driver.py @@ -1,8 +1,8 @@ #!/usr/bin/python - from __future__ import division import math import numpy as np +from scipy import optimize import rospy import rosparam @@ -18,12 +18,9 @@ import time import crc16 -lock = threading.Lock() - import sys -CURSOR_UP_ONE = '\x1b[1A' -ERASE_LINE = '\x1b[2K' +lock = threading.Lock() class Sub8Sonar(): ''' @@ -34,7 +31,7 @@ class Sub8Sonar(): TODO: Add a function to try and reconnect to the serial port if we lose connection. TODO: Express pulse location in map frame ''' - def __init__(self, c, hydrophone_locations, port, baud=9600): + def __init__(self, method, c, hydrophone_locations, port, baud=19200): rospy.init_node("sonar") alarm_broadcaster = AlarmBroadcaster() @@ -58,7 +55,7 @@ def __init__(self, c, hydrophone_locations, port, baud=9600): self.c = c self.hydrophone_locations = hydrophone_locations - self.sonar_sensor = EchoLocator(hydrophone_locations, self.c) # speed of sound in m/s + self.sonar_sensor = Multilaterator(hydrophone_locations, self.c, method) # speed of sound in m/s rospy.Service('~/sonar/get_pinger_pulse', Sonar, self.request_data) print "\x1b[32mSub8 sonar driver initialized\x1b[0m" @@ -92,18 +89,8 @@ def listener(self): @thread_lock(lock) def request_data(self, srv): - ''' - A polling packet consists of only a header and checksum (CRC-16): - HEADER CHECKSUM - [ 0x41 | ] - ''' self.ser.flushInput() - message = struct.pack('B', 0x41) - message += self.CRC(message) - - # rospy.loginfo("Writing: %s" % binascii.hexlify(message)) # uncomment for debugging - try: self.ser.write('A') print "Sent request..." 
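With this patch the test harness and driver meet in Multilaterator: ReceiverArraySim fabricates timestamps and Multilaterator inverts them, with the algorithm selected by name. A hypothetical round trip in the style of sonar_test.py (units are millimeters and microseconds; the hydrophone layout is the 'Y' configuration from the launch file):

    import numpy as np
    from sub8_sonar import Multilaterator

    c = 1.484  # mm/us, sound in water
    hydrophone_locations = {'hydro0': {'x': 0, 'y': 0, 'z': 0},
                            'hydro1': {'x': 0, 'y': 25.4, 'z': 0},
                            'hydro2': {'x': -22, 'y': -12.7, 'z': 0},
                            'hydro3': {'x': 22, 'y': -12.7, 'z': 0}}
    sonar = Multilaterator(hydrophone_locations, c, 'bancroft')
    # Given timestamps from ReceiverArraySim (or the real board):
    # response = sonar.getPulseLocation(np.array(tstamps))        # default: bancroft
    # response = sonar.getPulseLocation(np.array(tstamps), 'LS')  # per-call override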
@@ -135,56 +122,50 @@ def check_CRC(self, message):
         else:
             return False
 
-class EchoLocator(object):
+class Multilaterator(object):
     '''
-    Identifies the origin location of a pulse given differential times of arrival to the individual
-    sensors. c is the wave speed in the medium of operation.
-    Hydrohone coordinates are expected in millimeters, result will be given in millimeters.
+    Finds the origin location of a pulse given differential times of
+    arrival to the individual sensors. c is the wave speed in the medium of operation.
+    Units:
+        Hydrophone coordinates are expected in millimeters, pulse location will be given in millimeters.
+        Timestamps are expected in microseconds. c is expected in millimeters per microsecond
+    Note:
+        hydrophone locations should be the dict returned by rospy.get_param('~//hydrophones
     '''
-    # hydrophone locations should be the dict returned by rospy.get_param('~//hydrophones
-    def __init__(self, hydrophone_locations, c):  # speed in m/s
-        self.c = c
+    def __init__(self, hydrophone_locations, c, method):  # speed in millimeters/microsecond
         self.hydrophone_locations = []
         for key in hydrophone_locations:
             sensor_location = np.array([hydrophone_locations[key]['x'], hydrophone_locations[key]['y'], hydrophone_locations[key]['z']])
             self.hydrophone_locations += [sensor_location]
+        self.c = c
+        self.method = method
+        print "\x1b[32mSpeed of Sound (c):", self.c, "millimeter/microsecond\x1b[0m"
 
-        alarm_broadcaster = AlarmBroadcaster()
-        self.locate_pulse_error_alarm = alarm_broadcaster.add_alarm(
-            name='sonar_pulse_locating_error',
-            action_required=False,
-            severity=2
-        )
-        print "\x1b[32mMultilateration algorithm: bancroft"
-        print "Speed of Sound (c):", self.c, "m/s\x1b[0m"
-
-    def getPulseLocation(self, timestamps):
+    def getPulseLocation(self, timestamps, method=None):
         '''
         Returns a ros message with the location and time of emission of a pinger pulse.
''' - res = self.estimate_pos(timestamps, self.hydrophone_locations) - # print "bancroft output:", res - if len(res) == 1: - source = res[0] - elif len(res) == 2: - source = [x for x in res if x[2] < 0] # Assume that the source is below us - if not source: - source = res[0] - else: - source = source[0] + if method == None: + method = self.method + # print "\x1b[32mMultilateration algorithm:", method, "\x1b[0m" + assert len(self.hydrophone_locations) == len(timestamps) + source = None + if method == 'bancroft': + source = self.estimate_pos_bancroft(timestamps) + elif method == 'LS': + source = self.estimate_pos_LS(timestamps) else: - source = [0, 0, 0] - + print method, "is not an available multilateration algorithm" + return response = SonarResponse() response.x = source[0] response.y = source[1] response.z = source[2] - print "Reconstructed Pulse:\n\t" + "x: " + str(response.x) + " y: " + str(response.y) + " z: " + str(response.z) + print "Reconstructed Pulse:\n\t" + "x: " + str(response.x) + " y: " + str(response.y) \ + + " z: " + str(response.z) + " (mm)" return response - def estimate_pos(self, reception_times, positions): - assert len(positions) == len(reception_times) - + def estimate_pos_bancroft(self, reception_times): N = len(reception_times) assert N >= 4 @@ -193,7 +174,7 @@ def estimate_pos(self, reception_times, positions): def get_B(delta): B = np.zeros((N, 4)) for i in xrange(N): - B[i] = np.concatenate([positions[i]/(self.c), [-reception_times[i]]]) + delta + B[i] = np.concatenate([self.hydrophone_locations[i]/(self.c), [-reception_times[i]]]) + delta return B delta = min([.1*np.random.randn(4) for i in xrange(10)], key=lambda delta: np.linalg.cond(get_B(delta))) @@ -209,7 +190,8 @@ def get_B(delta): L(Bpe, Bpe), 2*(L(Bpa, Bpe) - 1), L(Bpa, Bpa)) - if not Lambdas: return [] + if not Lambdas: + return [0, 0, 0] res = [] for Lambda in Lambdas: @@ -218,8 +200,62 @@ def get_B(delta): time = u[3] + delta[3] if any(reception_times[i] < time for i in xrange(N)): continue res.append(position*self.c) - - return res + if len(res) == 1: + source = res[0] + elif len(res) == 2: + source = [x for x in res if x[2] < 0] # Assume that the source is below us + if not source: + source = res[0] + else: + source = source[0] + else: + source = [0, 0, 0] + return source + + def estimate_pos_LS(self, timestamps): + ''' + Returns a ros message with the location and time of emission of a pinger pulse. + ''' + self.timestamps = timestamps + init_guess = np.array([1, 1, 1]) + opt = {'disp': 0} + opt_method = 'Powell' + result = optimize.minimize(self.cost_LS, init_guess, method=opt_method, options=opt, tol=1e-15) + if(result.success): + source = [result.x[0], result.x[1], result.x[2]] + else: + source = [0, 0, 0] + return source + + def cost_LS(self, potential_pulse): + ''' + Compares the difference in observed and theoretical difference in time of arrival + between the hydrophones and the reference hydrophone for potential source origins. + + Note: when there are 4 timestamps (not including reference), this cost is convex: + SciPy Optimize will converge on the correct source origin. 
+        With only 3 timestamps, minimization methods will not converge to the correct
+        result; however, repeating the process for a source in the same location,
+        all of the results from minimizing this cost lie on a single 3D line.
+        '''
+        cost = 0
+        t = self.timestamps
+        x0 = self.hydrophone_locations[0][0]
+        y0 = self.hydrophone_locations[0][1]
+        z0 = self.hydrophone_locations[0][2]
+        x = potential_pulse[0]
+        y = potential_pulse[1]
+        z = potential_pulse[2]
+        d0 = np.sqrt((x0 - x)**2 + (y0 - y)**2 + (z0 - z)**2)
+        for i in xrange(1, len(self.hydrophone_locations)):
+            xi = self.hydrophone_locations[i][0]
+            yi = self.hydrophone_locations[i][1]
+            zi = self.hydrophone_locations[i][2]
+            di = np.sqrt((xi - x)**2 + (yi - y)**2 + (zi - z)**2)
+            hydro_i_cost = (di - d0 - self.c * t[i])**2
+            cost = cost + hydro_i_cost
+        return cost
+
 
 def quadratic(a, b, c):
     discriminant = b*b - 4*a*c
@@ -230,13 +266,15 @@ def quadratic(a, b, c):
         return []
 
 
-    def delete_last_lines(n=1):
+    CURSOR_UP_ONE = '\x1b[1A'
+    ERASE_LINE = '\x1b[2K'
     for _ in range(n):
         sys.stdout.write(CURSOR_UP_ONE)
         sys.stdout.write(ERASE_LINE)
 
+
 if __name__ == "__main__":
-    d = Sub8Sonar(1484, rospy.get_param('~/sonar_driver/hydrophones'),
+    d = Sub8Sonar('bancroft', 1.484, rospy.get_param('~/sonar_driver/hydrophones'),
                   "/dev/serial/by-id/usb-FTDI_FT232R_USB_UART_AH02X4IE-if00-port0",
                   19200)
\ No newline at end of file

From f7b4c61b82c67d09f0fb5e4e618a58dec1b46b99 Mon Sep 17 00:00:00 2001
From: DSsoto
Date: Mon, 4 Jul 2016 08:44:57 -0400
Subject: [PATCH 075/267] SONAR: write new cost function

---
 drivers/sub8_sonar/launch/test.launch         |  9 +++---
 drivers/sub8_sonar/nodes/sonar_test.py        |  2 +-
 drivers/sub8_sonar/sub8_sonar/sonar_driver.py | 30 ++++++++++++++-----
 3 files changed, 28 insertions(+), 13 deletions(-)

diff --git a/drivers/sub8_sonar/launch/test.launch b/drivers/sub8_sonar/launch/test.launch
index 4d4a5432..50da7818 100644
--- a/drivers/sub8_sonar/launch/test.launch
+++ b/drivers/sub8_sonar/launch/test.launch
@@ -2,19 +2,18 @@
-
+
-
+
\ No newline at end of file
diff --git a/drivers/sub8_sonar/nodes/sonar_test.py b/drivers/sub8_sonar/nodes/sonar_test.py
index 0eed31ef..b0b6e209 100755
--- a/drivers/sub8_sonar/nodes/sonar_test.py
+++ b/drivers/sub8_sonar/nodes/sonar_test.py
@@ -63,7 +63,7 @@ def delete_last_lines(n=0):
 
     hydrophone_locations = rospy.get_param('~/sonar_test/hydrophones')
     hydrophone_array = ReceiverArraySim(hydrophone_locations, c)
-    sonar = Multilaterator(hydrophone_locations, c, 'bancroft')
+    sonar = Multilaterator(hydrophone_locations, c, 'LS')
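Aside: the sonar_driver.py hunks below replace the reference-hydrophone cost with one summed over every unique pair of receivers. The identity being minimized is worth stating on its own: for the true source x, |x - h_i| - |x - h_j| = c * (t_i - t_j) for every pair (i, j). A self-contained residual check under that identity (names here are illustrative, not from the patch; units are mm and microseconds):

import numpy as np
from itertools import combinations

def tdoa_residuals(x, hydrophone_locations, t, c=1.484):
    # One residual per hydrophone pair; all residuals are zero exactly when
    # x is consistent with the measured arrival-time differences.
    x = np.asarray(x, dtype=float)
    return [np.linalg.norm(x - hydrophone_locations[i]) -
            np.linalg.norm(x - hydrophone_locations[j]) -
            c * (t[i] - t[j])
            for i, j in combinations(range(len(hydrophone_locations)), 2)]

The cost function introduced below is simply the sum of the squares of these residuals.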
"millimeter/microsecond\x1b[0m" @@ -181,6 +185,7 @@ def get_B(delta): return B delta = min([.1*np.random.randn(4) for i in xrange(10)], key=lambda delta: np.linalg.cond(get_B(delta))) + # delta = np.zeros(4) # gives very good heading for noisy timestamps, although range is completely unreliable B = get_B(delta) a = np.array([0.5 * L(B[i], B[i]) for i in xrange(N)]) @@ -220,7 +225,7 @@ def estimate_pos_LS(self, timestamps): Returns a ros message with the location and time of emission of a pinger pulse. ''' self.timestamps = timestamps - init_guess = np.array([1, 1, 1]) + init_guess = np.random.normal(0,100,3) opt = {'disp': 0} opt_method = 'Powell' result = optimize.minimize(self.cost_LS, init_guess, method=opt_method, options=opt, tol=1e-15) @@ -233,16 +238,27 @@ def estimate_pos_LS(self, timestamps): def cost_LS(self, potential_pulse): ''' Compares the difference in observed and theoretical difference in time of arrival - between the hydrophones and the reference hydrophone for potential source origins. + between tevery unique pair of hydrophones. - Note: when there are 4 timestamps (not including reference), this cost is convex: - SciPy Optimize will converge on the correct source origin. - With only 3 time stamps, minimization methods will not convrge to the correct - result, however, repeating the process for the source in the same location, - all of the results from minimizing this cost lie on a single 3D line + Note: the result will be along the direction of the heading but not at the right distance. ''' cost = 0 t = self.timestamps + c = self.c + x = np.array(potential_pulse) + for pair in self.pairs: + h0 = self.hydrophone_locations[pair[0]] + h1 = self.hydrophone_locations[pair[1]] + residual = la.norm(x-h0) - la.norm(x-h1) - c*(t[pair[0]] - t[pair[1]]) + cost += residual**2 + return cost + + def cost_LS2(self, potential_pulse): + """ + Slightly less accurate than the one above in terms of heading but much faster. + """ + cost = 0 + t = self.timestamps x0 = self.hydrophone_locations[0][0] y0 = self.hydrophone_locations[0][1] z0 = self.hydrophone_locations[0][2] From eb54f05f499ef2e0237c566bfcc675a9b64f1efe Mon Sep 17 00:00:00 2001 From: DSsoto Date: Tue, 5 Jul 2016 00:54:15 -0400 Subject: [PATCH 076/267] SONAR: address new PR167 comments --- drivers/sub8_sonar/sub8_sonar/sonar_driver.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/drivers/sub8_sonar/sub8_sonar/sonar_driver.py b/drivers/sub8_sonar/sub8_sonar/sonar_driver.py index 9fe85468..e793cf4c 100755 --- a/drivers/sub8_sonar/sub8_sonar/sonar_driver.py +++ b/drivers/sub8_sonar/sub8_sonar/sonar_driver.py @@ -9,9 +9,6 @@ import rospy import rosparam -import rospy -import rosparam - from sub8_msgs.srv import Sonar, SonarResponse from sub8_ros_tools import thread_lock, make_header from sub8_alarm import AlarmBroadcaster From 638588423a5a0593c734d711f0657e2189f852c9 Mon Sep 17 00:00:00 2001 From: mattlangford Date: Sat, 23 Jul 2016 15:40:33 -0500 Subject: [PATCH 077/267] Marker mission and TF changes (#171) * UTILS: Update __init__. * SIM: Add to services to common Job class, fix logging issues in job_runner. * MISSION: Add visualization for marker mission. Attempt to fix defer errors. Make mission work. * TF: Rotate down camera 180 degrees. * UTILS: Add stamp in make_header. * COMMAND: Add global speed limit for pose editor * MISSION: Switch buoy_2d to boost. Manually threshold follow_orange_pipes. * SIM: Fix job_runner bug. Add rviz sub model to duck.launch. * UTIL: Make image_dumper take single bag or folder of bags. 
* BUOYS: Get ready for pool day.
* MARKER: Fixes for pool day.
* MISSIONS: Add toggles in missions for thresholding/boosting. Add classifier path.
---
 utils/sub8_ros_tools/sub8_ros_tools/__init__.py    |  2 +-
 utils/sub8_ros_tools/sub8_ros_tools/msg_helpers.py | 13 +++++++------
 2 files changed, 8 insertions(+), 7 deletions(-)

diff --git a/utils/sub8_ros_tools/sub8_ros_tools/__init__.py b/utils/sub8_ros_tools/sub8_ros_tools/__init__.py
index 76906122..8ef8b94a 100644
--- a/utils/sub8_ros_tools/sub8_ros_tools/__init__.py
+++ b/utils/sub8_ros_tools/sub8_ros_tools/__init__.py
@@ -13,7 +13,7 @@
     make_header, odom_sub, rosmsg_to_numpy, pose_to_numpy, twist_to_numpy,
     odometry_to_numpy, posetwist_to_numpy, make_wrench_stamped, make_pose_stamped,
-    numpy_to_quaternion, numpy_pair_to_pose, numpy_to_point, numpy_quat_pair_to_pose
+    numpy_to_quaternion, numpy_pair_to_pose, numpy_to_point, numpy_quat_pair_to_pose, numpy_to_twist
 )
 from threading_helpers import thread_lock
 from geometry_helpers import (make_rotation, normalize, skew_symmetric_cross, deskew, compose_transformation,
diff --git a/utils/sub8_ros_tools/sub8_ros_tools/msg_helpers.py b/utils/sub8_ros_tools/sub8_ros_tools/msg_helpers.py
index 2897a31a..82637d77 100644
--- a/utils/sub8_ros_tools/sub8_ros_tools/msg_helpers.py
+++ b/utils/sub8_ros_tools/sub8_ros_tools/msg_helpers.py
@@ -139,14 +139,15 @@ def numpy_quat_pair_to_pose(np_translation, np_quaternion):
     return geometry_msgs.Pose(position=position, orientation=orientation)
 
 
-def make_header(frame='/body'):
-    try:
-        cur_time = rospy.Time.now()
-    except rospy.ROSInitException:
-        cur_time = rospy.Time(0)
+def make_header(frame='/body', stamp=None):
+    if stamp is None:
+        try:
+            stamp = rospy.Time.now()
+        except rospy.ROSInitException:
+            stamp = rospy.Time(0)
 
     header = std_msgs.Header(
-        stamp=cur_time,
+        stamp=stamp,
         frame_id=frame
     )
     return header

From 9c546661be4af81e3cfc784c83457a92fed31c0b Mon Sep 17 00:00:00 2001
From: DSsoto
Date: Wed, 6 Jul 2016 02:28:12 -0400
Subject: [PATCH 078/267] SONAR: create multilateration module and expose it
 to outside world

---
 .../sub8_sonar/multilateration/__init__.py    |   1 +
 .../multilateration/multilateration.py        | 207 +++++++++++++++++
 drivers/sub8_sonar/nodes/plotter.py           |   7 +-
 drivers/sub8_sonar/nodes/sonar_test.py        |  37 +---
 drivers/sub8_sonar/setup.py                   |   2 +-
 drivers/sub8_sonar/sub8_sonar/__init__.py     |   1 -
 drivers/sub8_sonar/sub8_sonar/sonar_driver.py | 215 ++++--------------
 7 files changed, 263 insertions(+), 207 deletions(-)
 create mode 100644 drivers/sub8_sonar/multilateration/__init__.py
 create mode 100644 drivers/sub8_sonar/multilateration/multilateration.py

diff --git a/drivers/sub8_sonar/multilateration/__init__.py b/drivers/sub8_sonar/multilateration/__init__.py
new file mode 100644
index 00000000..87a0c3ea
--- /dev/null
+++ b/drivers/sub8_sonar/multilateration/__init__.py
@@ -0,0 +1 @@
+from multilateration import Multilaterator, ReceiverArraySim, Pulse
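Aside: the __init__.py above is the entire public surface of the new package; everything after it is implementation. A minimal smoke test of the intended usage, in the module's own units (millimeters and microseconds); the hydrophone geometry here is invented for illustration and this must run inside the catkin workspace, since the module imports sub8_msgs for its service response type:

from multilateration import Multilaterator, ReceiverArraySim, Pulse

hydrophone_locations = {  # hypothetical square array, mm
    'hydro0': {'x': 0.0, 'y': 0.0, 'z': 0.0},
    'hydro1': {'x': 25.4, 'y': 0.0, 'z': 0.0},
    'hydro2': {'x': 0.0, 'y': 25.4, 'z': 0.0},
    'hydro3': {'x': 25.4, 'y': 25.4, 'z': 0.0},
}
c = 1.484  # speed of sound in water, mm/us
array = ReceiverArraySim(hydrophone_locations, c)
locator = Multilaterator(hydrophone_locations, c, 'LS')
tstamps = array.listen(Pulse(5000, -2000, -15000, 0))
locator.getPulseLocation(tstamps - tstamps[0])  # prints the reconstructed pulse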
diff --git a/drivers/sub8_sonar/multilateration/multilateration.py b/drivers/sub8_sonar/multilateration/multilateration.py
new file mode 100644
index 00000000..f88452a0
--- /dev/null
+++ b/drivers/sub8_sonar/multilateration/multilateration.py
@@ -0,0 +1,207 @@
+#!/usr/bin/python
+from __future__ import division
+import math
+import numpy as np
+import numpy.linalg as la
+from scipy import optimize
+from itertools import combinations
+
+from sub8_msgs.srv import Sonar, SonarResponse
+
+
+class Multilaterator(object):
+    '''
+    Finds the origin location of a pulse given differential times of
+    arrival to the individual sensors. c is the wave speed in the medium of operation.
+    Units:
+        Hydrophone coordinates are expected in millimeters, pulse location will be given in millimeters.
+        Timestamps are expected in microseconds. c is expected in millimeters per microsecond
+    Note:
+        hydrophone locations should be the dict returned by rospy.get_param('~//hydrophones
+    '''
+    def __init__(self, hydrophone_locations, c, method):  # speed in millimeters/microsecond
+        self.hydrophone_locations = []
+        for key in hydrophone_locations:
+            sensor_location = np.array([hydrophone_locations[key]['x'], hydrophone_locations[key]['y'], hydrophone_locations[key]['z']])
+            self.hydrophone_locations += [sensor_location]
+        self.pairs = list(combinations(range(len(hydrophone_locations)),2))
+        self.c = c
+        self.method = method
+        print "\x1b[32mSpeed of Sound (c):", self.c, "millimeter/microsecond\x1b[0m"
+
+    def getPulseLocation(self, timestamps, method=None):
+        '''
+        Returns a ros message with the location and time of emission of a pinger pulse.
+        '''
+        if method == None:
+            method = self.method
+        # print "\x1b[32mMultilateration algorithm:", method, "\x1b[0m"
+        assert len(self.hydrophone_locations) == len(timestamps)
+        source = None
+        if method == 'bancroft':
+            source = self.estimate_pos_bancroft(timestamps)
+        elif method == 'LS':
+            source = self.estimate_pos_LS(timestamps)
+        else:
+            print method, "is not an available multilateration algorithm"
+            return
+        response = SonarResponse()
+        response.x = source[0]
+        response.y = source[1]
+        response.z = source[2]
+        print "Reconstructed Pulse:\n\t" + "x: " + str(response.x) + " y: " + str(response.y) \
+            + " z: " + str(response.z) + " (mm)"
+        return response
+
+    def estimate_pos_bancroft(self, reception_times):
+        N = len(reception_times)
+        assert N >= 4
+
+        L = lambda a, b: a[0]*b[0] + a[1]*b[1] + a[2]*b[2] - a[3]*b[3]
+
+        def get_B(delta):
+            B = np.zeros((N, 4))
+            for i in xrange(N):
+                B[i] = np.concatenate([self.hydrophone_locations[i]/(self.c), [-reception_times[i]]]) + delta
+            return B
+
+        delta = min([.1*np.random.randn(4) for i in xrange(10)], key=lambda delta: np.linalg.cond(get_B(delta)))
+        # delta = np.zeros(4)  # gives very good heading for noisy timestamps, although range is completely unreliable
+
+        B = get_B(delta)
+        a = np.array([0.5 * L(B[i], B[i]) for i in xrange(N)])
+        e = np.ones(N)
+
+        Bpe = np.linalg.lstsq(B, e)[0]
+        Bpa = np.linalg.lstsq(B, a)[0]
+
+        Lambdas = quadratic(
+            L(Bpe, Bpe),
+            2*(L(Bpa, Bpe) - 1),
+            L(Bpa, Bpa))
+        if not Lambdas:
+            return [0, 0, 0]
+
+        res = []
+        for Lambda in Lambdas:
+            u = Bpa + Lambda * Bpe
+            position = u[:3] - delta[:3]
+            time = u[3] + delta[3]
+            if any(reception_times[i] < time for i in xrange(N)): continue
+            res.append(position*self.c)
+        if len(res) == 1:
+            source = res[0]
+        elif len(res) == 2:
+            source = [x for x in res if x[2] < 0]   # Assume that the source is below us
+            if not source:
+                source = res[0]
+            else:
+                source = source[0]
+        else:
+            source = [0, 0, 0]
+        return source
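Aside: estimate_pos_bancroft above is Bancroft's closed-form GNSS algorithm transplanted to TDOA. Each row $B_i = [h_i/c,\ -t_i]$ and the unknown $u = [p/c,\ t]$ are treated as 4-vectors under the Lorentzian product

$\langle a, b\rangle = a_1 b_1 + a_2 b_2 + a_3 b_3 - a_4 b_4$,

and with $a_i = \tfrac{1}{2}\langle B_i, B_i\rangle$ and $e = (1, \dots, 1)$, the candidate solutions $u = B^{+}a + \lambda B^{+}e$ come from the scalar quadratic

$\langle B^{+}e, B^{+}e\rangle\,\lambda^2 + 2\,(\langle B^{+}a, B^{+}e\rangle - 1)\,\lambda + \langle B^{+}a, B^{+}a\rangle = 0$,

which is exactly the quadratic() call in the code. The small random delta only preconditions B before the two least-squares solves, and candidates that would have been received before they were emitted are discarded.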
+    def estimate_pos_LS(self, timestamps):
+        '''
+        Returns the estimated pulse origin [x, y, z] (mm); zeros if the minimization fails.
+        '''
+        self.timestamps = timestamps
+        init_guess = np.random.normal(0,100,3)
+        opt = {'disp': 0}
+        opt_method = 'Powell'
+        result = optimize.minimize(self.cost_LS, init_guess, method=opt_method, options=opt, tol=1e-15)
+        if result.success:
+            source = [result.x[0], result.x[1], result.x[2]]
+        else:
+            source = [0, 0, 0]
+        return source
+
+    def cost_LS(self, potential_pulse):
+        '''
+        Compares the difference in observed and theoretical difference in time of arrival
+        between every unique pair of hydrophones.
+
+        Note: the result will be along the direction of the heading but not at the right distance.
+        '''
+        cost = 0
+        t = self.timestamps
+        c = self.c
+        x = np.array(potential_pulse)
+        for pair in self.pairs:
+            h0 = self.hydrophone_locations[pair[0]]
+            h1 = self.hydrophone_locations[pair[1]]
+            residual = la.norm(x-h0) - la.norm(x-h1) - c*(t[pair[0]] - t[pair[1]])
+            cost += residual**2
+        return cost
+
+    def cost_LS2(self, potential_pulse):
+        """
+        Slightly less accurate than the one above in terms of heading but much faster.
+        """
+        cost = 0
+        t = self.timestamps
+        x0 = self.hydrophone_locations[0][0]
+        y0 = self.hydrophone_locations[0][1]
+        z0 = self.hydrophone_locations[0][2]
+        x = potential_pulse[0]
+        y = potential_pulse[1]
+        z = potential_pulse[2]
+        d0 = np.sqrt((x0 - x)**2 + (y0 - y)**2 + (z0 - z)**2)
+        for i in xrange(1, len(self.hydrophone_locations)):
+            xi = self.hydrophone_locations[i][0]
+            yi = self.hydrophone_locations[i][1]
+            zi = self.hydrophone_locations[i][2]
+            di = np.sqrt((xi - x)**2 + (yi - y)**2 + (zi - z)**2)
+            hydro_i_cost = (di - d0 - self.c * t[i])**2
+            cost = cost + hydro_i_cost
+        return cost
+
+
+class ReceiverArraySim(object):
+    """
+    Simulates an array of receivers that listens to point sources and returns the DTOA
+    (difference in time of arrival).
+    Base Units:
+        time - microseconds
+        length - millimeters
+    """
+    def __init__(self, hydrophone_locations, wave_propagation_speed_mps):
+        self.c = wave_propagation_speed_mps
+        self.hydrophone_locations = np.array([0, 0, 0])
+        for key in hydrophone_locations:
+            sensor_location = np.array([hydrophone_locations[key]['x'], hydrophone_locations[key]['y'], hydrophone_locations[key]['z']])
+            self.hydrophone_locations = np.vstack((self.hydrophone_locations, sensor_location))
+        self.hydrophone_locations = self.hydrophone_locations[1:]
+
+    def listen(self, pulse):
+        timestamps = []
+        for idx in range(4):
+            src_range = np.sqrt(sum(np.square(pulse.position() - self.hydrophone_locations[idx])))
+            timestamps += [pulse.t + src_range / self.c]
+        return np.array(timestamps)
+
+class Pulse(object):
+    """
+    Represents an omnidirectional wave or impulse emitted from a point source
+    """
+    def __init__(self, x, y, z, t):
+        self.x = x
+        self.y = y
+        self.z = z
+        self.t = t
+
+    def position(self):
+        return np.array([self.x, self.y, self.z])
+
+    def __repr__(self):
+        return "Pulse:\t" + "x: " + str(self.x) + " y: " + str(self.y) + " z: " \
+            + str(self.z) + " (mm)"
+
+
+def quadratic(a, b, c):
+    discriminant = b*b - 4*a*c
+    if discriminant >= 0:
+        first_times_a = (-b+math.copysign(math.sqrt(discriminant), -b))/2
+        return [first_times_a/a, c/first_times_a]
+    else:
+        return []
\ No newline at end of file
diff --git a/drivers/sub8_sonar/nodes/plotter.py b/drivers/sub8_sonar/nodes/plotter.py
index f81499d8..93d8f7ad 100755
--- a/drivers/sub8_sonar/nodes/plotter.py
+++ b/drivers/sub8_sonar/nodes/plotter.py
@@ -19,7 +19,12 @@
 
 try:
     while(True):
-        pinger_pose = sonar()
+        try:
+            pinger_pose = sonar()
+        except Exception:
+            plt.close('all')
+            print
"\nSonar driver is down, shutting down plotter" + break print "x:", str(pinger_pose.x).rjust(15), "y:", str(pinger_pose.y).rjust(15), "z:", str(pinger_pose.z).rjust(15) ax.scatter(pinger_pose.x, pinger_pose.y, pinger_pose.z) plt.draw() diff --git a/drivers/sub8_sonar/nodes/sonar_test.py b/drivers/sub8_sonar/nodes/sonar_test.py index b0b6e209..59fed8f8 100755 --- a/drivers/sub8_sonar/nodes/sonar_test.py +++ b/drivers/sub8_sonar/nodes/sonar_test.py @@ -6,41 +6,9 @@ import rospy import rosparam import random -from sub8_sonar import Multilaterator +from multilateration import Multilaterator, ReceiverArraySim, Pulse import sys -c = 1.484 # millimeters/microsecond - -class ReceiverArraySim(object): - def __init__(self, hydrophone_locations, wave_propagation_speed_mps): - self.c = wave_propagation_speed_mps - self.hydrophone_locations = np.array([0, 0, 0]) - for key in hydrophone_locations: - sensor_location = np.array([hydrophone_locations[key]['x'], hydrophone_locations[key]['y'], hydrophone_locations[key]['z']]) - self.hydrophone_locations = np.vstack((self.hydrophone_locations, sensor_location)) - self.hydrophone_locations = self.hydrophone_locations[1:] - - def listen(self, pulse): - timestamps = [] - for idx in range(4): - src_range = np.sqrt(sum(np.square(pulse.position() - self.hydrophone_locations[idx]))) - timestamps += [pulse.t + src_range / self.c] - return np.array(timestamps) - -class Pulse(object): - def __init__(self, x, y, z, t): - self.x = x - self.y = y - self.z = z - self.t = t - - def position(self): - return np.array([self.x, self.y, self.z]) - - def __repr__(self): - return "Pulse:\t" + "x: " + str(self.x) + " y: " + str(self.y) + " z: " \ - + str(self.z) + " (mm)" - if __name__ == '__main__': def print_green(str): @@ -61,6 +29,7 @@ def delete_last_lines(n=0): sys.stdout.write(CURSOR_UP_ONE) sys.stdout.write(ERASE_LINE) + c = 1.484 # millimeters/microsecond hydrophone_locations = rospy.get_param('~/sonar_test/hydrophones') hydrophone_array = ReceiverArraySim(hydrophone_locations, c) sonar = Multilaterator(hydrophone_locations, c, 'LS') @@ -107,4 +76,4 @@ def delete_last_lines(n=0): res = np.array([res_msg.x, res_msg.y, res_msg.z]) delete_last_lines(4) # more concise output print "\t\x1b[33m" + ("sigma: " + str(sigma)).ljust(16) \ - + error(res, pulse.position()) + "\x1b[0m" \ No newline at end of file + + error(res, pulse.position()) + "\x1b[0m" diff --git a/drivers/sub8_sonar/setup.py b/drivers/sub8_sonar/setup.py index 997ca783..2486c904 100644 --- a/drivers/sub8_sonar/setup.py +++ b/drivers/sub8_sonar/setup.py @@ -5,7 +5,7 @@ # fetch values from package.xml setup_args = generate_distutils_setup( - packages=['sub8_sonar'], + packages=['sub8_sonar', 'multilateration'], ) setup(**setup_args) diff --git a/drivers/sub8_sonar/sub8_sonar/__init__.py b/drivers/sub8_sonar/sub8_sonar/__init__.py index 97e2ae96..e69de29b 100644 --- a/drivers/sub8_sonar/sub8_sonar/__init__.py +++ b/drivers/sub8_sonar/sub8_sonar/__init__.py @@ -1 +0,0 @@ -from sonar_driver import Multilaterator diff --git a/drivers/sub8_sonar/sub8_sonar/sonar_driver.py b/drivers/sub8_sonar/sub8_sonar/sonar_driver.py index e793cf4c..29a5e1e9 100755 --- a/drivers/sub8_sonar/sub8_sonar/sonar_driver.py +++ b/drivers/sub8_sonar/sub8_sonar/sonar_driver.py @@ -1,17 +1,11 @@ #!/usr/bin/python -from __future__ import division -import math -import numpy as np -import numpy.linalg as la -from scipy import optimize -from itertools import combinations - import rospy import rosparam from sub8_msgs.srv import Sonar, SonarResponse 
 from sub8_ros_tools import thread_lock, make_header
 from sub8_alarm import AlarmBroadcaster
+from multilateration import Multilaterator
 
 import threading
 import serial
@@ -19,7 +13,6 @@ import struct
 import struct
 import time
 import crc16
-
 import sys
 
 lock = threading.Lock()
@@ -29,7 +22,6 @@ class Sub8Sonar():
     '''
     Smart sensor that provides high level ROS code with the location of pinger pulses detected with
     Jake Easterling's Hydrophone board.
-    * Adapted from the actuator driver.
 
     TODO: Add a function to try and reconnect to the serial port if we lose connection.
     TODO: Express pulse location in map frame
@@ -60,11 +52,25 @@ def __init__(self, method, c, hydrophone_locations, port, baud=19200):
         self.hydrophone_locations = hydrophone_locations
         self.sonar_sensor = Multilaterator(hydrophone_locations, self.c, method)  # speed of sound in m/s
 
-        rospy.Service('~/sonar/get_pinger_pulse', Sonar, self.request_data)
+        rospy.Service('~/sonar/get_pinger_pulse', Sonar, self.get_pulse_from_timestamps)
         print "\x1b[32mSub8 sonar driver initialized\x1b[0m"
         rospy.spin()
 
-    def listener(self):
+    @thread_lock(lock)
+    def get_pulse_from_timestamps(self, srv):
+        self.ser.flushInput()
+
+        try:
+            self.ser.write('A')
+            print "Sent request..."
+        except:  # Except only serial errors in the future.
+            self.disconnection_alarm.raise_alarm(
+                problem_description="Sonar board serial connection has been terminated."
+            )
+            return False
+        return self.sonar_sensor.getPulseLocation(self.timestamp_listener())
+
+    def timestamp_listener(self):
         '''
         Parse the response of the board.
         '''
@@ -91,7 +97,7 @@ def listener(self):
         return None
 
     @thread_lock(lock)
-    def request_data(self, srv):
+    def get_pulse_from_raw_data(self, srv):
         self.ser.flushInput()
 
         try:
@@ -104,6 +110,32 @@ def request_data(self, srv):
             return False
         return self.sonar_sensor.getPulseLocation(self.listener())
 
+    def raw_data_listener(self):
+        '''
+        Parse the response of the board.
+        '''
+        print "Listening..."
+
+        # We've found a packet now dissect it.
+        response = self.ser.read(19)
+        # rospy.loginfo("Received: %s" % response) #uncomment for debugging
+        if self.check_CRC(response):
+            delete_last_lines(2)  # Heard response!
+            # The checksum matches the data so split the response into each piece of data.
+            # For info on what these letters mean: https://docs.python.org/2/library/struct.html#format-characters
+            data = struct.unpack('>BffffH', response)
+            timestamps = [data[4], data[1], data[2], data[3] ]
+            print "timestamps:", timestamps
+            return timestamps
+        else:
+            self.packet_error_alarm.raise_alarm(
+                problem_description="Sonar board checksum error.",
+                parameters={
+                    'fault_info': {'data': response}
+                }
+            )
+            return None
+
     def CRC(self, message):
         # You may have to change the checksum type.
         # Check the crc16 module online to see how to do that.
@@ -149,163 +155,6 @@ def check_CRC(self, message):
         else:
             return False
 
-class Multilaterator(object):
-    '''
-    Finds the origin location of a pulse given differential times of
-    arrival to the individual sensors. c is the wave speed in the medium of operation.
-    Units:
-        Hydrophone coordinates are expected in millimeters, pulse location will be given in millimeters.
-        Timestamps are expected in microseconds. 
c is expected in millimeters per microsecond - Note: - hydrophone locations should be the dict returned by rospy.get_param('~//hydrophones - ''' - def __init__(self, hydrophone_locations, c, method): # speed in millimeters/microsecond - self.hydrophone_locations = [] - for key in hydrophone_locations: - sensor_location = np.array([hydrophone_locations[key]['x'], hydrophone_locations[key]['y'], hydrophone_locations[key]['z']]) - self.hydrophone_locations += [sensor_location] - self.pairs = list(combinations(range(len(hydrophone_locations)),2)) - self.c = c - self.method = method - print "\x1b[32mSpeed of Sound (c):", self.c, "millimeter/microsecond\x1b[0m" - - def getPulseLocation(self, timestamps, method=None): - ''' - Returns a ros message with the location and time of emission of a pinger pulse. - ''' - if method == None: - method = self.method - # print "\x1b[32mMultilateration algorithm:", method, "\x1b[0m" - assert len(self.hydrophone_locations) == len(timestamps) - source = None - if method == 'bancroft': - source = self.estimate_pos_bancroft(timestamps) - elif method == 'LS': - source = self.estimate_pos_LS(timestamps) - else: - print method, "is not an available multilateration algorithm" - return - response = SonarResponse() - response.x = source[0] - response.y = source[1] - response.z = source[2] - print "Reconstructed Pulse:\n\t" + "x: " + str(response.x) + " y: " + str(response.y) \ - + " z: " + str(response.z) + " (mm)" - return response - - def estimate_pos_bancroft(self, reception_times): - N = len(reception_times) - assert N >= 4 - - L = lambda a, b: a[0]*b[0] + a[1]*b[1] + a[2]*b[2] - a[3]*b[3] - - def get_B(delta): - B = np.zeros((N, 4)) - for i in xrange(N): - B[i] = np.concatenate([self.hydrophone_locations[i]/(self.c), [-reception_times[i]]]) + delta - return B - - delta = min([.1*np.random.randn(4) for i in xrange(10)], key=lambda delta: np.linalg.cond(get_B(delta))) - # delta = np.zeros(4) # gives very good heading for noisy timestamps, although range is completely unreliable - - B = get_B(delta) - a = np.array([0.5 * L(B[i], B[i]) for i in xrange(N)]) - e = np.ones(N) - - Bpe = np.linalg.lstsq(B, e)[0] - Bpa = np.linalg.lstsq(B, a)[0] - - Lambdas = quadratic( - L(Bpe, Bpe), - 2*(L(Bpa, Bpe) - 1), - L(Bpa, Bpa)) - if not Lambdas: - return [0, 0, 0] - - res = [] - for Lambda in Lambdas: - u = Bpa + Lambda * Bpe - position = u[:3] - delta[:3] - time = u[3] + delta[3] - if any(reception_times[i] < time for i in xrange(N)): continue - res.append(position*self.c) - if len(res) == 1: - source = res[0] - elif len(res) == 2: - source = [x for x in res if x[2] < 0] # Assume that the source is below us - if not source: - source = res[0] - else: - source = source[0] - else: - source = [0, 0, 0] - return source - - def estimate_pos_LS(self, timestamps): - ''' - Returns a ros message with the location and time of emission of a pinger pulse. - ''' - self.timestamps = timestamps - init_guess = np.random.normal(0,100,3) - opt = {'disp': 0} - opt_method = 'Powell' - result = optimize.minimize(self.cost_LS, init_guess, method=opt_method, options=opt, tol=1e-15) - if(result.success): - source = [result.x[0], result.x[1], result.x[2]] - else: - source = [0, 0, 0] - return source - - def cost_LS(self, potential_pulse): - ''' - Compares the difference in observed and theoretical difference in time of arrival - between tevery unique pair of hydrophones. - - Note: the result will be along the direction of the heading but not at the right distance. 
-        '''
-        cost = 0
-        t = self.timestamps
-        c = self.c
-        x = np.array(potential_pulse)
-        for pair in self.pairs:
-            h0 = self.hydrophone_locations[pair[0]]
-            h1 = self.hydrophone_locations[pair[1]]
-            residual = la.norm(x-h0) - la.norm(x-h1) - c*(t[pair[0]] - t[pair[1]])
-            cost += residual**2
-        return cost
-
-    def cost_LS2(self, potential_pulse):
-        """
-        Slightly less accurate than the one above in terms of heading but much faster.
-        """
-        cost = 0
-        t = self.timestamps
-        x0 = self.hydrophone_locations[0][0]
-        y0 = self.hydrophone_locations[0][1]
-        z0 = self.hydrophone_locations[0][2]
-        x = potential_pulse[0]
-        y = potential_pulse[1]
-        z = potential_pulse[2]
-        d0 = np.sqrt((x0 - x)**2 + (y0 - y)**2 + (z0 - z)**2)
-        for i in xrange(1, len(self.hydrophone_locations)):
-            xi = self.hydrophone_locations[i][0]
-            yi = self.hydrophone_locations[i][1]
-            zi = self.hydrophone_locations[i][2]
-            di = np.sqrt((xi - x)**2 + (yi - y)**2 + (zi - z)**2)
-            hydro_i_cost = (di - d0 - self.c * t[i])**2
-            cost = cost + hydro_i_cost
-        return cost
-
-
-def quadratic(a, b, c):
-    discriminant = b*b - 4*a*c
-    if discriminant >= 0:
-        first_times_a = (-b+math.copysign(math.sqrt(discriminant), -b))/2
-        return [first_times_a/a, c/first_times_a]
-    else:
-        return []
-
-
 def delete_last_lines(n=1):
     CURSOR_UP_ONE = '\x1b[1A'
     ERASE_LINE = '\x1b[2K'
     for _ in range(n):
         sys.stdout.write(CURSOR_UP_ONE)
         sys.stdout.write(ERASE_LINE)
 
 if __name__ == "__main__":
-    d = Sub8Sonar('bancroft', 1.484, rospy.get_param('~/sonar_driver/hydrophones'),
+    d = Sub8Sonar('LS', 1.484, rospy.get_param('~/sonar_driver/hydrophones'),
                   "/dev/serial/by-id/usb-FTDI_FT232R_USB_UART_AH02X4IE-if00-port0",
                   19200)
\ No newline at end of file

From 0d91619026eb62f6e890537dfe681c1e76111f20 Mon Sep 17 00:00:00 2001
From: DSsoto
Date: Thu, 7 Jul 2016 09:06:02 -0400
Subject: [PATCH 079/267] SONAR: work on processing signals from Jake's board

---
 drivers/sub8_sonar/launch/test.launch         |  8 +-
 drivers/sub8_sonar/nodes/sonar_test.py        | 63 ++++++------
 drivers/sub8_sonar/sub8_sonar/sonar_driver.py | 95 +++++++++++------
 3 files changed, 105 insertions(+), 61 deletions(-)

diff --git a/drivers/sub8_sonar/launch/test.launch b/drivers/sub8_sonar/launch/test.launch
index 50da7818..7eff7b7f 100644
--- a/drivers/sub8_sonar/launch/test.launch
+++ b/drivers/sub8_sonar/launch/test.launch
@@ -2,18 +2,18 @@
-
+
-
+
\ No newline at end of file
diff --git a/drivers/sub8_sonar/nodes/sonar_test.py b/drivers/sub8_sonar/nodes/sonar_test.py
index 59fed8f8..d4aa9108 100755
--- a/drivers/sub8_sonar/nodes/sonar_test.py
+++ b/drivers/sub8_sonar/nodes/sonar_test.py
@@ -45,35 +45,38 @@ def delete_last_lines(n=0):
     # pulses will be generated inside a cube with side-length $(pulse_range) (mm)
-    for h in range(3,8):
-        # smallest cube will be a meter wide, largest will be 10 km wide
-        pulse_range = 10**h  # in mm
-        rand_args = [-pulse_range, pulse_range + 1]
-        num_pulses = 10
-        print "\n\x1b[1mGenerating " + str(num_pulses) + " pulses within a " \
-            + str(2*pulse_range/1000) + " meters wide cube\x1b[0m\n"
-
-        for i in range(num_pulses):
-            pulse = Pulse(random.randrange(*rand_args),
-                          random.randrange(*rand_args),
-                          random.randrange(*rand_args), 0)
-            tstamps = hydrophone_array.listen(pulse)
-            tstamps = tstamps - tstamps[0]
-            print_green(str(i).ljust(2) + str(pulse))
-            print "Perfect timestamps: (microseconds)\n\t", tstamps
-            res_msg = sonar.getPulseLocation(np.array(tstamps))
-            delete_last_lines(4)  # more concise output
-            res = np.array([res_msg.x, res_msg.y, res_msg.z])
-            print "\t\x1b[33m".ljust(22) + error(res, pulse.position()) + "\x1b[0m"
-            print "Progressively adding noise to timestamps..."
-
-            for j in range(-5, 2):
-                sigma = 10**j
-                noisy_tstamps = [x + np.random.normal(0, sigma) for x in tstamps]
-                noisy_tstamps[0] = 0
-                print "Noisy timestamps:\n\t", noisy_tstamps
-                res_msg = sonar.getPulseLocation(np.array(noisy_tstamps))
-                res = np.array([res_msg.x, res_msg.y, res_msg.z])
+    try:
+        for h in range(3,8):
+            # smallest cube will be a meter wide, largest will be 10 km wide
+            pulse_range = 10**h  # in mm
+            rand_args = [-pulse_range, pulse_range + 1]
+            num_pulses = 10
+            print "\n\x1b[1mGenerating " + str(num_pulses) + " pulses within a " \
+                + str(2*pulse_range/1000) + " meters wide cube\x1b[0m\n"
+
+            for i in range(num_pulses):
+                pulse = Pulse(random.randrange(*rand_args),
+                              random.randrange(*rand_args),
+                              random.randrange(*rand_args), 0)
+                tstamps = hydrophone_array.listen(pulse)
+                tstamps = tstamps - tstamps[0]
+                print_green(str(i).ljust(2) + str(pulse))
+                print "Perfect timestamps: (microseconds)\n\t", tstamps
+                res_msg = sonar.getPulseLocation(np.array(tstamps))
                 delete_last_lines(4)  # more concise output
+                res = np.array([res_msg.x, res_msg.y, res_msg.z])
+                print "\t\x1b[33m".ljust(22) + error(res, pulse.position()) + "\x1b[0m"
+                print "Progressively adding noise to timestamps..."
+
+                for j in range(-5, 2):
+                    sigma = 10**j
+                    noisy_tstamps = [x + np.random.normal(0, sigma) for x in tstamps]
+                    noisy_tstamps[0] = 0
+                    print "Noisy timestamps:\n\t", noisy_tstamps
+                    res_msg = sonar.getPulseLocation(np.array(noisy_tstamps))
+                    res = np.array([res_msg.x, res_msg.y, res_msg.z])
+                    delete_last_lines(4)  # more concise output
+                    print "\t\x1b[33m" + ("sigma: " + str(sigma)).ljust(16) \
+                        + error(res, pulse.position()) + "\x1b[0m"
+    except KeyboardInterrupt:
+        print "\nAborting multilateration tests prematurely"
diff --git a/drivers/sub8_sonar/sub8_sonar/sonar_driver.py b/drivers/sub8_sonar/sub8_sonar/sonar_driver.py
index 29a5e1e9..7df39e0f 100755
--- a/drivers/sub8_sonar/sub8_sonar/sonar_driver.py
+++ b/drivers/sub8_sonar/sub8_sonar/sonar_driver.py
@@ -2,6 +2,9 @@
 import rospy
 import rosparam
 
+import numpy as np
+from scipy.signal import resample
+
 from sub8_msgs.srv import Sonar, SonarResponse
 from sub8_ros_tools import thread_lock, make_header
 from sub8_alarm import AlarmBroadcaster
@@ -15,6 +18,10 @@
 import crc16
 import sys
 
+# temp
+import matplotlib.patches as mpatches
+import matplotlib.pyplot as plt
+
 lock = threading.Lock()
 
@@ -50,9 +57,9 @@ def __init__(self, method, c, hydrophone_locations, port, baud=19200):
         self.c = c
         self.hydrophone_locations = hydrophone_locations
-        self.sonar_sensor = Multilaterator(hydrophone_locations, self.c, method)  # speed of sound in m/s
+        self.multilaterator = Multilaterator(hydrophone_locations, self.c, method)  # speed of sound in mm/us
 
-        rospy.Service('~/sonar/get_pinger_pulse', Sonar, self.get_pulse_from_timestamps)
+        rospy.Service('~/sonar/get_pinger_pulse', Sonar, self.get_pulse_from_raw_data)
         print "\x1b[32mSub8 sonar driver initialized\x1b[0m"
         rospy.spin()
 
@@ -62,13 +69,13 @@ def get_pulse_from_timestamps(self, srv):
 
         try:
             self.ser.write('A')
-            print "Sent request..."
+            print "Sent timestamp request..."
         except:  # Except only serial errors in the future.
             self.disconnection_alarm.raise_alarm(
                 problem_description="Sonar board serial connection has been terminated."
) return False - return self.sonar_sensor.getPulseLocation(self.timestamp_listener()) + return self.multilaterator.getPulseLocation(self.timestamp_listener()) def timestamp_listener(self): ''' @@ -98,17 +105,17 @@ def timestamp_listener(self): @thread_lock(lock) def get_pulse_from_raw_data(self, srv): + # request signals from hydrophone board self.ser.flushInput() - try: - self.ser.write('A') - print "Sent request..." + self.ser.write('B') + print "Sent raw data request..." except: # Except only serial errors in the future. self.disconnection_alarm.raise_alarm( problem_description="Sonar board serial connection has been terminated." ) return False - return self.sonar_sensor.getPulseLocation(self.listener()) + return self.multilaterator.getPulseLocation(self.raw_data_listener()) def raw_data_listener(self): ''' @@ -116,25 +123,59 @@ def raw_data_listener(self): ''' print "Listening..." - # We've found a packet now disect it. - response = self.ser.read(19) - # rospy.loginfo("Received: %s" % response) #uncomment for debugging - if self.check_CRC(response): - delete_last_lines(2) # Heard response! - # The checksum matches the data so split the response into each piece of data. - # For info on what these letters mean: https://docs.python.org/2/library/struct.html#format-characters - data = struct.unpack('>BffffH', response) - timestamps = [data[4], data[1], data[2], data[3] ] - print "timestamps:", timestamps - return timestamps - else: - self.packet_error_alarm.raise_alarm( - problem_description="Sonar board checksum error.", - parameters={ - 'fault_info': {'data': response} - } - ) - return None + # prepare arrays for storing signals + signal_bias = 32767 + waves_recorded = 3 + samples_per_wave = 17.24 + upsample_scale = 3 + exp_recording_size = np.floor(samples_per_wave) * waves_recorded * upsample_scale - 1 + raw_signals = np.zeros([4, exp_recording_size], dtype=float) + + try: + # read in start bit of packet + timeout = 0 + start_bit = self.ser.read(1) + # If start bit not read, keep trying + while start_bit != '\xBB': + start_bit = self.ser.read(1) + timeout += 1 + if timeout > 600: + raise BufferError("Timeout: failed to read a start bit from sonar board") + for elem in np.nditer(raw_signals, op_flags=['readwrite']): + elem[...] = float(self.ser.read(5)) - signal_bias + except BufferError as e: + print e + + # Set how much of each signal to actually use for processing + non_zero_end_idx = 57 + raw_signals = raw_signals[:, 0:non_zero_end_idx] + + # upsample the signals for successful cross-correlation + upsampled_signals = [resample(x, 8*len(x)) for x in raw_signals] + + # scale waves so they all have the same variance + equalized_signals = [x / np.std(x) for x in upsampled_signals] + raw_signals = [x / np.std(x) for x in raw_signals] + wave0_upsampled, wave1_upsampled, wave2_upsampled, wave3_upsampled = equalized_signals + wave0, wave1, wave2, wave3 = raw_signals + + tup = np.arange(0, non_zero_end_idx, 0.125) + tref = np.arange(0, non_zero_end_idx, 1) + # t = np.arange(0, non_zero_end_idx, 1) + # H1 = mpatches.Patch(color='blue', label='H1') + # H2 = mpatches.Patch(color='green', label='H2') + # H3 = mpatches.Patch(color='red', label='H3') + # H0 = mpatches.Patch(color='cyan', label='H0') + # plt.legend(handles=[H0,H1,H2,H3]) + plt.plot(tref,wave1,tref,wave2,tref,wave3,tref,wave0) + plt.plot(tup,wave1_upsampled,tup,wave2_upsampled,tup,wave3_upsampled,tup,wave0_upsampled) + plt.show() + + print "done!" 
+        return [0,0,0,0]
+
+
+
+
     def CRC(self, message):
         # You may have to change the checksum type.

From 5dfcbc36b106566399710dc5374f69f017bd6956 Mon Sep 17 00:00:00 2001
From: DSsoto
Date: Sat, 9 Jul 2016 03:55:42 -0400
Subject: [PATCH 080/267] SONAR: add signal frequency estimation

---
 .../multilateration/multilateration.py        |  45 ++---
 drivers/sub8_sonar/sub8_sonar/sonar_driver.py | 150 ++++++++++++++----
 2 files changed, 148 insertions(+), 47 deletions(-)

diff --git a/drivers/sub8_sonar/multilateration/multilateration.py b/drivers/sub8_sonar/multilateration/multilateration.py
index f88452a0..1ef865c4 100644
--- a/drivers/sub8_sonar/multilateration/multilateration.py
+++ b/drivers/sub8_sonar/multilateration/multilateration.py
@@ -32,25 +32,32 @@ def getPulseLocation(self, timestamps, method=None):
         '''
         Returns a ros message with the location and time of emission of a pinger pulse.
         '''
-        if method == None:
-            method = self.method
-        # print "\x1b[32mMultilateration algorithm:", method, "\x1b[0m"
-        assert len(self.hydrophone_locations) == len(timestamps)
-        source = None
-        if method == 'bancroft':
-            source = self.estimate_pos_bancroft(timestamps)
-        elif method == 'LS':
-            source = self.estimate_pos_LS(timestamps)
-        else:
-            print method, "is not an available multilateration algorithm"
-            return
-        response = SonarResponse()
-        response.x = source[0]
-        response.y = source[1]
-        response.z = source[2]
-        print "Reconstructed Pulse:\n\t" + "x: " + str(response.x) + " y: " + str(response.y) \
-            + " z: " + str(response.z) + " (mm)"
-        return response
+        try:
+            if method is None:
+                method = self.method
+            # print "\x1b[32mMultilateration algorithm:", method, "\x1b[0m"
+            assert len(self.hydrophone_locations) == len(timestamps)
+            source = None
+            if method == 'bancroft':
+                source = self.estimate_pos_bancroft(timestamps)
+            elif method == 'LS':
+                source = self.estimate_pos_LS(timestamps)
+            else:
+                print method, "is not an available multilateration algorithm"
+                return
+            response = SonarResponse()
+            response.x = source[0]
+            response.y = source[1]
+            response.z = source[2]
+            print "Reconstructed Pulse:\n\t" + "x: " + str(response.x) + " y: " + str(response.y) \
+                + " z: " + str(response.z) + " (mm)"
+            return response
+        except KeyboardInterrupt:
+            print "Source localization interrupted, returning all zeroes."
+            response = SonarResponse()
+            response.x, response.y, response.z = (0, 0, 0)
+            return response
 
     def estimate_pos_bancroft(self, reception_times):
         N = len(reception_times)
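Aside: the sonar_driver.py changes below estimate the pinger's carrier frequency by counting zero crossings of the upsampled reference signal and keeping a running average. Distilled into a standalone helper with illustrative names, under the same logic as the hunk that follows:

import numpy as np

def freq_from_zero_crossings(signal, t):
    # Sign changes mark zero crossings; consecutive crossings are half a
    # period apart, so n crossings span (n - 1) / 2 full waves.
    crossings = np.where(np.diff(np.sign(signal)))[0]
    if len(crossings) % 2 == 0:  # keep an odd count -> a whole number of waves
        crossings = crossings[1:]
    waves = (len(crossings) - 1) / 2.0
    span = t[crossings[-1]] - t[crossings[0]]
    return waves / span  # cycles per unit of t

With t in microseconds this returns cycles per microsecond (MHz); the driver multiplies by 1e3 to report kHz.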
diff --git a/drivers/sub8_sonar/sub8_sonar/sonar_driver.py b/drivers/sub8_sonar/sub8_sonar/sonar_driver.py
index 7df39e0f..ddfb3f6a 100755
--- a/drivers/sub8_sonar/sub8_sonar/sonar_driver.py
+++ b/drivers/sub8_sonar/sub8_sonar/sonar_driver.py
@@ -1,10 +1,14 @@
 #!/usr/bin/python
+from __future__ import division
+import numpy as np
+import numpy.linalg as la
+from scipy.signal import resample, periodogram
+
+from itertools import product
+
 import rospy
 import rosparam
 
-import numpy as np
-from scipy.signal import resample
-
 from sub8_msgs.srv import Sonar, SonarResponse
 from sub8_ros_tools import thread_lock, make_header
 from sub8_alarm import AlarmBroadcaster
@@ -19,8 +23,14 @@
 import sys
 
 # temp
+import matplotlib
+matplotlib.use("WX")
 import matplotlib.patches as mpatches
 import matplotlib.pyplot as plt
+fig = plt.figure(0)
+fig2 = plt.figure(1)
+plt.ion()
+plt.show()
 
 lock = threading.Lock()
@@ -56,8 +66,14 @@ def __init__(self, method, c, hydrophone_locations, port, baud=19200):
             return None
 
         self.c = c
-        self.hydrophone_locations = hydrophone_locations
+        self.hydrophone_locations = []
+        for key in hydrophone_locations:
+            sensor_location = np.array([hydrophone_locations[key]['x'], hydrophone_locations[key]['y'], hydrophone_locations[key]['z']])
+            self.hydrophone_locations += [sensor_location]
         self.multilaterator = Multilaterator(hydrophone_locations, self.c, method)  # speed of sound in mm/us
+        self.est_signal_freq_kHz = 0
+        self._freq_sum = 0
+        self._freq_samples = 0
 
         rospy.Service('~/sonar/get_pinger_pulse', Sonar, self.get_pulse_from_raw_data)
         print "\x1b[32mSub8 sonar driver initialized\x1b[0m"
@@ -96,7 +106,7 @@ def timestamp_listener(self):
         response = self.ser.read(19)
         # rospy.loginfo("Received: %s" % response) #uncomment for debugging
         if self.check_CRC(response):
-            delete_last_lines(2)  # Heard response!
+            delete_last_lines(0)  # Heard response!
             # The checksum matches the data so split the response into each piece of data.
             # For info on what these letters mean: https://docs.python.org/2/library/struct.html#format-characters
             data = struct.unpack('>BffffH', response)
@@ -155,7 +165,7 @@ def raw_data_listener(self):
             for elem in np.nditer(raw_signals, op_flags=['readwrite']):
-                elem[...] = float(self.ser.read(5)) - signal_bias
+                elem[...] = float(self.ser.read(5)) - signal_bias # ...
is idx to current elem except BufferError as e: print e - # Set how much of each signal to actually use for processing non_zero_end_idx = 57 + up_factor = 8 + sampling_freq = 430E3 * 0.8 # Hz + samp_period = 1E6 / sampling_freq # microseconds + upsamp_step = samp_period / up_factor + + # Set how much of each signal to actually use for processing raw_signals = raw_signals[:, 0:non_zero_end_idx] # upsample the signals for successful cross-correlation - upsampled_signals = [resample(x, 8*len(x)) for x in raw_signals] + upsampled_signals = [resample(x, up_factor*len(x)) for x in raw_signals] # scale waves so they all have the same variance equalized_signals = [x / np.std(x) for x in upsampled_signals] raw_signals = [x / np.std(x) for x in raw_signals] - wave0_upsampled, wave1_upsampled, wave2_upsampled, wave3_upsampled = equalized_signals - wave0, wave1, wave2, wave3 = raw_signals - - tup = np.arange(0, non_zero_end_idx, 0.125) - tref = np.arange(0, non_zero_end_idx, 1) - # t = np.arange(0, non_zero_end_idx, 1) - # H1 = mpatches.Patch(color='blue', label='H1') - # H2 = mpatches.Patch(color='green', label='H2') - # H3 = mpatches.Patch(color='red', label='H3') - # H0 = mpatches.Patch(color='cyan', label='H0') - # plt.legend(handles=[H0,H1,H2,H3]) - plt.plot(tref,wave1,tref,wave2,tref,wave3,tref,wave0) - plt.plot(tup,wave1_upsampled,tup,wave2_upsampled,tup,wave3_upsampled,tup,wave0_upsampled) - plt.show() - - print "done!" - return [0,0,0,0] - - - + w0_upsamp, w1_upsamp, w2_upsamp, w3_upsamp = equalized_signals + t_up = np.arange(0, non_zero_end_idx*samp_period, upsamp_step) + + zero_crossings = np.where(np.diff(np.sign(w0_upsamp)))[0] + num_crossings = len(zero_crossings) + if num_crossings % 2 == 0: # we want an odd number of zero crossings + zero_crossings = zero_crossings[1:] + num_crossings -= 1 + waves_between_first_and_last_crossing = (num_crossings - 1) / 2 + time_between_first_and_last_crossing = t_up[zero_crossings[-1]] - t_up[zero_crossings[0]] + curr_signal_freq_kHz = 1E3 * waves_between_first_and_last_crossing / time_between_first_and_last_crossing # kHz + self._freq_sum += curr_signal_freq_kHz + self._freq_samples += 1 + self.est_signal_freq_kHz = self._freq_sum / self._freq_samples + + + # frequencies, spectrum = periodogram(w0_upsamp, sampling_freq * up_factor) + # signal_freq = frequencies[np.argmax(spectrum)] # Hz + # print "fft source frequency:", signal_freq, "Hz" + print "current source frequency:", curr_signal_freq_kHz, "kHz" + print "est source frequency:", self.est_signal_freq_kHz, "kHz" + # ax = fig2.add_subplot(111) + # ax.semilogy(frequencies,spectrum) + # ax.set_ylim([1e-7, 1e2]) + # ax.set_xlim([0, 6e4]) + # ax.set_xlabel('frequency [Hz]') + signal_period = 1E3 / self.est_signal_freq_kHz # microseconds + upsamples_recorded = len(w0_upsamp) + + waves_ref = 3.5 + waves_non_ref = 2.5 + samples_per_wave = signal_period / samp_period + ref_upsamples = int(round(waves_ref * samples_per_wave * up_factor)) + nonref_upsamples = int(np.ceil(waves_non_ref * samples_per_wave * up_factor)) + + ref_start_idx = None + if (len(w0_upsamp) % 2 == ref_upsamples % 2): + ref_start_idx = int(round((upsamples_recorded / 2) - (ref_upsamples / 2))) + else: + ref_start_idx = int(np.ceil((upsamples_recorded / 2) - (ref_upsamples / 2))) + ref_end_idx = ref_start_idx + ref_upsamples - 1 + nonref_end_idx = ref_start_idx + nonref_upsamples - 1 + + w0_select = w0_upsamp[ref_start_idx : ref_end_idx + 1] + w1_select = w1_upsamp[ref_start_idx : nonref_end_idx + 1] + w2_select = w2_upsamp[ref_start_idx 
: nonref_end_idx + 1] + w3_select = w3_upsamp[ref_start_idx : nonref_end_idx + 1] + t_ref_select = t_up[ref_start_idx : ref_end_idx + 1] + t_nonref_select = t_up[ref_start_idx : nonref_end_idx + 1] + + cc1 = np.correlate(w0_select, w1_select, mode='full') + cc2 = np.correlate(w0_select, w2_select, mode='full') + cc3 = np.correlate(w0_select, w3_select, mode='full') + corr_start = t_ref_select[0] - t_nonref_select[-1] + corr_end = t_ref_select[-1] - t_ref_select[0] + upsamp_step + t_corr = np.arange(corr_start, corr_end, upsamp_step) + print len(cc1), len(t_corr) + + fig.clf() + ax1 = fig.add_subplot(511) + ax2 = fig.add_subplot(512) + ax3 = fig.add_subplot(513) + ax4 = fig.add_subplot(514) + ax5 = fig.add_subplot(515) + axarr = [ax1, ax2, ax3, ax4, ax5] + + axarr[0].plot(t_up,w1_upsamp,'r',t_up,w2_upsamp,'g',t_up,w3_upsamp,'b',t_up,w0_upsamp,'k') + axarr[0].axis([0,60*samp_period,-3,3]) + axarr[0].set_title('Signals') + + axarr[1].plot(t_nonref_select, w1_select, 'r', + t_nonref_select, w2_select, 'g', + t_nonref_select, w3_select, 'b', + t_ref_select, w0_select, 'k') + axarr[1].set_title('Selected portions') + + axarr[2].plot(t_corr, cc1) + axarr[2].set_title('Hydrophone 1 cross-correlation') + axarr[3].plot(t_corr, cc2) + axarr[3].set_title('Hydrophone 2 cross-correlation') + axarr[4].plot(t_corr, cc3) + axarr[4].set_title('Hydrophone 3 cross-correlation') + + plt.draw() + plt.pause(0.1) + + return [0,0,0,0] #DBG + + def max_delta_t(hydrophone_idx_a, hydrophone_idx_b): + a = self.hydrophone_locations[hydrophone_idx_a] + b = self.hydrophone_locations[hydrophone_idx_b] + dist = la.norm(a - b) + return dist / self.c def CRC(self, message): # You may have to change the checksum type. From b1b30f5220fd5bebf73989c0eb72f6cdb04f7a69 Mon Sep 17 00:00:00 2001 From: DSsoto Date: Mon, 25 Jul 2016 20:48:50 -0400 Subject: [PATCH 081/267] LEGACY SONAR: separate hydrophone and paulboard_driver pkgs LEGACY SONAR: fix paulboard driver test.launch --- drivers/hydrophones/CMakeLists.txt | 71 +++++ drivers/hydrophones/README.md | 21 ++ drivers/hydrophones/msg/Debug.msg | 6 + drivers/hydrophones/msg/Ping.msg | 5 + drivers/hydrophones/msg/ProcessedPing.msg | 5 + drivers/hydrophones/package.xml | 38 +++ drivers/hydrophones/scripts/hydrophones | 59 ++++ drivers/hydrophones/scripts/ping_logger | 25 ++ drivers/hydrophones/scripts/ping_plotter | 44 +++ drivers/hydrophones/scripts/ping_printer | 20 ++ drivers/hydrophones/setup.py | 13 + drivers/hydrophones/src/__init__.py | 0 .../hydrophones/src/hydrophones/__init__.py | 1 + .../hydrophones/src/hydrophones/algorithms.py | 256 ++++++++++++++++++ drivers/hydrophones/src/hydrophones/util.py | 73 +++++ drivers/paulboard_driver/CMakeLists.txt | 66 +++++ drivers/paulboard_driver/README.md | 6 + .../paulboard_driver/blobs/SimpleHyd2013.bin | Bin 0 -> 33440 bytes drivers/paulboard_driver/launch/test.launch | 16 ++ drivers/paulboard_driver/package.xml | 29 ++ .../paulboard_driver/scripts/paulboard_driver | 149 ++++++++++ drivers/paulboard_driver/setup.py | 13 + .../src/paulboard_driver/SimpleHyd2013.bin | Bin 0 -> 33440 bytes .../src/paulboard_driver/__init__.py | 0 .../src/paulboard_driver/ais_bootloader.py | 147 ++++++++++ 25 files changed, 1063 insertions(+) create mode 100644 drivers/hydrophones/CMakeLists.txt create mode 100644 drivers/hydrophones/README.md create mode 100644 drivers/hydrophones/msg/Debug.msg create mode 100644 drivers/hydrophones/msg/Ping.msg create mode 100644 drivers/hydrophones/msg/ProcessedPing.msg create mode 100644 drivers/hydrophones/package.xml 
create mode 100755 drivers/hydrophones/scripts/hydrophones create mode 100755 drivers/hydrophones/scripts/ping_logger create mode 100755 drivers/hydrophones/scripts/ping_plotter create mode 100755 drivers/hydrophones/scripts/ping_printer create mode 100644 drivers/hydrophones/setup.py create mode 100644 drivers/hydrophones/src/__init__.py create mode 100644 drivers/hydrophones/src/hydrophones/__init__.py create mode 100644 drivers/hydrophones/src/hydrophones/algorithms.py create mode 100644 drivers/hydrophones/src/hydrophones/util.py create mode 100644 drivers/paulboard_driver/CMakeLists.txt create mode 100644 drivers/paulboard_driver/README.md create mode 100644 drivers/paulboard_driver/blobs/SimpleHyd2013.bin create mode 100644 drivers/paulboard_driver/launch/test.launch create mode 100644 drivers/paulboard_driver/package.xml create mode 100755 drivers/paulboard_driver/scripts/paulboard_driver create mode 100644 drivers/paulboard_driver/setup.py create mode 100644 drivers/paulboard_driver/src/paulboard_driver/SimpleHyd2013.bin create mode 100644 drivers/paulboard_driver/src/paulboard_driver/__init__.py create mode 100644 drivers/paulboard_driver/src/paulboard_driver/ais_bootloader.py diff --git a/drivers/hydrophones/CMakeLists.txt b/drivers/hydrophones/CMakeLists.txt new file mode 100644 index 00000000..a444b1ca --- /dev/null +++ b/drivers/hydrophones/CMakeLists.txt @@ -0,0 +1,71 @@ +# Catkin User Guide: http://www.ros.org/doc/groovy/api/catkin/html/user_guide/user_guide.html +# Catkin CMake Standard: http://www.ros.org/doc/groovy/api/catkin/html/user_guide/standards.html +cmake_minimum_required(VERSION 2.8.3) +project(hydrophones) +# Load catkin and all dependencies required for this package +# TODO: remove all from COMPONENTS that are not catkin packages. +find_package(catkin REQUIRED COMPONENTS tf std_msgs message_runtime message_generation rospy geometry_msgs) +catkin_python_setup() + +# CATKIN_MIGRATION: removed during catkin migration +# cmake_minimum_required(VERSION 2.4.6) + +# CATKIN_MIGRATION: removed during catkin migration +# include($ENV{ROS_ROOT}/core/rosbuild/rosbuild.cmake) + +# Set the build type. 
Options are: +# Coverage : w/ debug symbols, w/o optimization, w/ code-coverage +# Debug : w/ debug symbols, w/o optimization +# Release : w/o debug symbols, w/ optimization +# RelWithDebInfo : w/ debug symbols, w/ optimization +# MinSizeRel : w/o debug symbols, w/ optimization, stripped binaries +#set(ROS_BUILD_TYPE RelWithDebInfo) + + +# CATKIN_MIGRATION: removed during catkin migration +# rosbuild_init() + +#set the default path for built executables to the "bin" directory + +# CATKIN_MIGRATION: removed during catkin migration +# set(EXECUTABLE_OUTPUT_PATH ${PROJECT_SOURCE_DIR}/bin) +#set the default path for built libraries to the "lib" directory + +# CATKIN_MIGRATION: removed during catkin migration +# set(LIBRARY_OUTPUT_PATH ${PROJECT_SOURCE_DIR}/lib) + +#uncomment if you have defined messages +add_message_files( + FILES + ProcessedPing.msg + Debug.msg + Ping.msg +) +## Generate added messages and services with any dependencies listed here +generate_messages( + DEPENDENCIES std_msgs geometry_msgs +) + +# catkin_package parameters: http://ros.org/doc/groovy/api/catkin/html/dev_guide/generated_cmake_api.html#catkin-package +# TODO: fill in what other packages will need to use this package +catkin_package( + DEPENDS # TODO + CATKIN_DEPENDS tf std_msgs message_runtime message_generation rospy + INCLUDE_DIRS # TODO include + LIBRARIES # TODO +) + +include_directories( ${Boost_INCLUDE_DIR} ${catkin_INCLUDE_DIRS}) + +#uncomment if you have defined services +#rosbuild_gensrv() + +#common commands for building c++ executables and libraries +#rosbuild_add_library(${PROJECT_NAME} src/example.cpp) +#target_link_libraries(${PROJECT_NAME} another_library) +#rosbuild_add_boost_directories() +#rosbuild_link_boost(${PROJECT_NAME} thread) +#rosbuild_add_executable(example examples/example.cpp) +#target_link_libraries(example ${PROJECT_NAME}) + +install(PROGRAMS scripts/ping_printer scripts/ping_logger scripts/ping_plotter scripts/hydrophones DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}) diff --git a/drivers/hydrophones/README.md b/drivers/hydrophones/README.md new file mode 100644 index 00000000..c8f50c34 --- /dev/null +++ b/drivers/hydrophones/README.md @@ -0,0 +1,21 @@ +Conventions used: + +Incoming data is assumed to be from four hydrophones, indexed from 0 to 3, +laid out as follows: + + 1 + | x ^ + 3--0 | + | y <--0 z (out of screen) + 2 + +The resulting published position point is in the coordinate frame shown above. + +Other trivia: + +`dist_h` is the distance (in meters, of course) from hydrophone 0 to 1 (and +equivalently, 0 to 2). `dist_h4` is the distance from 0 to 3. + +The deltas (seen internally and published on the debug topic) are _delays_, so +one being more positive means that the ping was heard by that hydrophone +later. diff --git a/drivers/hydrophones/msg/Debug.msg b/drivers/hydrophones/msg/Debug.msg new file mode 100644 index 00000000..cc0d479b --- /dev/null +++ b/drivers/hydrophones/msg/Debug.msg @@ -0,0 +1,6 @@ +Header header +float64[] deltas +float64[] delta_errors +float64 fft_sharpness +float64 heading +float64 declination diff --git a/drivers/hydrophones/msg/Ping.msg b/drivers/hydrophones/msg/Ping.msg new file mode 100644 index 00000000..9389665a --- /dev/null +++ b/drivers/hydrophones/msg/Ping.msg @@ -0,0 +1,5 @@ +Header header +int32 channels +int32 samples +int32 sample_rate +uint16[] data # C1 C2 C3 C4 C1 C2 C3 C4 ... 
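
Note: the data field above interleaves one uint16 per channel per sample
(C1 C2 C3 C4 C1 C2 C3 C4 ...). A minimal sketch of the deinterleave,
mirroring what ping_to_samples() in hydrophones/util.py (added later in
this patch) does with this buffer:

    import numpy

    def deinterleave(ping):
        # Reshape the flat C1 C2 C3 C4 ... buffer so rows are channels
        # and columns are samples; float64 for downstream math.
        return numpy.array(ping.data, dtype=numpy.float64) \
            .reshape((ping.samples, ping.channels)).transpose()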
diff --git a/drivers/hydrophones/msg/ProcessedPing.msg b/drivers/hydrophones/msg/ProcessedPing.msg new file mode 100644 index 00000000..4951f733 --- /dev/null +++ b/drivers/hydrophones/msg/ProcessedPing.msg @@ -0,0 +1,5 @@ +Header header +geometry_msgs/Point position +float64 freq +float64 amplitude +bool valid diff --git a/drivers/hydrophones/package.xml b/drivers/hydrophones/package.xml new file mode 100644 index 00000000..5ddadb7b --- /dev/null +++ b/drivers/hydrophones/package.xml @@ -0,0 +1,38 @@ + + hydrophones + 1.0.0 + hydrophones + Matthew Thompson + + BSD + + http://ros.org/wiki/hydrophones + + + Matthew Thompson + + + catkin + + + tf + rospy + message_runtime + message_generation + std_msgs + geometry_msgs + + + tf + rospy + message_runtime + message_generation + std_msgs + geometry_msgs + + + + + + + diff --git a/drivers/hydrophones/scripts/hydrophones b/drivers/hydrophones/scripts/hydrophones new file mode 100755 index 00000000..f62219f5 --- /dev/null +++ b/drivers/hydrophones/scripts/hydrophones @@ -0,0 +1,59 @@ +#!/usr/bin/env python + +import os +import numpy +import threading +import math + +import rospy + +from hydrophones import algorithms, util +from hydrophones.msg import Ping, ProcessedPing, Debug +from std_msgs.msg import Header +from geometry_msgs.msg import Point, PoseStamped, Pose + +def process_ping(ping): + samples = util.ping_to_samples(ping) + sample_rate = ping.sample_rate + + r = algorithms.run(samples, sample_rate, v_sound, dist_h, dist_h4) + + if len(r['pos']) > 0: + heading = math.atan2(r['pos'][1], r['pos'][0]) + declination = math.atan2(-r['pos'][2], numpy.linalg.norm(r['pos'][0:2])) + else: + heading = 0 + declination = 0 + + if len(r['errors']) > 0: + rospy.logwarn('Errors processing ping: ' + ", ".join(r['errors'])) + + valid = len(r['pos']) > 0 and len(r['errors']) == 0 + if valid: + pub.publish(header=Header(stamp=ping.header.stamp, + frame_id=ping.header.frame_id), + position=Point(*r['pos'].tolist()), + freq=r['freq'], + amplitude=r['amplitude'], + valid=valid) + pub_pose.publish(header=Header(stamp=ping.header.stamp, + frame_id=ping.header.frame_id), + pose=Pose(position=Point(*r['pos'].tolist()))) + pub_debug.publish(header=Header(stamp=ping.header.stamp, + frame_id=ping.header.frame_id), + deltas=r['deltas'].tolist(), + delta_errors=r['delta_errors'].tolist(), + fft_sharpness=r['fft_sharpness'], + heading=heading, + declination=declination) + +rospy.init_node('hydrophones') +dist_h = rospy.get_param('~dist_h') +dist_h4 = rospy.get_param('~dist_h4') +v_sound = rospy.get_param('~v_sound') +template_periods = rospy.get_param('~template_periods', 3) +pub = rospy.Publisher('hydrophones/processed', ProcessedPing) +pub_pose = rospy.Publisher('hydrophones/pose', PoseStamped) +pub_debug = rospy.Publisher('hydrophones/debug', Debug) +sub = rospy.Subscriber('hydrophones/ping', Ping, process_ping) +rospy.spin() diff --git a/drivers/hydrophones/scripts/ping_logger b/drivers/hydrophones/scripts/ping_logger new file mode 100755 index 00000000..6747aa30 --- /dev/null +++ b/drivers/hydrophones/scripts/ping_logger @@ -0,0 +1,25 @@ +#!/usr/bin/env python + +import os +import numpy + +import roslib +roslib.load_manifest('hydrophones') +import rospy + +from hydrophones.msg import Ping +from hydrophones import util + +rospy.init_node('ping_logger') +output_dir = rospy.get_param("~output_dir", "pings") +if not os.path.exists(output_dir): + os.makedirs(output_dir) + +def save_ping(ping): + path = os.path.join(output_dir, str(ping.header.seq) + ".csv") + samples = 
util.ping_to_samples(ping) + numpy.savetxt(path, samples.transpose(), fmt='%d', delimiter=',') + +sub = rospy.Subscriber('hydrophones/ping', Ping, save_ping) +rospy.spin() + diff --git a/drivers/hydrophones/scripts/ping_plotter b/drivers/hydrophones/scripts/ping_plotter new file mode 100755 index 00000000..53ca34e1 --- /dev/null +++ b/drivers/hydrophones/scripts/ping_plotter @@ -0,0 +1,44 @@ +#!/usr/bin/env python + +import os +import numpy +import threading +import matplotlib +import matplotlib.pyplot as plt + +import roslib +roslib.load_manifest('hydrophones') +import rospy + +from hydrophones import algorithms, util +from hydrophones.msg import Ping + +class Node(object): + def __init__(self): + self.fig = plt.figure() + self.ax = self.fig.add_subplot(111) + self.fig.show() + self.cond = threading.Condition() + self.samples = None + self.sub = rospy.Subscriber('hydrophones/ping', Ping, self.ping_callback) + + def ping_callback(self, ping): + with self.cond: + self.samples = util.ping_to_samples(ping) + self.cond.notify_all() + + def run(self): + while not rospy.is_shutdown(): + with self.cond: + self.cond.wait(.5) + if self.samples is not None: + self.ax.clear() + self.ax.plot(self.samples.transpose()) + self.ax.autoscale() + self.fig.canvas.draw() + self.fig.show() + self.samples = None + +rospy.init_node('ping_plotter') +Node().run() + diff --git a/drivers/hydrophones/scripts/ping_printer b/drivers/hydrophones/scripts/ping_printer new file mode 100755 index 00000000..fe2d083c --- /dev/null +++ b/drivers/hydrophones/scripts/ping_printer @@ -0,0 +1,20 @@ +#!/usr/bin/env python + +import math + +import roslib +roslib.load_manifest('hydrophones') +import rospy + +from hydrophones.msg import ProcessedPing + +def print_heading(ping_processed): + print '%s%dhz heading %d declination %d amplitude %d' % ('bad' if not ping_processed.valid else '', + ping_processed.freq, + ping_processed.heading / math.pi * 180, + ping_processed.declination / math.pi * 180, + ping_processed.amplitude) + +rospy.init_node('print_heading') +sub = rospy.Subscriber('hydrophones/processed', ProcessedPing, print_heading) +rospy.spin() diff --git a/drivers/hydrophones/setup.py b/drivers/hydrophones/setup.py new file mode 100644 index 00000000..8a4c344f --- /dev/null +++ b/drivers/hydrophones/setup.py @@ -0,0 +1,13 @@ +## ! 
DO NOT MANUALLY INVOKE THIS setup.py, USE CATKIN INSTEAD + +from distutils.core import setup +from catkin_pkg.python_setup import generate_distutils_setup + +# fetch values from package.xml +setup_args = generate_distutils_setup( + packages=['hydrophones'], + package_dir={'': 'src'}, + requires=[], # TODO +) + +setup(**setup_args) \ No newline at end of file diff --git a/drivers/hydrophones/src/__init__.py b/drivers/hydrophones/src/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/drivers/hydrophones/src/hydrophones/__init__.py b/drivers/hydrophones/src/hydrophones/__init__.py new file mode 100644 index 00000000..b23e43d7 --- /dev/null +++ b/drivers/hydrophones/src/hydrophones/__init__.py @@ -0,0 +1 @@ +#autogenerated by ROS python message generators \ No newline at end of file diff --git a/drivers/hydrophones/src/hydrophones/algorithms.py b/drivers/hydrophones/src/hydrophones/algorithms.py new file mode 100644 index 00000000..122465ff --- /dev/null +++ b/drivers/hydrophones/src/hydrophones/algorithms.py @@ -0,0 +1,256 @@ +from __future__ import division +from hydrophones import util + +import numpy +import scipy +import math +import sys +import matplotlib.pyplot as plt + +def run(samples, sample_rate, v_sound, dist_h, dist_h4): + # Perform algorithm + samples = zero_mean(samples) + freq, amplitude, samples_fft = compute_freq(samples, sample_rate, numpy.array([5e3, 40e3]), plot=True) + fft_sharpness = amplitude**2/numpy.sum(samples_fft) + upsamples, upsample_rate = preprocess(samples, sample_rate, 3e6) + deltas, delta_errors, template_pos, template_width = \ + compute_deltas(upsamples, upsample_rate, freq, 8) + delta_errors = delta_errors / amplitude + if len(deltas) == 3: + pos = compute_pos_4hyd(deltas, upsample_rate, v_sound, dist_h, dist_h4) + else: + pos = numpy.empty(0) + + # Check for errors + errors = [] + if amplitude < 80: + errors.append('Low amplitude at maximum frequency') + if template_pos is None: + errors.append('Failed to find template') + elif numpy.max(delta_errors) > 1e-3: + errors.append('High template match error (%s)' % str(delta_errors)) + + return dict( + errors=errors, + freq=freq, + amplitude=amplitude, + fft_sharpness=fft_sharpness, + samples_fft=samples_fft, + pos=pos, + deltas=deltas, + delta_errors=delta_errors, + upsamples=upsamples, + upsample_rate=upsample_rate, + template_pos=template_pos, + template_width=template_width) + +def zero_mean(samples): + """Zero means data by assuming that the first 32 samples are zeros""" + return samples - numpy.mean(samples[:, 0:32], 1)[:, numpy.newaxis] + +def normalize(samples): + """Rescapes samples so each individual channel lies between -1 and 1""" + return samples / numpy.amax(numpy.abs(samples), 1)[:, numpy.newaxis] + +def compute_freq(samples, sample_rate, freq_range, plot=False): + """Checks whether samples are likely a solid ping and returns the frequency.""" + samples_window = samples * numpy.hamming(samples.shape[1]) + + # Compute fft, find peaks in desired range + fft_length = 2048 + samples_fft = numpy.absolute(numpy.fft.fft(samples_window, fft_length, axis=1))[:, :fft_length/2] + bin_range = freq_to_bin(freq_range, sample_rate, fft_length) + peaks = bin_range[0] + numpy.argmax(samples_fft[:, bin_range[0]:bin_range[1]], axis=1) + + # Sort peaks, take mean of the middle + middle_peaks = numpy.sort(peaks)[len(peaks)//4:len(peaks) - len(peaks)//4] + peak = numpy.mean(middle_peaks) + + freq = bin_to_freq(peak, sample_rate, fft_length) + + amplitude = math.sqrt(numpy.mean(samples_fft[:, 
round(peak)])) + return freq, amplitude, samples_fft + +def bin_to_freq(bin, sample_rate, fft_length): + return (sample_rate/2) / (fft_length/2) * bin + +def freq_to_bin(freq, sample_rate, fft_length): + return freq * ((fft_length/2) / (sample_rate/2)) + +def preprocess(samples, sample_rate, desired_sample_rate): + """Upsamples data to have approx. desired_sample_rate.""" + # Trim to first ms of data and bandpass + samples = bandpass(samples[:, :.001*sample_rate], sample_rate) + samples = normalize(samples) + + # Upsample each channel + upfact = round(desired_sample_rate/sample_rate) + upsamples = numpy.empty((samples.shape[0], samples.shape[1]*upfact)) + for i in xrange(samples.shape[0]): + upsamples[i, :] = util.resample(samples[i, :], upfact, 1) + return upsamples, sample_rate*upfact + +def bandpass(samples, sample_rate): + """Applies a 20-30khz bandpass FIR filter""" + # 25-40KHz is the range of the pinger for the roboboat competition + fir = scipy.signal.firwin(128, + [19e3/(sample_rate/2), 41e3/(sample_rate/2)], + window='hann', + pass_zero=False) + return scipy.signal.lfilter(fir, 1, samples) + +def compute_deltas(samples, sample_rate, ping_freq, template_periods, plot=False): + """ + Computes N-1 position deltas for N channels, by making a template + for the first channel and matching to all subsequent channels. + """ + period = int(round(sample_rate / ping_freq)) + template_width = period*template_periods+1 + template, template_pos = make_template(samples[0, :], + .2, + template_width) + if template_pos is None: + return numpy.empty(0), numpy.empty(0), None, template_width + start = template_pos - period//2 + stop = template_pos + period//2 + + deltas = numpy.empty(samples.shape[0]-1) + errors = numpy.empty(samples.shape[0]-1) + for i in xrange(deltas.shape[0]): + pos, error = match_template(samples[i+1, :], start, stop, template) + deltas[i] = pos - template_pos + errors[i] = error + + return deltas, errors, template_pos, template_width + +def make_template(channel, thresh, width): + """ + Returns a template of the specified width, centered at the first + point of the channel to exceed thresh. + """ + for pos in xrange(0, channel.shape[0]-width): + if abs(channel[pos+width//4]) >= thresh: + return channel[pos:pos+width], pos + return None, None + +def match_template(channel, start, stop, template): + """ + Matches template to channel, returning the point where the start + of the template should be placed. + """ + start = max(start, 0) + stop = min(stop, channel.shape[0] - template.shape[0]) + mad = mean_absolute_difference(channel, start, stop, template) + min_pt = find_zero(mad) + + return start + min_pt, mad[round(min_pt)] + +def mean_absolute_difference(channel, start, stop, template): + """ + Slides the template along channel from start to stop, giving the + mean absolute difference of the template and channel at each + location. + """ + width = template.shape[0] + mad = numpy.zeros(stop-start) + for i in xrange(start, stop): + mad[i-start] = numpy.mean(numpy.abs(channel[i:i+width] - template)) + return mad + +def find_zero(data): + """ + Finds the sub-sample position of the first zero in a continuous signal, + by first finding the lowest absolute value, then taking the gradient + and performing a linear interpolation near the lowest absolute value. 
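+
+    As a worked example (hypothetical values): if the gradient is -2.0 at
+    index p and +1.0 at index p+1, the interpolated crossing is p + 2/3.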
+ """ + approx = numpy.argmin(numpy.abs(data)) + d_data = numpy.gradient(data) + + for pos in xrange(max(approx-3,0), min(approx+3, d_data.shape[0]-1)): + if numpy.sign(d_data[pos]) != numpy.sign(d_data[pos+1]): + y2 = d_data[pos+1] + y1 = d_data[pos] + x2 = pos+1 + x1 = pos + return -(x2-x1)/(y2-y1)*y1 + x1 + return approx + +def compute_pos_4hyd(deltas, sample_rate, v_sound, dist_h, dist_h4): + """ + Solves the 4 hydrophone case (3 deltas) for heading, declination, + and sph_dist using a bunch of trig. Future work would be to phrase + this as a NLLSQ problem or something, and adapt it to more + hydrophones. + """ + assert(len(deltas) == 3) + + y1 = deltas[0]/sample_rate*v_sound + y2 = deltas[1]/sample_rate*v_sound + y3 = deltas[2]/sample_rate*v_sound + + dist = abs((y1**2 + y2**2 - 2*dist_h**2)/(2*y1 + 2*y2)) + cos_alpha1 = (2*dist*y1 + y1**2 - dist_h**2)/(-2*dist*dist_h); + cos_alpha2 = -(2*dist*y2 + y2**2 - dist_h**2)/(-2*dist*dist_h); + cos_alpha = (cos_alpha1 + cos_alpha2)/2; + + cos_beta = (2*dist*y3 + y3**2 - dist_h4**2)/(-2*dist*dist_h4); + + dist_x = cos_alpha*dist + dist_y = cos_beta*dist + if dist**2 - (dist_x**2 + dist_y**2) < 0: + dist_z = 0 + else: + dist_z = -math.sqrt(dist**2 - (dist_x**2 + dist_y**2)) + return numpy.array([dist_x, dist_y, dist_z]) + +if __name__ == '__main__': + sample_rate = 300e3 + if len(sys.argv) > 1: + samples = numpy.loadtxt(sys.argv[1], delimiter=',').transpose() + else: + samples = util.make_ping([0, .25, 1.234, -5], {'freq': 23e3, 'sample_rate': sample_rate}) + + r = run(samples, sample_rate, 1497, 2.286000e-02, 2.286000e-02) + + if len(r['errors']) > 0: + print 'ERRORS', r['errors'] + print 'freq', r['freq'], 'amplitude', r['amplitude'] + print 'sharpness', r['fft_sharpness'] + print 'deltas', r['deltas'] + print 'delta errors', r['delta_errors']/r['amplitude'] + print 'pos (hyd coordinates)', r['pos'] + + plt.figure() + plt.plot(samples.transpose()) + plt.title('Raw ping') + + plt.figure() + fft_length = r['samples_fft'].shape[1]*2 + plt.plot(bin_to_freq(numpy.arange(0, fft_length//2), sample_rate, fft_length), + r['samples_fft'].transpose()) + plt.title('FFT') + + plt.figure() + plt.plot(r['upsamples'].transpose()) + plt.title('Upsampled ping') + + if r['template_pos'] is not None: + period = int(round(r['upsample_rate'] / r['freq'])) + template = r['upsamples'][0,r['template_pos']:r['template_pos']+r['template_width']] + plot_start = r['template_pos'] - 2*period + plot_stop = r['template_pos'] + r['template_width'] + 2*period + plt.ioff() + plt.figure() + plt.plot(template) + plt.title('Template') + + for i in xrange(r['deltas'].shape[0]): + plt.figure() + plt.plot(numpy.arange(plot_start, plot_stop), r['upsamples'][i+1, plot_start:plot_stop]) + pos = r['template_pos'] + int(round(r['deltas'][i])) + plt.plot(numpy.arange(pos, pos+r['template_width']), template) + plt.title('Channel %d' % (i+1)) + + plt.show() + + diff --git a/drivers/hydrophones/src/hydrophones/util.py b/drivers/hydrophones/src/hydrophones/util.py new file mode 100644 index 00000000..f19e1d4c --- /dev/null +++ b/drivers/hydrophones/src/hydrophones/util.py @@ -0,0 +1,73 @@ +from __future__ import division + +import numpy +import scipy +import scipy.signal +import math + +from hydrophones.msg import Ping + +def resample(x, p, q): + """Polyphase filter resample, based on MATLAB's resample.m""" + bta = 5 + N = 10 + pqmax = max(p, q) + fc = (1/2)/pqmax + L = 2*N*pqmax + 1 + h = p*scipy.signal.firwin(L-1, 2*fc, window=('kaiser', bta)) + Lhalf = (L-1)/2 + Lx = len(x) + + nz = 
math.floor(q-(Lhalf % q)) + z = numpy.zeros(nz) + Lhalf += nz + + delay = math.floor(math.ceil(Lhalf)/q) + nz1 = 0 + while math.ceil(((Lx - 1)*p + len(h) + nz1)/q) - delay < math.ceil(Lx*p/q): + nz1 = nz1+1; + h = numpy.hstack([h, numpy.zeros(nz1)]); + y = upfirdn(x,h,p,q) + Ly = math.ceil(Lx*p/q) + y = y[delay:] + y = y[:Ly] + return y + +def upfirdn(x, h, p, q): + # Allocate an expanded array to hold x interspersed with p-1 zeros, + # padded with enough zeros for the fir filter + x_expanded = numpy.zeros((x.shape[0] - 1)*p + h.shape[0]) + + # Insert x values every p elements + x_expanded[:x.shape[0]*p:p] = x + + # Run filter + x_filt = scipy.signal.lfilter(h, 1, x_expanded) + return x_filt + +def make_ping_channel(delay=0, + freq=25e3, + offset=0x7FFF, + ampl=200, + zeros=64, + count=1024, + sample_rate=300e3): + w = 2*math.pi*freq/sample_rate + sinwave = ampl*numpy.sin(w*(numpy.arange(count)-delay)) + + delay_i = round(delay) + window = numpy.zeros(count) + window[zeros+delay_i:] = numpy.minimum(numpy.exp(numpy.arange(count - zeros - delay_i)/10), 2)-1 + + noise = numpy.random.normal(0, .01, count) + + return offset + sinwave * window + noise + +def make_ping(delays=[0, 0, 0, 0], args={}): + return numpy.vstack(make_ping_channel(delay=delay, **args) for delay in delays) + +def samples_to_list(samples): + return samples.transpose().flatten().tolist() + +def ping_to_samples(ping): + return numpy.array(ping.data, dtype=numpy.float64).reshape((ping.samples, ping.channels)).transpose() diff --git a/drivers/paulboard_driver/CMakeLists.txt b/drivers/paulboard_driver/CMakeLists.txt new file mode 100644 index 00000000..6ffedf4f --- /dev/null +++ b/drivers/paulboard_driver/CMakeLists.txt @@ -0,0 +1,66 @@ +# Catkin User Guide: http://www.ros.org/doc/groovy/api/catkin/html/user_guide/user_guide.html +# Catkin CMake Standard: http://www.ros.org/doc/groovy/api/catkin/html/user_guide/standards.html +cmake_minimum_required(VERSION 2.8.3) +project(paulboard_driver) +# Load catkin and all dependencies required for this package +# TODO: remove all from COMPONENTS that are not catkin packages. +find_package(catkin REQUIRED COMPONENTS rospy hydrophones) +catkin_python_setup() + +# CATKIN_MIGRATION: removed during catkin migration +# cmake_minimum_required(VERSION 2.4.6) + +# CATKIN_MIGRATION: removed during catkin migration +# include($ENV{ROS_ROOT}/core/rosbuild/rosbuild.cmake) + +# Set the build type. 
Options are: +# Coverage : w/ debug symbols, w/o optimization, w/ code-coverage +# Debug : w/ debug symbols, w/o optimization +# Release : w/o debug symbols, w/ optimization +# RelWithDebInfo : w/ debug symbols, w/ optimization +# MinSizeRel : w/o debug symbols, w/ optimization, stripped binaries +#set(ROS_BUILD_TYPE RelWithDebInfo) + + +# CATKIN_MIGRATION: removed during catkin migration +# rosbuild_init() + +#set the default path for built executables to the "bin" directory + +# CATKIN_MIGRATION: removed during catkin migration +# set(EXECUTABLE_OUTPUT_PATH ${PROJECT_SOURCE_DIR}/bin) +#set the default path for built libraries to the "lib" directory + +# CATKIN_MIGRATION: removed during catkin migration +# set(LIBRARY_OUTPUT_PATH ${PROJECT_SOURCE_DIR}/lib) +## Generate added messages and services with any dependencies listed here +#generate_messages( +# DEPENDENCIES +#) + +# catkin_package parameters: http://ros.org/doc/groovy/api/catkin/html/dev_guide/generated_cmake_api.html#catkin-package +# TODO: fill in what other packages will need to use this package +catkin_package( + DEPENDS # TODO + CATKIN_DEPENDS rospy hydrophones + INCLUDE_DIRS # TODO include + LIBRARIES # TODO +) + +include_directories( ${Boost_INCLUDE_DIR} ${catkin_INCLUDE_DIRS}) + + +#uncomment if you have defined messages +#rosbuild_genmsg() +#uncomment if you have defined services +#rosbuild_gensrv() + +#common commands for building c++ executables and libraries +#rosbuild_add_library(${PROJECT_NAME} src/example.cpp) +#target_link_libraries(${PROJECT_NAME} another_library) +#rosbuild_add_boost_directories() +#rosbuild_link_boost(${PROJECT_NAME} thread) +#rosbuild_add_executable(example examples/example.cpp) +#target_link_libraries(example ${PROJECT_NAME}) + +install(PROGRAMS scripts/paulboard_driver DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}) diff --git a/drivers/paulboard_driver/README.md b/drivers/paulboard_driver/README.md new file mode 100644 index 00000000..3db6c494 --- /dev/null +++ b/drivers/paulboard_driver/README.md @@ -0,0 +1,6 @@ +Explanation of `permute` parameter +---------------------------------- + +A `permute` value of `w x y z` means that the connector labeled Aw on the PCB +maps to hydrophone 0 as the hydrophones package wants it, Ax maps to 1, Ay +maps to 2, and Az maps to 3. See `README.md` in the hydrophones package. diff --git a/drivers/paulboard_driver/blobs/SimpleHyd2013.bin b/drivers/paulboard_driver/blobs/SimpleHyd2013.bin new file mode 100644 index 0000000000000000000000000000000000000000..4b4c50a08d614cd4256faaceadbf2c4137bb507d GIT binary patch literal 33440 zcmcG%3tUv!wLiYkKEp7Q(K*AQ8I37t287{}IwIO67n>PWu&J+*njki}KoB1>6Khlu zlX^e}wImLR#-_Fzz#yW%A;#D?m;5@Sg0*QxQWM0q9S{+W5%W?&BmeK3aTJZo?Y;kd z#}5MM?8n+`ulHJe^OGOVwhx{Ac&fkzroop;KjVujiavccV@YvOWLl06@G$$s&PDDG zyhW#CY0k2O)Jt0wk;$Ic(2)909wLTJ)N;r4jKIdysYNrajwNZb#4+5tpp;;KM0$_? 
zRdOtOzkT7hEI#@W!$-CbDASm`%@J#hJRKU+tU)_`uV2nb*IT&67q&BwR-OdblhHD( zSXj=T!l_V+MITth7rQn5DQWg^@xCjImkDX?Pk{mNIbw73%9vimoToya`G9LJV~U3< zn9L)v`CKbLrFSgJZ7pL?yzo9}tgVjCkdo>f3Yg4TUuiMMn#5Z)#cZ~u~_HJN{j`6vV>RHsW0Ui@?7Sk#QT|*`|9{<@v7&4ebr} zSXVPPq~p9>BU+BFIlR!VIo$YWlRAEJ2+uMD*Gp^>7@(W*wwxPz7MJY+p{#9lghpTA_l{HiX%0~l0{32m)9nakzVtn(htXBDO$;@rx0wsN)0K2LEaer9wfPTuHB zDILme;DN^-OELo0`vYF7lN&EuxKpPvc5Y$2qANUJJBDEW613Whb$0R>uz%f!jM=(6 zxoifl#u1*uGiL0O`3gPL$>uDylQSM-Ewh}U|Ly2|JD)L$UGVeT-S|vV$>*>}CNI6N z+>Ot`1|C1J5&j4l@$4mozI$LSt(P%l0gMdz|RKO2~p zSy{|Zh1LarSymS~)szrmc`r1e+pP&Os2f&*r@9Qe!=(A<1aY1;NgTFby*?(sf(L%3 z<>#@Nibgm~_-+Xe@!~sh-NWaSr8gqeFgM{@DPU&^*Mw)_I|aB-43>PjCQLI)>LP5n z>dJJ$m(ITyKWz|y`EV|aKAbNq>g2gM@r?zx-NHGZdvWd*|HSJ@^Fc<~*~JYC-=+aNcVVE!)uJTDm>OG-L@c=zL+v3$?n@Ge~%m{qi*M=MMlhZCUBBpr9T=7%`|9OSwR9%gB+wWERf+OC!T(~ zm?bdV%Ory=`v)LLHVE^1vEW`m9a6Q&j{Upf-WZWGTVnMw`(6}ND-{9qNlefv!VLW` ztWWMr2vLMzs1&|NRCv{x$2KPnSA-~bAJ*IOrXsU$L+nuLIf3zozh~T{RfL6YiV%(V z{maVu6)WqezSKTb)UqX*Cu-lc;(V>oFv=dX65T2&IOJi_B@pzznMB6_k{!@06%?O5WdC0(NAJddc7{9FT zDd^RCN&=sw@5!$co#rc|VouR0ftK2@1)1&i+=){!K5eWX>lp(6KZErQNvqtTxx$ZX zV)lJKgm63w_=TJu(#fZD!tXBimG{;FXH{cLg-O6~s`I!ehfm?_n5~U){eq|ni-2DP z^WnPTROv{<8_1+1TE&2C3AlCw*Ymx&{;SlG7R|UuVeD*;vs?xFBDRi|Ku^>7_n{V@ zLzxVE7)SU6{Nv^-+BoR6aaml7b7&pYm5mXppb2zmoizcDV$$!_&rd_{^$u;x^?^s2 zB6s-v5pUrA5R5k@i@ztNIj0A8^4XBX`M+ufBq@+9|2Z_-7Ik#Z!%GAuevw=Auv?Ma zVnz*#OKp1h=cjZ{%RX#RSeE8&ii7U9wua7Lmc@?>y9(963f$z-rXJrctibP8L*vw80G;^Kr3eaZyM&8?PqSAyQs!sV^-F6O6S(??ydsg+W3>| zhRQ|bHVC9M=gxjtzy`Hhv%^(PW~}vRl9&TZas4@*|E(ec{C2x8n_>MX9#qknE)pK* z1D5xhaKVRz3pe2)`+@L4u+aP;6(nd{fRj||U4i6`mf1YC{^fZ0&G{gIRqP_!skiV? zHtH=L$U~KLx8wiwn{(5=S==q8Rx*`TFrbTO1q^emNS-IF8{S!tcL1Q{Yn7tU6bgi* zcMcR5uG6hcWCHtCsvAbO@+GExnp`>o_OgHN39yrAX6CM>J&+3WgO*}`?T!7Hu>W6S z{_UbB<+tK;UVNxU=jL@RGMv`x(<5a9^Q<*xFF8=y`Tb-yirIAon@Yb;`-igr7(k24+c%2WDaKQc|1h9s`ZfZq@xQ?`?qMc zY_?4~*s@0p9xgjdGNo`4xH~pk);~A!2!6MwRRY_Du0# zFr>Zl3gGL-^B2UGkO?&JEtn(^Y;Yf3?Cx;I1PAaaCv;SHMC2zP#tDaWBtsDF7g16v z9S2LEmD0uQl!$g$BV9>(FM8j&lA%h*nSH zi#}6;Bxl=V==&Izq9YwoW9D7b!S{rJJ2r$6*kLLUv8Gn4ZX4G>royB)|G3^g9xe0g zjR9g8;EcM|i~URYSPP#o?E1nk9xTaXi<*z|m?=}lE2HMX{wn5i8dt|N`2FXFXZ7i>6js2)xlAMyegtK}ubzI^Ui@*SbI( z5g2UFUXmVZf3cW*-g64+qH-pC^nT{W&7~iCZLWo!@fTr7ErR`k{+@`!+LwrC?30Fa zgSu)e**?()3Vct6!)b4ehs3n>E5c71_Dg2$SfIpPH%p+Y5a3t#c}O3ck1se_N;Gwt&5>+z z>=UA?Q#vi=YO!GHXYeB!+!~@Q=sVz?;tGT5YVy1Hfj4%kH%c!LA^HS8iNB2B2AXgV znIp~6Ta1zlHm1t`Vmb5+YqNd5L-oP|U{_-WpmKPy0i02?qU5@u16y2hB$;sEUtf za>f`|^3}jtpI!UHUG3Vt+h>DfuiqJ;aKeZ1$)Q%AQ$_gq&86iOqmWJ8#=?#Y15N;? 
z&^AwzQ~WWC&on#j(M+-G5uYF&Rj9)gV_YT<|Bju9zkgci)YSZCjmG(-uXQwTrmzWm z3UPr-72k&VK%QL6x0%a$o^Aw7wk5$nle$XmOil~ZTla)2^{~Ct^+8Pj^Y<4lluZ{O zVbH&6HG0GfvNsv5OvWCW5i2rJ-smz{Gew5{_L%a|J~9FLYtijAp*_RzKif0XH94(P zwjl;JIqgwFK)+1d+bzK?9dKu2J(<|o4D4$<_H`Ndb=ly3_3x?7l#aXYtA9_if4=zM z9sBytJvFnEFTc5`gri##b-#~rE0)fKJr`kXAEI^1wcm{K;c=TUp6=UIvQ-=MlOue4 z>Xo19;z9PmkpCb&$Qg@=`Fxh-h|IOc>;=er`fkJil02u_lNR5v zOw8Guv%c~5l%oTI(gNrkE}il5*;?V6h6P{KDEcuIi+Rm`E)E$7z026Pi6PBsgZxGN zt8;^(Z(Gw_dyh*j%%pB&Olc3FSlmo)9I@@MJ}W39#kYiXCMAgr9%Zu9wPlnqe*wAQ zY?GCL{!qi3k-IpdcLz1SpkmDlA*}gDku|3(_ckLor=xgLJZs$Yw6Kv9P946u9Mdgq zV>)lFsM!D=o(nyq3`>01DMIfI`>X!I{Vk!RAtzkJ{xP0?kL&@%wyz4?IHWod}bT}E6I{u}&98T7B^g|?-t!t|xQh+pE-CRr?!FNu&x;$_68FJ~sG zrha^~Zj0sX(8~pNqFla2cVK9QswUFKnby{U^?PI0cf%9t> ztQA_Z{yXEB^D3E`h!`ftYR^hfNfZA)>4^wvnbxr(-0;&N3Hx^@hg*27&d4Rl*E-9k zKeN~$?HlQD z$}`+;?`YrfIU8o`y>0J&v>k16c3t}?{NS^|dvo-7S2Mvo=EO@rx#^usBg5oREhV23mZZ(#12qXR9ljhm5MYl-q~IoYBM zITwnUYvxvS|zkW;?B`PMqsJ zY%f<=C)#BO+hoch+vKot;L+hM%UtY@&q2rEl!>?7CsRZ$Z;*X5#SA!a=BO@ni<$V( zXP4N#c8UHry9EBjMEGDf^kF8|+ch%iaI~t^EUVIFSrwFGRx!kP;j=)8j86Hhhc6eH~qrxlf6c7d-xgQkl%yslaK%!-*2B(alm0JgD$gKeD;at0bWuF7rO!M zXPW9nuYJ-(_6hI>{78SguFK5u$wx!o5NYDDnVG#C_VWH*_%N{Fc5h3z#mtf))IY%F z7X)P-=<~srP=(b*B3T0Zj)`On=2{x_n7jes73{}`d9qDYsji+pd7~w9Zl%iRDyg=Y zAikWYSiJktg8ga57p?7tTD`)*A9wBr?8`2ne!2#Gv6@qyDQ{B<@B`eZGy2@jU(tTt z+KVWrpFHr$E&U{^-b2Lk&V7J<@c8!Bw;$iR7uN6Ii^e3!^uhb!gHQ3urhosToN?Hj zPbB&*@!EzK$b#z0bDh<8!jTO$N_Z*2g~C?RXo*^!NP>cuOZfkU_sP>~pi~JMnhu z#qXryJMku)ViV+y(i{0#tH+BoFTLK5GdpmG_4(w_RwNa#yC1FUaK|-{X~YS+~z)CHk@5&2~_EZpt~gre#yl?@p4O8!Y3t{EIS<{Y&&QWl(PTb{Tg!`ncWq`WC)Q z|G%UU^1uA@&3ae)Rx((=g$#|fikarBRRLxC*v$74=^j@PA14W3&}Y0 zpCIRC$T%nPpAH$f9p9ahaohcTI#|Z(_1~0ngZcGNUM2Z?yUw|TSDD=5lTSW=y`5L1 znSR&x@04+1k2cab|FVpm{oj;v7@x+LO%apij3YN?oE~)%^C0X06&a`Z>4m$>II_>a zO*fE?!}>bDyD#_rd6BW^E839>!d(i}F-}sd-o6f`zmu=MZ=;=qR1P>doI& z1vsXYf8d9s{}YMD0*36Ph!Z28>K5`kju7!CC|hY@3x z5Ic3MZop`R%(r0kWVC4CQz%;iV;JmR3t$Wbd=|hblf0?Pk;xz6D?XlDSGxRasHuF1 z2KoP+|1_bti)7!84gbjHv$GDPE^A6+3FJ_P`tW2eo900*{P7mu_C(4f7Ul;0-Y0`3 z2T?HhO^`{}`_7 zyTv#Bwj7$a>tiwIez_&)LDmF5{s45=gj&rO%+azKJ}mh0@#C(XKcFU1Bp$u=>S4Ry zYrpyUljsyat*(=Azrm*miBAs@pZe&}co+YaY9S$|Nxyqzc2w8*!d>X^4*&GEuWz## z{%`zKU;OgU`abeav8S*VzV~l(#<83Hs`uGyj}PWoJNQvxzK>V;{r-PWU;g-l;W6g6 z!1lhw-u__EALW)--C@KxG%YnI)Ey-~w4Iy=8S99( zd+Q7RdsWnx+%q_~>a$N=kpI_&uk|zZMbH-{Q!&QPePYmGzL@G@nQFNs7XIxvHiLfr z->^>x%Lo%<;eHLnMoSk^2P^OSN`5dg%4SK0EJd#0M*8G&XJb3KrHkrhfjiV*mEDvh zUfVc7BqecUNTTGe4fe^^hGl;J1#K)fuq|lMNG35! z(?zuY8DtXK5tXnRCU2aQ==9S1x7!VVT?iq4BR)gaSyp%!&y2*D=9`Xh*$<#t?L{1bGb2ty%^dwM;gd>QnAkci^u% z(y-YVVo=LOwG33ZOtmaD9_pWUt5L@^cuWa(XwGM^-d4-NWcfVRN%`v;{Np+o!8d>` zfcKOVcB%_J`^jtij6p zd6gSKaNc1ytWrth+)vJU8Rv&>smb>HX+_ru%os zyz%&3bq&3k|MvQy$?v`*Rs}?&wqY3RWJZe&{s(Gj_*vnI9`=Wbni&!FDY{?0UuFB+ zh5k0rUqXJ1+Lz*CSMdJe-2ugre=EAB*HjB8j8 zSB4V&D|i`d8@?QRI&}0f)XEIK%$E+$;=NSg0QZn;W&E`bI@C7g{@1k)&E8s=Z>?>B zj>kEe0S(pHAlIa6{OsV%QJc$E6(bI-<~gUR3O78y6txbjF99zX<#y8dr$TqwQE!@9 zo`3i;ThvaKAs96d&clhPbcelXK5QGOOh)|(Y8#r=McdbH>oiMZn)5Kn`p&?wiVR8_ zC896z<+y7s1(%W!%7J}1n<-eWZp8uwKl1eE)?VE$<_R+86k?mevOiq<| zhR;3l*ylI4txCp!VTeq&b|a+9GK;tbL~bQPmvIOUw^1HFrafj=6hz@_oDY) zLr(^sYC!r}#V$neU&DJ)LBAL_H`UBxZ&05Yp~l__RKG{LJJ1)_j(W>5)PbUIA2A9c zLS4HaI75A&x0ZckFn^&E#A$`FnRr&5=yt4b1fSj#K53XLE$SYqMib@(y zds|@$&l#ChRN4J&oX-natqM9KSXhCoZ;d2s;9r+7`IS()J3?HYm&o@$vsKfuG6(g8 zZt?1C+r9a>H*<~Rs-WKpZ)xme<&xJ`)$F=>b-<|xUisy+I^oQLVz#>Gex@#)|FNfP zySgNb&0c+Mn!M_r`)y<&1R0aX(+?=T_qFiA)tf_D?lHZaVjm{c7&$NRdn_+6iRT7Q zMa`(2=jGLlr6~A=N^l4=e=r?J4@?? 
zoQ9l6Y)Dtm$(#>+gq-=67w+R%=2r!5`kuu9VvzU-L$R=qrMGE{UlRJ(IC=HJ8onxM z41a4#8Rq$|*oOM63iN3Ul*HA#9lBqlUvIdISFc<-YDn=`;TXnX$X{L^xR2?d2iz*w zf_$%=#=>09n&KlTWP7LFyT;iJJXnc149_vS4{@rW-~U(Si(`4;;&A@fGkU&9xXv*q z&wFQrwQpHPprc>B-v0;G=OTaG_a`UsAK1bo)J0p*Cf1ov_6KM8{&9k}|5-h!^#>UZ zym!xo^1-OOgHggS#L5d>IAGEAE#Ap~`Moj@IJo|2%-Oft$c`DxP=Cstl@|;JXtOTN z{{P&4f1)<8=L60Hk^vPL-k{vRP_b$P-v}ISy1$H9)J+suzg7&qg>cuRKk9-2>(w`$ zV*7_Lh?##GP4$5qUSw2Nw5S7Gbw_pBe;xJ+@?O(Mv8o3RW|TSt0j~gOOu%;|WV65? z6p>@3=S8yjc|&Hwv;6}GUb(xJYbJ!y`MkEPe=6pyJh{ALiC(;V{1x89oI>x?x>-0Hlzy)7H3@YI>vXHX((#s0 zw+QO3>MFy>fzpXL--&>JIbYFd0G$={ha+aZ*H}_`Ah;z^s>HL&p0z{Wh(7@rI^;ui zcA4p1v1P&{=GHxfuhXFkYP=f?zSbtn%CI+cC3R(j6wr5e73u;y?Z$jUp(d-xE)p=2F3*$p8V=ZGK&m#K%v^ruTb8GWnjbW>{#_-jzk5^TW z*ehC?60k}cuD<&0{X;hz_6BULN`7M1rcW8-0z>Qi2IRi>tU7Rg-~o!0k`D#{=zOHQDxk7&3#|?7 zvT?v6fgdVgt=sXm~0kJj2ZFi<~F-h0v&E7j+|6AxaitH=2uU0&WK-rM^-q5qFt zoBRLhtj~MH$<14Upd_n-e)Mrx?0;m?J zBc(3Dr(YNgFDUVz5dqDL(Tc)95AMpg)UM^G{N43rf6V972gq;M&r{0kbKlsGz0KFH zs=3B~_5VJyk^I-g#s}BRgQ3)t1-^tF=W$5?-*N#GI#R6?zeLnmoY`d&YsdS!4k`m6J)O3#Zr@O3X`Ma zlsLqyWRpG2jdoUQQys4~M==F$LYfexR`6btGeX=Z)Ac68AK^PHLE)u%M`wP$@9lVR zzi`6)-o_a0XPh28QRt1s%ORvw&(IkM*VkJ(>=e|0J7U#49oiGn)3lGszntqpJ0Fb7 z2``K_TEIAsVDz3(^T9dV4LtebJmG~C?VLe=I7b7{yE3$zAV zQ9Nkd)%V`+VUT$d>! z{7(4!JyyU!%j(5D;K2Uc!Sl0&0lUdGO(FOJpUnq<^!L9Fe%Yk;(Tja3@VwFozYX>% z(U6^R|MAUdx8s*}5`Mir4g3?04StXLZ+m|yop5IL$oB7fX~jz?rtjUAPGpmG13o=q zndyZ^0DSbpnuXv%AHDeCmCz>XuJ8_`1uvaIRD9~El|k>`xf$R3p6jB)%a6BcW-z{E zxx4))&UJ+I1Z5Du$5Q@?aDIb7eRPj~l1;;ItVeR618+Y3W5;xYJ$l;1KOV%3v5I2r z27EX9@eUdReaKyd`Ei{s1a%y9B=Dpv>8&0+&R?Bx!C6DHE57a1&Wwy~gm3$Pxoi?^ zn4G}BOvLxK395#t6T~B72|OA$;WO=wPx$mH3GpQ{hDqE4Tb`cL*=65+vWLzBL>(9~ zk^IEk!eG~%;47InHhAf~i|l{o;&7M2OTxrW#UgwsRhr^d{k$+~8OG=$|JobFTpflw zlF_R9sDta|J4N)LPVaN`M#E1kI|%vIjIVlHEBvT~n$?>R;_S}BD3x!eT5V6QrW`_9ry zAYE#qc)kqzp{lVmfj?d_VQ&a2b~v;S7hvCj;9Oy@(}Y--YF0;>uS@i^J)v!nKb)BeL3&FhwPBDF3V+ld}1ooBQ;mm{T z`@@-PiO_H_(J$dWEm~$-iD&f@fuBknP){k_TjZ=|jm=i6pu0ne2*gM*FldZjft8=od}*>V3`lAZdg#6|oQVp{X#er~ zu^L6}-wr=w`*!j<1PS|Oc_(<1PzO;|N7o7zYvuz6NE{7JAq^D59+V2YC64AyDox%(^P<1`7wFqzt`o>Vr1=RJn)@ak z1SjAi8ltw|cQ@elt^w_-KaK09Ia<$+_6ft8Nb~sDMeV)oqCWoi)b~dFZ<8zk-RHEY zcl(e(pR)~h-9Kf?77awUFtb?c}>m$hWi<=WYz^lNh~lqsVbn;Y*8CNR!I}y z3w$Lh`H5^fLAhWVVB#0SfhVJ`R@uJIeNL0Tc!Sl} z9W}1vM2%T#HW-<0^`?*}#H~(9*O+^Zxj2z*Q9j|(eV!K<{Mbdm$D8-^ zc)nNYZ1%+kC?=3IzefQ6E?$}MSRys~)J3Xul9*}QAk3pXhC(fPDC2xeG#gs?)CK8d z$aQ*e$9vr8;Cm1F@<(~!V@+Cn;Z%K<0vWO7a4>2aMdZ)UrUg0GP1omKAUM+A_%&)u zs&UHO5v$jtjsx`zk(OD=+f~Qyd6+%Fs~{@wT-SmclYQCjbBPI7iVxK=<@t)JvSnzK zcsVg4k?7$0w}yVaEAmhThP*JoD@0tqX*-WeP;`j<2YlY0GHO+xQ-Ckzjh|MUxAWDT zUJy%Azw%j3v5@;J&h9;6Rsr*sJ?<9V5B{9<*$M4Y+n%uJ|P z3AXlQnKM4dT~hU5X4ciuxaL?g`}&NJ=Ui}1@BNvc@qGCl*4f+|n&4(OtZ%c_6wgB< z(btYj6R9_=&m5!u}mq$F28TiUvxr|`oeY@j@3B0(`^ZHgH5a;ts0+x3B}7yX{rLhHk)oz}O&;+Q@UYg@JH0k-*-1vNd3Bf|3#>tbqKXE?7l$r%y@eVO8U zZ3NYxHyjLn1$jt{FLvO(f4xJyAU4%AmWRhV4?1EyVbTn`#}DX^eN04la!ss_wtBv=u6BgurQFfr#N*oU0CI>Q%K) z^@2@vKGv^b)|!EVvGcLk%LNH$lJAg7W}MM5U#_Sddw~DR9L}{YH>ehRB2FRu!j!LX z%Hpy0xSOi}X+$)Z^H`2HocIWF5stVW@)vsr$8>uH=$qboZ3`NM^p{+I={1-I7-R22 zsZ4Yu_Hfc`E5XK^>zss`XDp}l1&vlOrmG0&IB28>~FncE8w|O>dN_3#fIDCyw zvua_B#hPo!JbR7cCHf#-(0VpSjpSjKU9Vt_gIBW43OT?H z^~XD$X$JlwljzJ9!8L=hRrtj+X3f8FfIq@#*`Ab9vn6*I&ba2qSMAzNIU`OkhJ7Z< z1CK~cxlH)ZG!+V8;~W#+eUm9#S!Tv2A#iI_J)LcWJoE0eoKeC0o{4An(y1SdvfL%T zGjN>x(%#*Wz2d1*3*uiSf9X7+!U(=z^oddwVKw58F-#C=eEd`$UUzL+)BRPAc7aAe zg2{`{Etn?TQ{?gDqH~Xnvupv?>7SMir~O`Zj^IAWL!Pe4F9Yn4$$A~Ls<1zJ7M7-z znM>faUNP61D?Y{9HKmN3OyR6k4#63uI*wS^_(kU&THIYHQ+vdM%orz*=+l%DeY0PK zXf&TgpP=YD&~Q`r@M|s*>_OR|O7NrW4&ZDo{apF!7GAq;!d~rK_xY)M$8_}b;$}|W 
zR0FjqyNK4ySY(^DZQwZ3#DbtJLEydP;JbRzLp}KIIPo8Nt{yxQkRs&==FHD&D9SF} z>Wg__na_e)4~_L*pBwUHOmfl;edGM3Y}Ab)c46Rl^;DF;dOqniNovQE!UKlz07<=V zS5gvBN|vTEw|;a$lfrFhvUqj<^Jew7=D&RYXOf@gYA2%@cR#+Uek3fzsiePZWfhzB zEHYwQJ!*vO!wY^VKyDxs#oB7(6ut?u_x|6NH1y?$2#7poBlG6Mez}M~tIcemDL-A# zUF@uNXj9sQjTAqUys-i5FWX1DJ5myG_Ga&ibK?_E;&8J2X;IsRc;AQqI3IjyWm$XS zfugM=j`;ej6zL(>v=s4O>8C2CCDh6h8$8*HG0^WM#`zF2g(m5zUJ_(2q5DCb>jm|; zqOC5qMa-FBG@o#cAnvEg6wfCd(-{^2bN@4i%jeYFfV*5lD%v{i$l&*mB@S(Au&bwP zeshcgyfvR!n<5Jv*3RdaxTx_=_55Gdhu6w-cMW`%s%Hyd4qXl}M!0YY9pkDzG#;V56i0)6R9dtYVw`%V=1RwT7eP_%T{ce6hzgv@&atgov31YnH zpR^!iR`hV(sbEwB5D;pg5!~>$WjMnPSxHG`E0TYKOKpbVXF&mSTCq{(hjEk5i z9|aB$o2%fTquwNp`r|!2Fwux-qr)9bdb;|94$83eZT&%GDD2U84&y}UHLk5R9X01n zQ!mHa3W`ljReTb}x2=ufb4<^!-ZG3?E%&lG{b&)qAX(*%kG=LW<@cAHEu3tuM>kFa zuJ;OAkmK<29%G8o4V>4vuwuU4TwRZ^6NI?dM^V--zV|7W4Hk=Th7?q zgy;3t4*2oTpIT`FK9m5mQ>ZQJhul}mJ`y@eHc6q(E!o5-;4v0>JV-VWjg1Cvgh#@s z<$eX{fT}gXjjBKB0o?aulZk%%M-BDj(u>D-;4!oq=g)k2r2aQ?Y5OJ~$^Q4_(v)%G zIPFQ}+MH<>z$M|3L4FV(35TeYj@1V<>x#tcagS@2AGC(dqV$#d3;*IogPGWhu}W zwLLJW(ArV_4-|EKyhJlunTr?fzXgWGl|-z$(DdqI!pb6pc*DQl`hG0?RblB zFZ?m39&NpJ0iRM81M3z!ME=X)RH@`gO9a5lJNq13&B{)TTt1}|xDU19`&Y7N~h@{f!ns?lSM<@D~r0O3Nb&bH1G8yQfc-Z_MH$7$W@f-f}N3uIR9 zsW^62Bfq*`IM?gY_O6)%xv#j%dt^2d;BV+?cp_;f?!*c(I7Lal{^i#V38EDE%2Ruz zj7*)Vk+k=yFzm8ZHxDfECdLCh}wm;Fn;!uDTfX(z!Z0aG~?*wN*>g-XP(gNJM z)O+i5m6wG0iGL8nt$z^4r^S`x_Em?g?LP%meqs}BzWUh_?VC_0*A5&<#=_lR+oXltKfE#~btn*RvP$Jta_`@!$yow!>SxqG5-%->=5=E!u! zyYbdPfCpmh<%|(-@GAC-&dZV6Y0`guRTrtIT|hks=n~*`l6<>Gn|p82CZd!rSC6aK z**gBGv`KWSFs1w3XtPxps|myXPHCRWns2Ai4L9kt^?}3C7qHtvqn#R}&}=%OaKT5P zt^lG-FMW1#(C6k?R;Nb=5Pe30*NH|?2U!^IedDI`(YVXiqNcoz^x$BcTq^{x6Q#lQ zNcuh1GafWKo_P5NP5S6j`Wp0T9Yl}txqbBbARnK$uf!2s{Ss*Xf{!N0d1=xXky#TD zemhZCDEOix9CyR1SagblO|W?Sy`!I;QhbG@gbyIqjy#X z5ba(0IaOrjoGjet_ij;Nx8BwBdmbM*f&al&^vYAPFOmI=m?Qbyf_{6xEpO{xFaEoN zJ1ie}bKN3RaG<1SECNEKNEvqt1{jN)6dBzikU zHmq9wChnK4Q>|XCXXR0U(Df}E=iL1E`&HyW^>s%nd9TLHl~Lhwh;PK%Ww_qgUKxkn zdRYIHVrF{Of$YAJw;4V z$!DeO1DmB>QNf)c1r3EsNe=B(b&vDHZ1s9|6;RgG5d4=-{Ypr{x!`wCAZFIoqIvt^?{zMtwLw;yWW~0b&=0LMY{ln z^2ewJXwemaYmDW}#K=a-G7-KP&)M4ediRw-iRZ82=fb3IhpcjUP5&f8wNGSRx$8&mSEJq)}s)JveIjH#fPR4h=> z9mVHj8}2FE>OdVQYOO@qrtLyH>=TOXQ2SccG1Uf^p{~<@5gupx%(YLfWFzH^B)_9k zYX*4>ea4vURSWLxiLQ}iQA4V*j==Rrulsc3TA}E5-TF+xnkw%NkOXG6VSQ5g)klEm zIQUL+)J{qm&|Slwn)Ea7XhD2NLVjM$Cl8qfG%`OzKA9jT78$&LaW_(;&I6u)U2A+%vr*RK4`K&KC_k(Z5=16`Q z$a0T8;;D}#0t*WRPdc=em&-!DFT1M|TMJ|{Pe-b5sBHKf^mV1`P)dRli0WbZ;JylC z_}>?yr@!-aWy}}nn@1(L!`^!m`ZXyFwq|(2ZlQ60MC6mq(@l4Sv>GMW_jVWJd)09( z5-nj-53*+54|389`azuBZNRO~s}?((ED5+KxBBWVi$)A74UqF|nb@J|;q*@pW8hq~qRdf4_~L=#+= zIHvN}2mM+KqxpN^k;YFmZWeRT%c{J0M)UsHWS;k$5vBXs%h{qVWeL_u)Ehqy--_;i z8G|wK45kas2LE*{(t(KIO*865z=F*mAFKuyS391Y#%1~9eB>nOS+yTTZ))6Edol2< z-}4Zx$vN)tk0mgdQ|TtXL}LwkQEM<^VR^s13n~@j3bn=nsF#QV+if zIZRcPdbvYeWtI-Xe>{}t*-}q*m3K^bV;sbnWY2!SV;t2jfxiU!!r!Q0%89%Y|6QK| z|EgHs8tMv2s!G~cyFMOB_FTx4wWu9*OgCkG(lkGDe?4kP0#ZFO^;IcJGVQM^-{@Ys zF!DOl2Jr;V@8>fWn+^O>M7Z&ZY5y;Kp_Stw0-T9ld{bF1r2wf z!}-JMeYqCM0<9Wy;=`-(G1~L>e5yVf_Y))jhdA7PzTX7<$BKI+R`}wq-?v^>qB!NK zkvsdyfV$Ag-~|@sb5Ek))7lDK$r0NVAR|67&G>ITaGN7G6EuVvJKc}!&J<(at)bR> zqA$F!PxZXvm2K3|WRInJaB8+E0BgnlsMSI^{x(M7wuY$;AG-gK#=-3(ZrC3lygx|e zp$%fPR^U&2gvO(F(8rW7d*P=uzkt_9aHH0s$s#F7T7x9iZc9gKpJlt%N^?##j=cfz z2J80`{Hw&eapebJ5s=$@ z@O01=RypP{hYZ0TK?!kv&XBmq5|Y_iAM8iuv>45Oi|)qzWVN<4R*_lvuIrt#jRk|V z7vqETT$1W)&{S5yJ!jzW;Vz(`%BPI-xYp3x@m1&LKQvAVd|M#7f%6Yl;YBAK*!$`8 zMit-lsL@cJ7Zf}sujf(ls4;3NTk0vl_eUncJ`VK+doID}L@ee1*JKp$&FP4XOc|WtE|074uG9)$6Ysl4mBzkbR*+H>LbC?o5;=YBRm7`sjX|=wx*v6w4-^Z%_m!CY{&Pp%ya!w&{2kS*#iLo?c^Nui 
z+uYU=!f?;m_)#RAAhVn}qyt|@Idjs?6qz+^$pjDr@v!d+szixT`ZZx&F+n3u196qDBK;EL=%Hhigo+vUm^b zi~4`1ZE8E~7W$4%;r*v%_p0==c$a!dxL{k$PK}k~FKLY7`-ar!rZ16s`XY(<%?jq8 zZ2J2*@1>CbaH*%I`1XQ!`3USXkrvDUsX?cTcXlKe#*toU)liV6( z6XGFC+@6KIoWq$LwMwe1#=)fv|P@Zj_pDH2>6W7 zsGu%#iT`hB(0lk8_|W;9>PK#CGt2k)HK+|5v;{4&MNogDfmYpC#DD3$nU>RYKM)a}*J=VKWCoHfzX|3MR21J^t&R!A3Op* zf5?(s;k^er^}LSNOiPlnZrlT?_1e*=Lz&a6D4uszj^gLQw`6~G#$n8K=DnjmQv_s6 zx`%#;UdP`R*)KqkGx)3OPS|twoIW`dcd27y8LIGewss$wmE84uPT|bL58qz>jwGKM z<~m1qdCq#oI77E1zflc7kpg7pJXc_w!j=?a?}kZB$mfdn`m$8xPveg( zw1`m}hSKHZXV4nT>~QM|BPH7)f74F$+rZIom3wzKYt`d43FE6I*rCH)0wTi?S#8LPuX5S z-Aw%?@S#Nv%TT$g9S46>TYnpZp`jUn4~-co<1Q^u{1+v;gfD+m7eKg1-6`-+_wBFY z=kbMFE1Yk_ov$l)za16qG6idx{In=p(H1oZ#U_$JUz!sFC@MnqWeIk9>5;}E{Ly>* zKhvOPnfYsl=wZIQBZeZ<=zGsPP(5Px(qcNUb z-uuQb5Q29ghl#&wgm`lRh}Ef$}sU-_fCvqBEe8;dx{MwMp4w3XB5ZVqv4kiRG|b`Lpz>HW16_)!bu$Mmu+rdekTYcPpz%;^=+1iL?`&;gS!e9jNmnnu zCr)`%%NLApn3(iI(g?Bvh8nXj63?aS?q~C`=X2S}u;t?R@OiWFvtkVUiMTy1o^KDE zHx|F!jA8NS?O_x6c1w$H8mINqXD)Jm?`J+yZ!laLXBffU<%^7bQPBcT`J!SzTI24h zsN2kO9#eL0EN7Wum-_N3l5f4wIGg(Z;&kV|qhW8u%Fi3h8{cxoUimp>k=J%0=v`;#O>(li_=;mIMzk_(ZAOADlM~rYLvr#U$O)Hf`o)>tA zNTs%ElGnbt(fk|Vym#iJwi0@P9iEU7kaB4t@K+kAF!OT55hka1QT+Vf``CzG>Rrfr z)$*jI_p=R;e)PJxek@SwOvOFpRBr~KOqyV=6(VO|8CCfDu=UiA-ODZ-;S)mM{Zq0E zP2lZj&|h=)2>gu^-GgqN$zk!($Ab;NLAS=+KQ^SKQxk8W2P(QKCK?{$0@LLra;EWxzsxm zSGe`L;U6ZQ9wo7v3!d0^ido{4KUlW5;qAzo#qsH%l06q3I}qBM8pT^%Yk1xp6a(99 zg3iI;VYINKmB^iYbY|3bnNcTTruvoY5sX*k{#+P<9{A>XznSvVr~%Nzc8G>g_c*t82GVq z*P*ISWgj35adFlN+eNC67HpFlodG_;UE(dix`jW@gfbRoC=e?8?p0z!SOKc8oV5$9 z^f(9eH?-=Ava!GIfg#_&y;#}(2#bd=6n{z={z19g^#SSxGqGAs>AfD2Gn<<9`eZ9M2l!>8VE(aR)zc`FI-my-ovEWzdY3Sz@{*RpT{_+ zKi37_EMw2c7~-#^3K{oLeXKr`qR9^|S^-W$_B@B~Rk-na4Dt#5y&mHRsLZkO6WRY) z+||c6bzSlM-gDyQ703%QC`=$Ofq)ZCXWW8Yl%ntzx>n0tJ~HSiu^qqKDT!^_PT~aC zISGSIN*kxRD3Kz~Y03yyBv1#fHce{+Yg(plicmE%gtUAOfjR-*Mhndb`<>?xl8n{W z{@GESckeytp8N6MJ@4Fm&wVF@b)3@Hu|l@FBKVIkYhE;>IwFH1+rZF@);Sj_Ya|o$ z5yFe!S=)~d57PQ!EOW6)Jg8oKc)yUP%*%iZHhM1`cNRS1-=_n87$@j^}VAoX6qMZ}%(Vf^`eDQ9h@&eW$ zc3_`u$Au-i+8klL-5|Q2TZwPTMyQ-gdSBp9|5r%JuL@^DzvzSwMwx5)oX2$8^UG7$ zf3GvzZmOI6JIUwlm8D9H^2vh^==v?t?T^6D_>Upm>z!4Y8(hvYlYNswd-i1CM0|h~ zNdJMoh;X$2$dAdkE@}IbtPG~kW5xGB)Z^TZ{%_eI>Wg13(BnHcdg#_7e4}LJCmU^d z=sORB@u>n))1~Z3pu=T}#?gOj>BK9gzm3#uzEvIA8FJX$a4uCRCeJA(%bb0s@$+T- zw%O2{h3Q+*J9GdXQiKE>|Muvsvs2U!F;=? zAhr7`VYP_mlgb0Ys)0b zm&?!mXS2l*25%zK&9 zkdOQi@yTK(ERJ^@B= zDc^J$etG+Ic~|c*9o|?z4|>sKY;~UGuUIHqD&~t8%&&;nk>_SjBl%KOtV_JR0N*>f zeH`O6s_~+4a!~)=RTb%;EsA}lpNfq!+{?r}@KNvV>E%2v|mm8%WYY1sFkms$BH{0qAMYQQ~%S3JpENCk;>OTZjqU<+T&tAug4`bw_4+4TfDWkc8@c#fxEmW{;((GVf3C! 
ze%KfjPlVBpsj$`-lRT}H(QH$q9m!5N$%$H+IMslfQ61p!MyKEFb9+51cX;i7CwJQe zJW%IT{k+QCz#9~dW-fYYV{BSN@HbZ5#=B#+V|~!Gmw;c=n$3Ozz;nFLwFF#v+imj zX!i%vmAuX;rv{2fOaF`nsaPt|rp$z`DM+Mm(m;3w#0fTQAU*;J&r=ooKLyNdEl)hT zZoT!Xr|pg^r~1mn_@>QUwr(q9g<8et%B3HWOxb;Bn{i+&E>nD4n{%DOLaeZS$d_ab zD86`BoFkr;R)k-HIG=*O1%TqlS@FMuw}v1BfL8X^{f|MMY4Yqg@X)YW=?CH58vK$p zwtqbDxSF70&w>Y}ZSmki2K|9yJqfA-QO<-f%q6V&OjU~J+WHbrA5T(g_-Gn@Bn`gQ z@%8+D_OHj+q`|w>;GJpkwlw&GH0|q8gMTLtK9&aG^}plk*?BkTE&cU>!MGFQ@R#A- Fe* + + + + + + + + + + + + + + diff --git a/drivers/paulboard_driver/package.xml b/drivers/paulboard_driver/package.xml new file mode 100644 index 00000000..00de2fc6 --- /dev/null +++ b/drivers/paulboard_driver/package.xml @@ -0,0 +1,29 @@ + + paulboard_driver + 1.0.0 + paulboard_driver + Matthew Thompson + + BSD + + http://ros.org/wiki/paulboard_driver + + + Matthew Thompson + + + catkin + + + rospy + hydrophones + + + rospy + hydrophones + + + + + + \ No newline at end of file diff --git a/drivers/paulboard_driver/scripts/paulboard_driver b/drivers/paulboard_driver/scripts/paulboard_driver new file mode 100755 index 00000000..53a0e29c --- /dev/null +++ b/drivers/paulboard_driver/scripts/paulboard_driver @@ -0,0 +1,149 @@ +#!/usr/bin/env python + +import serial +import numpy + +import roslib +roslib.load_manifest('paulboard_driver') +import rospy + +from std_msgs.msg import Header + +from paulboard_driver import ais_bootloader +from hydrophones.msg import Ping +from hydrophones import util + +class Node(object): + def __init__(self, ser): + self.sample_rate = rospy.get_param('~sample_rate', 300e3) + self.thresh = rospy.get_param('~thresh', 500) + self.frame = rospy.get_param('~frame', '/hydrophones') + permute_str = rospy.get_param('~permute', '1 2 3 4') + self.permute = [int(v)-1 for v in permute_str.split(' ')] + self.ser = ser + self.pub = rospy.Publisher('hydrophones/ping', Ping) + self.buf = "" + self.last_packet_num = None + + self.ser.baudrate = 1152000 + + def run_command(self, cmd): + self.ser.timeout = .2 + self.ser.write(cmd) + response = self.ser.read(128) + if 'OK' not in response: + raise RuntimeError('Invalid response to command "%s"\n got: "%s"' % (cmd, response)) + + def run(self): + self.run_command('samplerate set\r%d\r' % (self.sample_rate)) + self.run_command('thresh set\r%d\r' % (self.thresh)) + self.ser.write('go\r') + self.ser.read(99999) + self.ser.timeout = .1 + rospy.loginfo('Paulboard running') + + buf = "" + while not rospy.is_shutdown(): + got = self.ser.read(10240) + if len(got) > 0: + buf += got + elif len(buf) > 0: + self.parse_publish_ping(buf) + buf = "" + + def parse_publish_ping(self, buf): + result = self.try_parse_ping(buf) + if result is None: + rospy.logerr('Got bad ping packet') + return + + packet_num, samples = result + if self.last_packet_num is not None and packet_num != self.last_packet_num+1: + rospy.logwarn('Dropped %d ping packet(s)' % (packet_num - (self.last_packet_num+1))) + self.last_packet_num = packet_num + + samples = samples[self.permute, :] + + self.pub.publish(Ping( + header=Header(stamp=rospy.Time.now(), + frame_id=self.frame), + channels=samples.shape[0], + samples=samples.shape[1], + data=util.samples_to_list(samples), + sample_rate=self.sample_rate)) + + def try_parse_ping(self, buf): + # Verify we got a header + if len(buf) < 8: + return None + header = word16(buf[0:2]) + if header != 0x1234: + return None + + # Parse header, verify we have all data + packet_num = word16(buf[2:4]) + sample_rate = word16(buf[4:6])*1000 + sample_count = word16(buf[6:8]) + if len(buf) 
< 8 + 8*sample_count + 4: + return None + + # Parse all samples into an array of column vectors + pos = 8 + samples = numpy.zeros((4, sample_count), numpy.uint16) + for sample in xrange(sample_count): + for chan in xrange(4): + samples[chan, sample] = word16(buf[pos:pos+2]) + pos += 2 + + # Parse footer + checksum = word16(buf[pos:pos+2]) + footer = word16(buf[pos+2:pos+4]) + if footer != 0x4321: + return None + + return (packet_num, samples) + +def word16(str): + return ord(str[0]) + (ord(str[1]) << 8) + +def check_board_bootloaded(ser): + ser.timeout = .5 + ser.write('\r') + got = ser.read(99999) + if got.endswith('> '): + return True + return False + +def bootload_board(ser): + path = roslib.packages.resource_file('paulboard_driver', 'blobs', 'SimpleHyd2013.bin') + print 'Bin path: ', path + ser.timeout = 1 + ser.baudrate = 115200 + with open(path, 'rb') as file: + ais_bootloader.boot(ser, file) + rospy.sleep(0.5) # Give program time to start + +if __name__ == '__main__': + rospy.init_node('paulboard_driver') + port = rospy.get_param('~port') + print 'Port: ', port + + with serial.Serial(port, 1152000, timeout=.3) as ser: + if check_board_bootloaded(ser): + rospy.loginfo('PaulBoard already bootloaded') + else: + rospy.loginfo('Bootloading PaulBoard') + while True: + try: + bootload_board(ser) + rospy.loginfo('Bootloading complete') + break + except ais_bootloader.Exception, ex: + rospy.logerr('Failed to bootload: %s' % ex) + if rospy.is_shutdown(): + break + rospy.sleep(1) + + Node(ser).run() + + diff --git a/drivers/paulboard_driver/setup.py b/drivers/paulboard_driver/setup.py new file mode 100644 index 00000000..bc1213b0 --- /dev/null +++ b/drivers/paulboard_driver/setup.py @@ -0,0 +1,13 @@ +## ! DO NOT MANUALLY INVOKE THIS setup.py, USE CATKIN INSTEAD + +from distutils.core import setup +from catkin_pkg.python_setup import generate_distutils_setup + +# fetch values from package.xml +setup_args = generate_distutils_setup( + packages=['paulboard_driver'], + package_dir={'': 'src'}, + requires=[], # TODO +) + +setup(**setup_args) \ No newline at end of file diff --git a/drivers/paulboard_driver/src/paulboard_driver/SimpleHyd2013.bin b/drivers/paulboard_driver/src/paulboard_driver/SimpleHyd2013.bin new file mode 100644 index 0000000000000000000000000000000000000000..4b4c50a08d614cd4256faaceadbf2c4137bb507d GIT binary patch literal 33440 zcmcG%3tUv!wLiYkKEp7Q(K*AQ8I37t287{}IwIO67n>PWu&J+*njki}KoB1>6Khlu zlX^e}wImLR#-_Fzz#yW%A;#D?m;5@Sg0*QxQWM0q9S{+W5%W?&BmeK3aTJZo?Y;kd z#}5MM?8n+`ulHJe^OGOVwhx{Ac&fkzroop;KjVujiavccV@YvOWLl06@G$$s&PDDG zyhW#CY0k2O)Jt0wk;$Ic(2)909wLTJ)N;r4jKIdysYNrajwNZb#4+5tpp;;KM0$_? 
zRdOtOzkT7hEI#@W!$-CbDASm`%@J#hJRKU+tU)_`uV2nb*IT&67q&BwR-OdblhHD( zSXj=T!l_V+MITth7rQn5DQWg^@xCjImkDX?Pk{mNIbw73%9vimoToya`G9LJV~U3< zn9L)v`CKbLrFSgJZ7pL?yzo9}tgVjCkdo>f3Yg4TUuiMMn#5Z)#cZ~u~_HJN{j`6vV>RHsW0Ui@?7Sk#QT|*`|9{<@v7&4ebr} zSXVPPq~p9>BU+BFIlR!VIo$YWlRAEJ2+uMD*Gp^>7@(W*wwxPz7MJY+p{#9lghpTA_l{HiX%0~l0{32m)9nakzVtn(htXBDO$;@rx0wsN)0K2LEaer9wfPTuHB zDILme;DN^-OELo0`vYF7lN&EuxKpPvc5Y$2qANUJJBDEW613Whb$0R>uz%f!jM=(6 zxoifl#u1*uGiL0O`3gPL$>uDylQSM-Ewh}U|Ly2|JD)L$UGVeT-S|vV$>*>}CNI6N z+>Ot`1|C1J5&j4l@$4mozI$LSt(P%l0gMdz|RKO2~p zSy{|Zh1LarSymS~)szrmc`r1e+pP&Os2f&*r@9Qe!=(A<1aY1;NgTFby*?(sf(L%3 z<>#@Nibgm~_-+Xe@!~sh-NWaSr8gqeFgM{@DPU&^*Mw)_I|aB-43>PjCQLI)>LP5n z>dJJ$m(ITyKWz|y`EV|aKAbNq>g2gM@r?zx-NHGZdvWd*|HSJ@^Fc<~*~JYC-=+aNcVVE!)uJTDm>OG-L@c=zL+v3$?n@Ge~%m{qi*M=MMlhZCUBBpr9T=7%`|9OSwR9%gB+wWERf+OC!T(~ zm?bdV%Ory=`v)LLHVE^1vEW`m9a6Q&j{Upf-WZWGTVnMw`(6}ND-{9qNlefv!VLW` ztWWMr2vLMzs1&|NRCv{x$2KPnSA-~bAJ*IOrXsU$L+nuLIf3zozh~T{RfL6YiV%(V z{maVu6)WqezSKTb)UqX*Cu-lc;(V>oFv=dX65T2&IOJi_B@pzznMB6_k{!@06%?O5WdC0(NAJddc7{9FT zDd^RCN&=sw@5!$co#rc|VouR0ftK2@1)1&i+=){!K5eWX>lp(6KZErQNvqtTxx$ZX zV)lJKgm63w_=TJu(#fZD!tXBimG{;FXH{cLg-O6~s`I!ehfm?_n5~U){eq|ni-2DP z^WnPTROv{<8_1+1TE&2C3AlCw*Ymx&{;SlG7R|UuVeD*;vs?xFBDRi|Ku^>7_n{V@ zLzxVE7)SU6{Nv^-+BoR6aaml7b7&pYm5mXppb2zmoizcDV$$!_&rd_{^$u;x^?^s2 zB6s-v5pUrA5R5k@i@ztNIj0A8^4XBX`M+ufBq@+9|2Z_-7Ik#Z!%GAuevw=Auv?Ma zVnz*#OKp1h=cjZ{%RX#RSeE8&ii7U9wua7Lmc@?>y9(963f$z-rXJrctibP8L*vw80G;^Kr3eaZyM&8?PqSAyQs!sV^-F6O6S(??ydsg+W3>| zhRQ|bHVC9M=gxjtzy`Hhv%^(PW~}vRl9&TZas4@*|E(ec{C2x8n_>MX9#qknE)pK* z1D5xhaKVRz3pe2)`+@L4u+aP;6(nd{fRj||U4i6`mf1YC{^fZ0&G{gIRqP_!skiV? zHtH=L$U~KLx8wiwn{(5=S==q8Rx*`TFrbTO1q^emNS-IF8{S!tcL1Q{Yn7tU6bgi* zcMcR5uG6hcWCHtCsvAbO@+GExnp`>o_OgHN39yrAX6CM>J&+3WgO*}`?T!7Hu>W6S z{_UbB<+tK;UVNxU=jL@RGMv`x(<5a9^Q<*xFF8=y`Tb-yirIAon@Yb;`-igr7(k24+c%2WDaKQc|1h9s`ZfZq@xQ?`?qMc zY_?4~*s@0p9xgjdGNo`4xH~pk);~A!2!6MwRRY_Du0# zFr>Zl3gGL-^B2UGkO?&JEtn(^Y;Yf3?Cx;I1PAaaCv;SHMC2zP#tDaWBtsDF7g16v z9S2LEmD0uQl!$g$BV9>(FM8j&lA%h*nSH zi#}6;Bxl=V==&Izq9YwoW9D7b!S{rJJ2r$6*kLLUv8Gn4ZX4G>royB)|G3^g9xe0g zjR9g8;EcM|i~URYSPP#o?E1nk9xTaXi<*z|m?=}lE2HMX{wn5i8dt|N`2FXFXZ7i>6js2)xlAMyegtK}ubzI^Ui@*SbI( z5g2UFUXmVZf3cW*-g64+qH-pC^nT{W&7~iCZLWo!@fTr7ErR`k{+@`!+LwrC?30Fa zgSu)e**?()3Vct6!)b4ehs3n>E5c71_Dg2$SfIpPH%p+Y5a3t#c}O3ck1se_N;Gwt&5>+z z>=UA?Q#vi=YO!GHXYeB!+!~@Q=sVz?;tGT5YVy1Hfj4%kH%c!LA^HS8iNB2B2AXgV znIp~6Ta1zlHm1t`Vmb5+YqNd5L-oP|U{_-WpmKPy0i02?qU5@u16y2hB$;sEUtf za>f`|^3}jtpI!UHUG3Vt+h>DfuiqJ;aKeZ1$)Q%AQ$_gq&86iOqmWJ8#=?#Y15N;? 
z&^AwzQ~WWC&on#j(M+-G5uYF&Rj9)gV_YT<|Bju9zkgci)YSZCjmG(-uXQwTrmzWm z3UPr-72k&VK%QL6x0%a$o^Aw7wk5$nle$XmOil~ZTla)2^{~Ct^+8Pj^Y<4lluZ{O zVbH&6HG0GfvNsv5OvWCW5i2rJ-smz{Gew5{_L%a|J~9FLYtijAp*_RzKif0XH94(P zwjl;JIqgwFK)+1d+bzK?9dKu2J(<|o4D4$<_H`Ndb=ly3_3x?7l#aXYtA9_if4=zM z9sBytJvFnEFTc5`gri##b-#~rE0)fKJr`kXAEI^1wcm{K;c=TUp6=UIvQ-=MlOue4 z>Xo19;z9PmkpCb&$Qg@=`Fxh-h|IOc>;=er`fkJil02u_lNR5v zOw8Guv%c~5l%oTI(gNrkE}il5*;?V6h6P{KDEcuIi+Rm`E)E$7z026Pi6PBsgZxGN zt8;^(Z(Gw_dyh*j%%pB&Olc3FSlmo)9I@@MJ}W39#kYiXCMAgr9%Zu9wPlnqe*wAQ zY?GCL{!qi3k-IpdcLz1SpkmDlA*}gDku|3(_ckLor=xgLJZs$Yw6Kv9P946u9Mdgq zV>)lFsM!D=o(nyq3`>01DMIfI`>X!I{Vk!RAtzkJ{xP0?kL&@%wyz4?IHWod}bT}E6I{u}&98T7B^g|?-t!t|xQh+pE-CRr?!FNu&x;$_68FJ~sG zrha^~Zj0sX(8~pNqFla2cVK9QswUFKnby{U^?PI0cf%9t> ztQA_Z{yXEB^D3E`h!`ftYR^hfNfZA)>4^wvnbxr(-0;&N3Hx^@hg*27&d4Rl*E-9k zKeN~$?HlQD z$}`+;?`YrfIU8o`y>0J&v>k16c3t}?{NS^|dvo-7S2Mvo=EO@rx#^usBg5oREhV23mZZ(#12qXR9ljhm5MYl-q~IoYBM zITwnUYvxvS|zkW;?B`PMqsJ zY%f<=C)#BO+hoch+vKot;L+hM%UtY@&q2rEl!>?7CsRZ$Z;*X5#SA!a=BO@ni<$V( zXP4N#c8UHry9EBjMEGDf^kF8|+ch%iaI~t^EUVIFSrwFGRx!kP;j=)8j86Hhhc6eH~qrxlf6c7d-xgQkl%yslaK%!-*2B(alm0JgD$gKeD;at0bWuF7rO!M zXPW9nuYJ-(_6hI>{78SguFK5u$wx!o5NYDDnVG#C_VWH*_%N{Fc5h3z#mtf))IY%F z7X)P-=<~srP=(b*B3T0Zj)`On=2{x_n7jes73{}`d9qDYsji+pd7~w9Zl%iRDyg=Y zAikWYSiJktg8ga57p?7tTD`)*A9wBr?8`2ne!2#Gv6@qyDQ{B<@B`eZGy2@jU(tTt z+KVWrpFHr$E&U{^-b2Lk&V7J<@c8!Bw;$iR7uN6Ii^e3!^uhb!gHQ3urhosToN?Hj zPbB&*@!EzK$b#z0bDh<8!jTO$N_Z*2g~C?RXo*^!NP>cuOZfkU_sP>~pi~JMnhu z#qXryJMku)ViV+y(i{0#tH+BoFTLK5GdpmG_4(w_RwNa#yC1FUaK|-{X~YS+~z)CHk@5&2~_EZpt~gre#yl?@p4O8!Y3t{EIS<{Y&&QWl(PTb{Tg!`ncWq`WC)Q z|G%UU^1uA@&3ae)Rx((=g$#|fikarBRRLxC*v$74=^j@PA14W3&}Y0 zpCIRC$T%nPpAH$f9p9ahaohcTI#|Z(_1~0ngZcGNUM2Z?yUw|TSDD=5lTSW=y`5L1 znSR&x@04+1k2cab|FVpm{oj;v7@x+LO%apij3YN?oE~)%^C0X06&a`Z>4m$>II_>a zO*fE?!}>bDyD#_rd6BW^E839>!d(i}F-}sd-o6f`zmu=MZ=;=qR1P>doI& z1vsXYf8d9s{}YMD0*36Ph!Z28>K5`kju7!CC|hY@3x z5Ic3MZop`R%(r0kWVC4CQz%;iV;JmR3t$Wbd=|hblf0?Pk;xz6D?XlDSGxRasHuF1 z2KoP+|1_bti)7!84gbjHv$GDPE^A6+3FJ_P`tW2eo900*{P7mu_C(4f7Ul;0-Y0`3 z2T?HhO^`{}`_7 zyTv#Bwj7$a>tiwIez_&)LDmF5{s45=gj&rO%+azKJ}mh0@#C(XKcFU1Bp$u=>S4Ry zYrpyUljsyat*(=Azrm*miBAs@pZe&}co+YaY9S$|Nxyqzc2w8*!d>X^4*&GEuWz## z{%`zKU;OgU`abeav8S*VzV~l(#<83Hs`uGyj}PWoJNQvxzK>V;{r-PWU;g-l;W6g6 z!1lhw-u__EALW)--C@KxG%YnI)Ey-~w4Iy=8S99( zd+Q7RdsWnx+%q_~>a$N=kpI_&uk|zZMbH-{Q!&QPePYmGzL@G@nQFNs7XIxvHiLfr z->^>x%Lo%<;eHLnMoSk^2P^OSN`5dg%4SK0EJd#0M*8G&XJb3KrHkrhfjiV*mEDvh zUfVc7BqecUNTTGe4fe^^hGl;J1#K)fuq|lMNG35! z(?zuY8DtXK5tXnRCU2aQ==9S1x7!VVT?iq4BR)gaSyp%!&y2*D=9`Xh*$<#t?L{1bGb2ty%^dwM;gd>QnAkci^u% z(y-YVVo=LOwG33ZOtmaD9_pWUt5L@^cuWa(XwGM^-d4-NWcfVRN%`v;{Np+o!8d>` zfcKOVcB%_J`^jtij6p zd6gSKaNc1ytWrth+)vJU8Rv&>smb>HX+_ru%os zyz%&3bq&3k|MvQy$?v`*Rs}?&wqY3RWJZe&{s(Gj_*vnI9`=Wbni&!FDY{?0UuFB+ zh5k0rUqXJ1+Lz*CSMdJe-2ugre=EAB*HjB8j8 zSB4V&D|i`d8@?QRI&}0f)XEIK%$E+$;=NSg0QZn;W&E`bI@C7g{@1k)&E8s=Z>?>B zj>kEe0S(pHAlIa6{OsV%QJc$E6(bI-<~gUR3O78y6txbjF99zX<#y8dr$TqwQE!@9 zo`3i;ThvaKAs96d&clhPbcelXK5QGOOh)|(Y8#r=McdbH>oiMZn)5Kn`p&?wiVR8_ zC896z<+y7s1(%W!%7J}1n<-eWZp8uwKl1eE)?VE$<_R+86k?mevOiq<| zhR;3l*ylI4txCp!VTeq&b|a+9GK;tbL~bQPmvIOUw^1HFrafj=6hz@_oDY) zLr(^sYC!r}#V$neU&DJ)LBAL_H`UBxZ&05Yp~l__RKG{LJJ1)_j(W>5)PbUIA2A9c zLS4HaI75A&x0ZckFn^&E#A$`FnRr&5=yt4b1fSj#K53XLE$SYqMib@(y zds|@$&l#ChRN4J&oX-natqM9KSXhCoZ;d2s;9r+7`IS()J3?HYm&o@$vsKfuG6(g8 zZt?1C+r9a>H*<~Rs-WKpZ)xme<&xJ`)$F=>b-<|xUisy+I^oQLVz#>Gex@#)|FNfP zySgNb&0c+Mn!M_r`)y<&1R0aX(+?=T_qFiA)tf_D?lHZaVjm{c7&$NRdn_+6iRT7Q zMa`(2=jGLlr6~A=N^l4=e=r?J4@?? 
[Base85-encoded GIT binary patch data from an intervening patch appeared here; the binary payload is not recoverable from this extraction. The start of the Python script below was lost with it: its shebang, the XMT_START_WORD / RCV_START_WORD / MAGICWORD / OP_* opcode constants, and the op2ack() and delay() helpers are missing.]

import serial   # inferred import: used by serial.Serial() in main()
import logging  # inferred import: used by the debug logging throughout

def word2str(word):
    return chr(word & 0xFF) + \
        chr((word >> 8) & 0xFF) + \
        chr((word >> 16) & 0xFF) + \
        chr((word >> 24) & 0xFF)

def str2word(s):
    vals = map(ord, s)
    return vals[0] + (vals[1] << 8) + (vals[2] << 16) + (vals[3] << 24)

def read_word_timeout(ser):
    s = ser.read(4)
    if len(s) < 4:
        raise Exception('Timeout while reading word')
    return str2word(s)

def sws(ser):
    """Start word sync"""
    logging.debug('Performing SWS')
    ser.write(chr(XMT_START_WORD >> 24))
    while True:
        response = ser.read(1)
        if len(response) == 0:
            raise Exception('Timeout during start word sync')
        if ord(response[0]) == (RCV_START_WORD >> 24):
            return True

def pos(ser, N=10):
    """Ping opcode sync"""
    logging.debug('Performing POS')
    ser.write(word2str(OP_PINGDEVICE))
    if read_word_timeout(ser) != op2ack(OP_PINGDEVICE):
        raise Exception('Invalid response to ping opcode')
    delay()

    ser.write(word2str(N))
    if read_word_timeout(ser) != N:
        raise Exception('Invalid response to ping count')

    for i in xrange(1, N+1):
        ser.write(word2str(i))
        if read_word_timeout(ser) != i:
            raise Exception('Invalid response to ping %d' % i)
    return True

def os(ser, op):
    ser.write(word2str(op))
    response = read_word_timeout(ser)
    if response != op2ack(op):
        raise Exception('Invalid response to opcode (%x, expected %x)' % (response, op2ack(op)))

def boot(ser, file):
    magic = str2word(file.read(4))
    if magic != MAGICWORD:
        raise Exception('Invalid magic word in file')

    sws(ser)
    pos(ser)

    while True:
        delay()
        op = str2word(file.read(4))
        os(ser, op)

        if op == OP_SECTIONLOAD or op == OP_CSECTIONLOAD:
            addr = str2word(file.read(4))
            ser.write(word2str(addr))
            size = str2word(file.read(4))
            ser.write(word2str(size))
            logging.debug('SECTIONLOAD of %d bytes to 0x%x' % (size, addr))
            ser.write(file.read(size))
        elif op == OP_FXNEXEC:
            args = str2word(file.read(4))
            ser.write(word2str(args))
            words = (args >> 16) * 4
            logging.debug('FXNEXEC of %d bytes' % words)
            ser.write(file.read(words))
        elif op == OP_JUMPCLOSE:
            addr = str2word(file.read(4))
            logging.debug('JUMPLOAD to 0x%x' % addr)
            ser.write(word2str(addr))
            break
        elif op == OP_CRCEN:
            logging.debug('Enabled CRC')
        elif op == OP_CRCDIS:
            logging.debug('Disabled CRC')
        elif op == OP_CRCREQ:
            calc_crc = read_word_timeout(ser)
            real_crc = str2word(file.read(4))
            seek = str2word(file.read(4))
            if seek > 0x7FFFFFFF:
                seek = (seek - 1) - 0xFFFFFFFF
            if real_crc == calc_crc:
                logging.debug('CRC passed')
            else:
                logging.debug('CRC failed')
                file.seek(seek, 1)
        else:
            raise Exception('Unknown opcode 0x%x' % op)

    donestr = ser.read(8)
    if donestr != "   DONE\0":
        raise Exception('Invalid done string: %s' % donestr)

    return True

def main():
    ser = serial.Serial('/dev/ttyUSB0', 115200, timeout=1)
    with open('SimpleHyd2013.bin') as file:
        boot(ser, file)

if __name__ == '__main__':
    logging.basicConfig(level=logging.DEBUG)
    main()
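Every value in the protocol above travels as a 32-bit word packed least-significant byte first; a standalone sanity check of that framing (Python 2, a compact equivalent of word2str/str2word above):

    def word2str(word):
        # Pack a 32-bit word into 4 chars, least-significant byte first
        return ''.join(chr((word >> shift) & 0xFF) for shift in (0, 8, 16, 24))

    def str2word(s):
        vals = map(ord, s)
        return vals[0] | (vals[1] << 8) | (vals[2] << 16) | (vals[3] << 24)

    assert word2str(0x12345678) == '\x78\x56\x34\x12'
    assert str2word(word2str(0xDEADBEEF)) == 0xDEADBEEF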
From d6fc5453f3754292cb3a82d93bb4cf734096e6df Mon Sep 17 00:00:00 2001
From: David Santiago Soto
Date: Fri, 29 Jul 2016 10:10:42 -0700
Subject: [PATCH 082/267] LEGACY SONAR (#179)

* LAUNCH: configure perception nodes w/ yaml files

* COMMAND: make autonomous mode easy to switch on and off

* COMMAND: address PR175 comments

* LEGACY SONAR: fix configuration parameters to match current sub
---
 drivers/hydrophones/scripts/hydrophones     |  6 +++---
 drivers/paulboard_driver/launch/test.launch | 16 ----------------
 2 files changed, 3 insertions(+), 19 deletions(-)
 delete mode 100644 drivers/paulboard_driver/launch/test.launch

diff --git a/drivers/hydrophones/scripts/hydrophones b/drivers/hydrophones/scripts/hydrophones
index f62219f5..13896643 100755
--- a/drivers/hydrophones/scripts/hydrophones
+++ b/drivers/hydrophones/scripts/hydrophones
@@ -52,8 +52,8 @@ dist_h = rospy.get_param('~dist_h')
 dist_h4 = rospy.get_param('~dist_h4')
 v_sound = rospy.get_param('~v_sound')
 template_periods = rospy.get_param('~template_periods', 3)
-pub = rospy.Publisher('hydrophones/processed', ProcessedPing)
-pub_pose = rospy.Publisher('hydrophones/pose', PoseStamped)
-pub_debug = rospy.Publisher('hydrophones/debug', Debug)
+pub = rospy.Publisher('hydrophones/processed', ProcessedPing, queue_size=10)
+pub_pose = rospy.Publisher('hydrophones/pose', PoseStamped, queue_size=10)
+pub_debug = rospy.Publisher('hydrophones/debug', Debug, queue_size=10)
 sub = rospy.Subscriber('hydrophones/ping', Ping, process_ping)
 rospy.spin()

diff --git a/drivers/paulboard_driver/launch/test.launch b/drivers/paulboard_driver/launch/test.launch
deleted file mode 100644
index 78955283..00000000
--- a/drivers/paulboard_driver/launch/test.launch
+++ /dev/null
@@ -1,16 +0,0 @@
[The 16 deleted XML lines were stripped by this extraction and are not recoverable.]
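For context on the hydrophones change above: from ROS Indigo onward, rospy.Publisher warns when constructed without queue_size and publishes synchronously; an explicit queue_size enables asynchronous publishing with a bounded outgoing buffer. A minimal sketch (node, topic, and message type are illustrative, not from this repository):

    import rospy
    from std_msgs.msg import String

    rospy.init_node('queue_size_demo')
    # With queue_size set, publish() buffers up to 10 outgoing messages
    # and drops the oldest if a connection cannot keep up.
    pub = rospy.Publisher('chatter', String, queue_size=10)
    pub.publish(String(data='ping'))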
From e86790cf7a982ac57dd26fe47f6a6da2896425d4 Mon Sep 17 00:00:00 2001
From: mattlangford
Date: Fri, 19 Aug 2016 03:37:44 -0400
Subject: [PATCH 083/267] Alarms, Launch, Utils (#30)

* UTILS: Moved sub8 utils over to remove dependency

* GAZEBO: Fix build issues

* LAUNCH: Add launch for lidar and cameras

* TOOLS: Fix wrong name

* ALARMS: Make alarm kill work, fix controller startup

* LAUNCH: Separate TF into its own launch file

* SRVS: Add navigator srv directory

* Fix PR issues.
---
 .../navigator_tools/__init__.py               |  13 +-
 ...geometry_helper.py => geometry_helpers.py} |   0
 .../navigator_tools/image_helpers.py          | 114 +++++++++++
 .../navigator_tools/init_helpers.py           |  55 +++++
 .../navigator_tools/msg_helper.py             |  13 --
 .../navigator_tools/msg_helpers.py            | 188 ++++++++++++++++++
 .../navigator_tools/threading_helpers.py      |  30 +++
 7 files changed, 398 insertions(+), 15 deletions(-)
 rename utils/navigator_tools/navigator_tools/{geometry_helper.py => geometry_helpers.py} (100%)
 create mode 100644 utils/navigator_tools/navigator_tools/image_helpers.py
 create mode 100644 utils/navigator_tools/navigator_tools/init_helpers.py
 delete mode 100644 utils/navigator_tools/navigator_tools/msg_helper.py
 create mode 100644 utils/navigator_tools/navigator_tools/msg_helpers.py
 create mode 100644 utils/navigator_tools/navigator_tools/threading_helpers.py

diff --git a/utils/navigator_tools/navigator_tools/__init__.py b/utils/navigator_tools/navigator_tools/__init__.py
index 7f79d00b..59d64b82 100644
--- a/utils/navigator_tools/navigator_tools/__init__.py
+++ b/utils/navigator_tools/navigator_tools/__init__.py
@@ -1,2 +1,11 @@
-from geometry_helper import quat_to_euler, euler_to_quat, normalize, compose_transformation
-from msg_helper import ros_to_np_3D
\ No newline at end of file
+import image_helpers
+import init_helpers
+import msg_helpers
+import threading_helpers
+import geometry_helpers
+
+from init_helpers import *
+from image_helpers import *
+from msg_helpers import *
+from threading_helpers import *
+from geometry_helpers import *
diff --git a/utils/navigator_tools/navigator_tools/geometry_helper.py b/utils/navigator_tools/navigator_tools/geometry_helpers.py
similarity index 100%
rename from utils/navigator_tools/navigator_tools/geometry_helper.py
rename to utils/navigator_tools/navigator_tools/geometry_helpers.py
diff --git a/utils/navigator_tools/navigator_tools/image_helpers.py b/utils/navigator_tools/navigator_tools/image_helpers.py
new file mode 100644
index 00000000..0735f010
--- /dev/null
+++ b/utils/navigator_tools/navigator_tools/image_helpers.py
@@ -0,0 +1,114 @@
+#!/usr/bin/python
+'''
+Note:
+    The repeated use of CvBridge (instead of using make_image_msg and get_image_msg in the classes)
+    is intentional, to avoid the use of a global cvbridge, and to avoid reinstantiating a CvBridge for each use.
+'''
+import rospy
+import numpy as np
+from os import path
+from cv_bridge import CvBridge, CvBridgeError
+from sensor_msgs.msg import Image, CameraInfo
+from sub8_ros_tools.init_helpers import wait_for_param
+
+
+def get_parameter_range(parameter_root):
+    '''
+    ex: parameter_root='/vision/buoy/red'
+    this will then fetch /vision/buoy/red/hsv_low and hsv_high
+    '''
+    low_param, high_param = parameter_root + '/hsv_low', parameter_root + '/hsv_high'
+
+    rospy.logwarn("Blocking -- waiting for parameters {} and {}".format(low_param, high_param))
+
+    wait_for_param(low_param)
+    wait_for_param(high_param)
+    low = rospy.get_param(low_param)
+    high = rospy.get_param(high_param)
+
+    rospy.loginfo("Got {} and {}".format(low_param, high_param))
+    return np.array([low, high]).transpose()
+
+
+def make_image_msg(cv_image, encoding='bgr8'):
+    '''Take a cv image, and produce a ROS image message'''
+    bridge = CvBridge()
+    image_message = bridge.cv2_to_imgmsg(cv_image, encoding)
+    return image_message
+
+
+def get_image_msg(ros_image, encoding='bgr8'):
+    '''Take a ros image message, and return an opencv image'''
+    bridge = CvBridge()
+    cv_image = bridge.imgmsg_to_cv2(ros_image, desired_encoding=encoding)
+    return cv_image
+
+
+class Image_Publisher(object):
+    def __init__(self, topic, encoding="bgr8", queue_size=1):
+        '''Create an essentially normal publisher, that will publish images without conversion hassle'''
+        self.bridge = CvBridge()
+        self.encoding = encoding
+        self.im_pub = rospy.Publisher(topic, Image, queue_size=queue_size)
+
+    def publish(self, cv_image):
+        try:
+            image_message = self.bridge.cv2_to_imgmsg(cv_image, self.encoding)
+            self.im_pub.publish(image_message)
+        except CvBridgeError, e:
+            # Intentionally absorb CvBridge Errors
+            rospy.logerr(e)
+
+
+class Image_Subscriber(object):
+    def __init__(self, topic, callback=None, encoding="bgr8", queue_size=1):
+        '''Calls $callback on each new image published on $topic
+        Assumes topic of type "sensor_msgs/Image"
+        This behaves like a conventional subscriber, except handling the additional image conversion
+        '''
+        if callback is None:
+            callback = lambda im: setattr(self, 'last_image', im)
+
+        self.encoding = encoding
+        self.camera_info = None
+        self.last_image_time = None
+        self.last_image = None
+        self.im_sub = rospy.Subscriber(topic, Image, self.convert, queue_size=queue_size)
+
+        root_topic, image_subtopic = path.split(topic)
+        self.info_sub = rospy.Subscriber(root_topic + '/camera_info', CameraInfo, self.info_cb, queue_size=queue_size)
+
+        self.bridge = CvBridge()
+        self.callback = callback
+
+    def wait_for_camera_info(self, timeout=10):
+        '''
+        Blocks until camera info has been received.
+        Note: 'timeout' is in seconds.
+        '''
+        rospy.logwarn("Blocking -- waiting at most %d seconds for camera info." % timeout)
+
+        timeout = rospy.Duration(timeout)
+        start_time = rospy.Time.now()
+        while (rospy.Time.now() - start_time < timeout) and (not rospy.is_shutdown()):
+            if self.camera_info is not None:
+                rospy.loginfo("Camera info found!")
+                return self.camera_info
+            rospy.sleep(.2)
+
+        rospy.logerr("Camera info not found.")
+        raise Exception("Camera info not found.")
+
+    def info_cb(self, msg):
+        """The path trick here is a hack"""
+        self.info_sub.unregister()
+        self.camera_info = msg
+
+    def convert(self, data):
+        self.last_image_time = data.header.stamp
+        try:
+            image = self.bridge.imgmsg_to_cv2(data, desired_encoding=self.encoding)
+            self.callback(image)
+        except CvBridgeError, e:
+            # Intentionally absorb CvBridge Errors
+            rospy.logerr(e)
\ No newline at end of file
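A sketch of how Image_Subscriber is meant to be used (the topic name is hypothetical; assumes a built workspace where navigator_tools is importable):

    import rospy
    from navigator_tools import Image_Subscriber

    def on_image(cv_image):
        # cv_image arrives as a bgr8 numpy array; no CvBridge boilerplate needed here
        rospy.loginfo('Got a %dx%d frame' % (cv_image.shape[1], cv_image.shape[0]))

    rospy.init_node('image_listener')
    sub = Image_Subscriber('/stereo/right/image_raw', callback=on_image)
    rospy.spin()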
diff --git a/utils/navigator_tools/navigator_tools/init_helpers.py b/utils/navigator_tools/navigator_tools/init_helpers.py
new file mode 100644
index 00000000..91ca8277
--- /dev/null
+++ b/utils/navigator_tools/navigator_tools/init_helpers.py
@@ -0,0 +1,55 @@
+#!/usr/bin/python
+import rospy
+import rostest
+import time
+
+
+def wait_for_param(param_name, timeout=None, poll_rate=0.1):
+    '''Blocking wait for a parameter named $param_name to exist
+    Poll at frequency $poll_rate
+    Once the parameter exists, get it and return it.
+
+    This function intentionally leaves failure logging duties to the developer
+    '''
+    start_time = time.time()
+    rate = rospy.Rate(poll_rate)
+    while not rospy.is_shutdown():
+
+        # Check if the parameter now exists
+        if rospy.has_param(param_name):
+            return rospy.get_param(param_name)
+
+        # If we exceed a defined timeout, return None
+        if timeout is not None:
+            if time.time() - start_time > timeout:
+                return None
+
+        # Continue to poll at poll_rate
+        rate.sleep()
+
+
+def wait_for_subscriber(node_name, topic, timeout=5.0):
+    '''Blocks until $node_name subscribes to $topic
+    Useful mostly in integration tests --
+    I would counsel against use elsewhere
+    '''
+    end_time = time.time() + timeout
+
+    # Wait for time-out or ros-shutdown
+    while (time.time() < end_time) and (not rospy.is_shutdown()):
+        subscribed = rostest.is_subscriber(
+            rospy.resolve_name(topic),
+            rospy.resolve_name(node_name)
+        )
+        # Success scenario: node subscribes
+        if subscribed:
+            break
+        time.sleep(0.1)
+
+    # Could do this with a while/else
+    # But chose to explicitly check
+    success = rostest.is_subscriber(
+        rospy.resolve_name(topic),
+        rospy.resolve_name(node_name)
+    )
+    return success
\ No newline at end of file
diff --git a/utils/navigator_tools/navigator_tools/msg_helper.py b/utils/navigator_tools/navigator_tools/msg_helper.py
deleted file mode 100644
index bdc2a734..00000000
--- a/utils/navigator_tools/navigator_tools/msg_helper.py
+++ /dev/null
@@ -1,13 +0,0 @@
-from __future__ import division
-import numpy as np
-from geometry_msgs.msg import Quaternion
-
-'''
-    A file to assist with some messages that are commonly used
-    on the Wam-V
-'''
-
-def ros_to_np_3D(msg):
-
-    xyz_array = np.array(([msg.x, msg.y, msg.z]))
-    return xyz_array
diff --git a/utils/navigator_tools/navigator_tools/msg_helpers.py b/utils/navigator_tools/navigator_tools/msg_helpers.py
new file mode 100644
index 00000000..981f4ee0
--- /dev/null
+++ b/utils/navigator_tools/navigator_tools/msg_helpers.py
@@ -0,0 +1,188 @@
+import numpy as np
+from tf import transformations
+import geometry_msgs.msg as geometry_msgs
+import std_msgs.msg as std_msgs
+import nav_msgs.msg as nav_msgs
+import rospy
+
+
+def rosmsg_to_numpy(rosmsg, keys=None):
+    '''Convert an
arbitrary ROS msg to a numpy array + With no additional arguments, it will by default handle: + Point2D, Point3D, Vector3D, and quaternions + + Ex: + quat = Quaternion(1.0, 0.0, 0.0, 0.0) + quat is not a vector, you have quat.x, quat.y,... and you can't do math on that + + But wait, there's hope! + rosmsg_to_numpy(quat) -> array([1.0, 0.0, 0.0, 0.0]) + + Yielding a vector, which you can do math on! + + Further, imagine a bounding box message, BB, with properties BB.x, BB.h, BB.y, and BB.w + + rosmsg_to_numpy(BB, ['x', 'h', 'y', 'w']) -> array([BB.x, BB.h, BB.y, BB.w]) + or... + rosmsg_to_numpy(some_Pose2D, ['x', 'y', 'yaw']) = array([x, y, yaw]) + + Note: + - This function is designed to handle the most common use cases (vectors, points and quaternions) + without requiring any additional arguments. + ''' + if keys is None: + keys = ['x', 'y', 'z', 'w'] + output_array = [] + for key in keys: + # This is not necessarily the fastest way to do this + if hasattr(rosmsg, key): + output_array.append(getattr(rosmsg, key)) + else: + break + + return np.array(output_array).astype(np.float32) + + else: + output_array = np.zeros(len(keys), np.float32) + for n, key in enumerate(keys): + output_array[n] = getattr(rosmsg, key) + + return output_array + + +def pose_to_numpy(pose): + '''TODO: Unit-test + returns (position, orientation) + ''' + position = rosmsg_to_numpy(pose.position) + orientation = rosmsg_to_numpy(pose.orientation) + return position, orientation + + +def twist_to_numpy(twist): + '''TODO: Unit-test + Convert a twist message into a tuple of numpy arrays + returns (linear, angular) + ''' + linear = rosmsg_to_numpy(twist.linear) + angular = rosmsg_to_numpy(twist.angular) + return linear, angular + + +def posetwist_to_numpy(posetwist): + pose = pose_to_numpy(posetwist.pose) + twist = twist_to_numpy(posetwist.twist) + return pose, twist + + +def odometry_to_numpy(odom): + '''TODO: Unit-test + Convert an odometry message into a tuple of numpy arrays + returns (pose, twist, pose_covariance, twist_covariance) + ''' + pose = pose_to_numpy(odom.pose.pose) + pose_covariance = np.array(odom.pose.covariance).reshape(6, 6) + + twist = twist_to_numpy(odom.twist.twist) + twist_covariance = np.array(odom.twist.covariance).reshape(6, 6) + + return pose, twist, pose_covariance, twist_covariance + + +def wrench_to_numpy(wrench): + force = rosmsg_to_numpy(wrench.force) + torque = rosmsg_to_numpy(wrench.torque) + return force, torque + + +def numpy_to_point(np_vector): + return geometry_msgs.Point(*np_vector) + + +def numpy_to_quaternion(np_quaternion): + return geometry_msgs.Quaternion(*np_quaternion) + + +def numpy_to_twist(linear_vel, angular_vel): + return geometry_msgs.Twist(linear=geometry_msgs.Vector3(*linear_vel), angular=geometry_msgs.Vector3(*angular_vel)) + + +def numpy_to_wrench(forcetorque): + return geometry_msgs.Wrench( + force=geometry_msgs.Vector3(*forcetorque[:3]), + torque=geometry_msgs.Vector3(*forcetorque[3:]) + ) + + +def numpy_matrix_to_quaternion(np_matrix): + '''Given a 3x3 rotation matrix, return its geometry_msgs Quaternion''' + assert np_matrix.shape == (3, 3), "Must submit 3x3 rotation matrix" + pose_mat = np.eye(4) + pose_mat[:3, :3] = np_matrix + np_quaternion = transformations.quaternion_from_matrix(pose_mat) + return geometry_msgs.Quaternion(*np_quaternion) + + +def numpy_pair_to_pose(np_translation, np_rotation_matrix): + '''Convert a R, t pair to a geometry_msgs Pose''' + orientation = numpy_matrix_to_quaternion(np_rotation_matrix) + position = numpy_to_point(np_translation) 
+    return geometry_msgs.Pose(position=position, orientation=orientation)
+
+
+def numpy_quat_pair_to_pose(np_translation, np_quaternion):
+    orientation = numpy_to_quaternion(np_quaternion)
+    position = numpy_to_point(np_translation)
+    return geometry_msgs.Pose(position=position, orientation=orientation)
+
+
+def make_header(frame='/body', stamp=None):
+    if stamp is None:
+        try:
+            stamp = rospy.Time.now()
+        except rospy.ROSInitException:
+            stamp = rospy.Time(0)
+
+    header = std_msgs.Header(
+        stamp=stamp,
+        frame_id=frame
+    )
+    return header
+
+
+def make_wrench_stamped(force, torque, frame='/body'):
+    '''Make a WrenchStamped message without all the fuss
+    Frame defaults to body
+    '''
+    wrench = geometry_msgs.WrenchStamped(
+        header=make_header(frame),
+        wrench=geometry_msgs.Wrench(
+            force=geometry_msgs.Vector3(*force),
+            torque=geometry_msgs.Vector3(*torque)
+        )
+    )
+    return wrench
+
+
+def make_pose_stamped(position, orientation, frame='/body'):
+    pose = geometry_msgs.PoseStamped(
+        header=make_header(frame),
+        pose=geometry_msgs.Pose(
+            position=geometry_msgs.Point(*position),
+            orientation=geometry_msgs.Quaternion(*orientation)
+        )
+    )
+    return pose
+
+
+def odom_sub(topic, callback):
+    def wrapped_callback(*args):
+        msg = args[-1]
+        callback(odometry_to_numpy(msg))
+
+    return rospy.Subscriber(topic, nav_msgs.Odometry, wrapped_callback, queue_size=1)
+
+
+def ros_to_np_3D(msg):
+    xyz_array = np.array(([msg.x, msg.y, msg.z]))
+    return xyz_array
diff --git a/utils/navigator_tools/navigator_tools/threading_helpers.py b/utils/navigator_tools/navigator_tools/threading_helpers.py
new file mode 100644
index 00000000..a7b037ad
--- /dev/null
+++ b/utils/navigator_tools/navigator_tools/threading_helpers.py
@@ -0,0 +1,30 @@
+def thread_lock(lock):
+    '''Use an existing thread lock to thread-lock a function
+    This prevents the function from being executed by multiple threads at once
+
+    Example:
+        import threading
+        lock = threading.Lock()
+
+        @thread_lock(lock)
+        def my_function(a, b, c):
+            print a, b, c
+    '''
+
+    def lock_thread(function_to_lock):
+        '''thread_lock(function) -> locked function
+        Thread locking decorator
+        If you use this as a decorator for a function, it will apply a threading lock during the execution
+        of that function, which guarantees that no ROS callbacks can change the state of data while it is
+        executing. This is critical to make sure that a new message being sent doesn't cause a weird
+        serial interruption
+        '''
+        def locked_function(*args, **kwargs):
+            # Get threading lock
+            with lock:
+                result = function_to_lock(*args, **kwargs)
+            # Return, pretending the function hasn't changed at all
+            return result
+        # Return the function with locking added
+        return locked_function
+
+    return lock_thread
\ No newline at end of file
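A quick round trip through the conversion helpers this patch introduces (a sketch; assumes a built workspace where navigator_tools is importable):

    import numpy as np
    from navigator_tools import rosmsg_to_numpy, numpy_to_point

    p_np = np.array([1.0, 2.0, 3.0])
    p_msg = numpy_to_point(p_np)                      # numpy vector -> geometry_msgs/Point
    assert np.allclose(rosmsg_to_numpy(p_msg), p_np)  # and back again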
From 6119d33685a3a58c1e67ce975fd41d387c81d4e9 Mon Sep 17 00:00:00 2001
From: Anthony Olive
Date: Wed, 24 Aug 2016 19:22:02 -0400
Subject: [PATCH 084/267] BKEEPING: Fixed an import error in move_helper

---
 utils/navigator_tools/navigator_tools/move_helper.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/utils/navigator_tools/navigator_tools/move_helper.py b/utils/navigator_tools/navigator_tools/move_helper.py
index eeb07b35..e116ee56 100755
--- a/utils/navigator_tools/navigator_tools/move_helper.py
+++ b/utils/navigator_tools/navigator_tools/move_helper.py
@@ -6,7 +6,7 @@
 from tf import transformations as trns
 from nav_msgs.msg import Odometry
 from geometry_msgs.msg import PoseStamped, Point

-from navigator_tools import geometry_helper as gh
+from navigator_tools import geometry_helpers as gh

 rospy.init_node('move_helper')

From 64609b1f2956cd067690d86f4d166731a7ef5b52 Mon Sep 17 00:00:00 2001
From: mattlangford
Date: Sun, 28 Aug 2016 02:54:35 -0400
Subject: [PATCH 085/267] UTILS: Add a pre-flight like check

---
 .../navigator_tools/self_check.py             | 76 +++++++++++++++++++
 1 file changed, 76 insertions(+)
 create mode 100755 utils/navigator_tools/navigator_tools/self_check.py

diff --git a/utils/navigator_tools/navigator_tools/self_check.py b/utils/navigator_tools/navigator_tools/self_check.py
new file mode 100755
index 00000000..0d30ae9f
--- /dev/null
+++ b/utils/navigator_tools/navigator_tools/self_check.py
@@ -0,0 +1,76 @@
+#!/usr/bin/env python
+from twisted.internet import defer, reactor
+import traceback
+import signal
+import txros
+
+
+class FancyPrint(object):
+    GOOD = '\033[32m'
+    BAD = '\033[31m'
+    NORMAL = '\033[0m'
+    BOLD = '\033[1m'
+
+    @classmethod
+    def okay(self, text):
+        print self.GOOD + text + self.NORMAL
+
+    @classmethod
+    def error(self, text):
+        print self.BAD + text + self.NORMAL
+
+
+@txros.util.cancellableInlineCallbacks
+def main():
+    nh = yield txros.NodeHandle.from_argv("self_checker")
+    # Add deffereds to list to be yieleded on and checked later
+    topics = {}
+
+    # General Subs
+    from nav_msgs.msg import Odometry
+    topics['odom'] = nh.subscribe('odom', Odometry).get_next_message()
+
+    from sensor_msgs.msg import Joy
+    topics['joy'] = nh.subscribe('joy', Joy).get_next_message()
+
+    # Perception Subs
+    from sensor_msgs.msg import Image
+    topics['right_camera'] = nh.subscribe('/right_camera/image_raw', Image).get_next_message()
+    topics['left_camera'] = nh.subscribe('/left_camera/image_raw', Image).get_next_message()
+    topics['side_camera'] = nh.subscribe('/side_camera/image_raw', Image).get_next_message()
+    topics['down_camera'] = nh.subscribe('/down_camera/image_raw', Image).get_next_message()
+
+    from sensor_msgs.msg import CompressedImage
+    topics['right_camera_compressed'] = nh.subscribe('/right_camera/image_raw/compressed', CompressedImage).get_next_message()
+    topics['left_camera_compressed'] = nh.subscribe('/left_camera/image_raw/compressed', CompressedImage).get_next_message()
+    topics['side_camera_compressed'] = nh.subscribe('/side_camera/image_raw/compressed', CompressedImage).get_next_message()
+
topics['down_camera_compressed'] = nh.subscribe('/down_camera/image_raw/compressed', CompressedImage).get_next_message() + + from sensor_msgs.msg import PointCloud2 + topics['velodyne'] = nh.subscribe('/velodyne/points', PointCloud2).get_next_message() + + # Thrusters + from roboteq_msgs.msg import Feedback + topics['BL_motor'] = nh.subscribe('/BL_motor/Feedback', Feedback).get_next_message() + topics['BR_motor'] = nh.subscribe('/BR_motor/Feedback', Feedback).get_next_message() + topics['FL_motor'] = nh.subscribe('/FL_motor/Feedback', Feedback).get_next_message() + topics['FR_motor'] = nh.subscribe('/FR_motor/Feedback', Feedback).get_next_message() + + for name, sub in topics.iteritems(): + try: + # Bold the name so it's distinct + fancy_name = FancyPrint.BOLD + name + FancyPrint.NORMAL + print " - - - - Testing for {}".format(fancy_name) + + result = yield txros.util.wrap_timeout(sub, 2) # 2 second timeout should be good + if result is None: + FancyPrint.error("[ FAIL ] Response was None from {}".format(fancy_name)) + else: + FancyPrint.okay("[ PASS ] Response found from {}".format(fancy_name)) + + except: + FancyPrint.error("[ FAIL ] No response from {}".format(fancy_name)) + + +if __name__ == '__main__': + txros.util.launch_main(main) \ No newline at end of file From c4ff27c0ea32c3eb23d302a099e692b838df15a5 Mon Sep 17 00:00:00 2001 From: mattlangford Date: Thu, 1 Sep 2016 00:56:55 -0400 Subject: [PATCH 086/267] UTILS: Add rviz helpers. Fix some image helper bugs --- .../navigator_tools/__init__.py | 2 + .../navigator_tools/image_helpers.py | 1 + .../navigator_tools/msg_helpers.py | 2 + .../navigator_tools/rviz_helpers.py | 61 +++++++++++++++++++ 4 files changed, 66 insertions(+) create mode 100644 utils/navigator_tools/navigator_tools/rviz_helpers.py diff --git a/utils/navigator_tools/navigator_tools/__init__.py b/utils/navigator_tools/navigator_tools/__init__.py index 59d64b82..74113d95 100644 --- a/utils/navigator_tools/navigator_tools/__init__.py +++ b/utils/navigator_tools/navigator_tools/__init__.py @@ -3,9 +3,11 @@ import msg_helpers import threading_helpers import geometry_helpers +import rviz_helpers from init_helpers import * from image_helpers import * from msg_helpers import * from threading_helpers import * from geometry_helpers import * +from rviz_helpers import * diff --git a/utils/navigator_tools/navigator_tools/image_helpers.py b/utils/navigator_tools/navigator_tools/image_helpers.py index 0735f010..b15d1972 100644 --- a/utils/navigator_tools/navigator_tools/image_helpers.py +++ b/utils/navigator_tools/navigator_tools/image_helpers.py @@ -90,6 +90,7 @@ def wait_for_camera_info(self, timeout=10): timeout = rospy.Duration(timeout) start_time = rospy.Time.now() + rospy.sleep(.1) # Make sure we don't have negative time while (rospy.Time.now() - start_time < timeout) and (not rospy.is_shutdown()): if self.camera_info is not None: rospy.loginfo("Camera info found!") diff --git a/utils/navigator_tools/navigator_tools/msg_helpers.py b/utils/navigator_tools/navigator_tools/msg_helpers.py index 981f4ee0..72edc83f 100644 --- a/utils/navigator_tools/navigator_tools/msg_helpers.py +++ b/utils/navigator_tools/navigator_tools/msg_helpers.py @@ -5,6 +5,8 @@ import nav_msgs.msg as nav_msgs import rospy +point_to_numpy = rosmsg_to_numpy +quaternion_to_numpy = rosmsg_to_numpy def rosmsg_to_numpy(rosmsg, keys=None): '''Convert an arbitrary ROS msg to a numpy array diff --git a/utils/navigator_tools/navigator_tools/rviz_helpers.py b/utils/navigator_tools/navigator_tools/rviz_helpers.py new 
file mode 100644 index 00000000..ef730081 --- /dev/null +++ b/utils/navigator_tools/navigator_tools/rviz_helpers.py @@ -0,0 +1,61 @@ +#!/usr/bin/env python +from __future__ import division + +import numpy as np +import rospy +import visualization_msgs.msg as visualization_msgs + +from geometry_msgs.msg import Pose, Vector3, Point +from std_msgs.msg import ColorRGBA +from uf_common.msg import Float64Stamped # This needs to be deprecated + +import navigator_tools + +rviz_pub = rospy.Publisher("visualization", visualization_msgs.Marker, queue_size=3) + +def draw_sphere(position, color, scaling=(0.11, 0.11, 0.11), m_id=4, frame='/base_link'): + pose = Pose( + position=navigator_tools.numpy_to_point(position), + orientation=navigator_tools.numpy_to_quaternion([0.0, 0.0, 0.0, 1.0]) + ) + + marker = visualization_msgs.Marker( + ns='wamv', + id=m_id, + header=navigator_tools.make_header(frame=frame), + type=visualization_msgs.Marker.SPHERE, + action=visualization_msgs.Marker.ADD, + pose=pose, + color=ColorRGBA(*color), + scale=Vector3(*scaling), + lifetime=rospy.Duration(), + ) + rviz_pub.publish(marker) + +def draw_ray_3d(pix_coords, camera_model, color, frame='/stereo_front', m_id=0, length=35): + marker = make_ray( + base=np.array([0.0, 0.0, 0.0]), + direction=np.array(camera_model.projectPixelTo3dRay(pix_coords)), + length=length, + color=color, + frame=frame, + m_id=m_id + ) + + rviz_pub.publish(marker) + +def make_ray(base, direction, length, color, frame='/base_link', m_id=0, **kwargs): + '''Handle the frustration that Rviz cylinders are designated by their center, not base''' + marker = visualization_msgs.Marker( + ns='wamv', + id=m_id, + header=navigator_tools.make_header(frame=frame), + type=visualization_msgs.Marker.LINE_STRIP, + action=visualization_msgs.Marker.ADD, + color=ColorRGBA(*color), + scale=Vector3(0.05, 0.05, 0.05), + points=map(lambda o: Point(*o), [base, direction * length]), + lifetime=rospy.Duration(), + **kwargs + ) + return marker \ No newline at end of file From a2827a8d7fa81a3414e4b645eecf099d74912719 Mon Sep 17 00:00:00 2001 From: mattlangford Date: Sat, 3 Sep 2016 19:48:49 -0400 Subject: [PATCH 087/267] MISSION: Add manual start gate mission. 
--- utils/navigator_tools/navigator_tools/msg_helpers.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/utils/navigator_tools/navigator_tools/msg_helpers.py b/utils/navigator_tools/navigator_tools/msg_helpers.py index 72edc83f..f4a5e6aa 100644 --- a/utils/navigator_tools/navigator_tools/msg_helpers.py +++ b/utils/navigator_tools/navigator_tools/msg_helpers.py @@ -5,9 +5,6 @@ import nav_msgs.msg as nav_msgs import rospy -point_to_numpy = rosmsg_to_numpy -quaternion_to_numpy = rosmsg_to_numpy - def rosmsg_to_numpy(rosmsg, keys=None): '''Convert an arbitrary ROS msg to a numpy array With no additional arguments, it will by default handle: @@ -51,6 +48,8 @@ def rosmsg_to_numpy(rosmsg, keys=None): return output_array +point_to_numpy = rosmsg_to_numpy +quaternion_to_numpy = rosmsg_to_numpy def pose_to_numpy(pose): '''TODO: Unit-test From 408bba1a7d7169f49bdfc741de231b3230434b0c Mon Sep 17 00:00:00 2001 From: mattlangford Date: Tue, 6 Sep 2016 01:59:46 -0400 Subject: [PATCH 088/267] Updates from lake day --- .../navigator_tools/self_check.py | 27 ++++++++++--------- 1 file changed, 14 insertions(+), 13 deletions(-) diff --git a/utils/navigator_tools/navigator_tools/self_check.py b/utils/navigator_tools/navigator_tools/self_check.py index 0d30ae9f..118c35ea 100755 --- a/utils/navigator_tools/navigator_tools/self_check.py +++ b/utils/navigator_tools/navigator_tools/self_check.py @@ -19,11 +19,20 @@ def okay(self, text): def error(self, text): print self.BAD + text + self.NORMAL +from sensor_msgs.msg import Image +from sensor_msgs.msg import CompressedImage +def add_camera_feeds(nh, cam_name, image_type="image_raw"): + ''' Returns subscribers to the raw and compressed `image_type` ''' + + raw = '/{}/{}'.format(cam_name, image_type) + compressed = '/{}/{}/compressed'.format(cam_name, image_type) + return nh.subscribe(raw, Image).get_next_message(), \ + nh.subscribe(compressed, CompressedImage).get_next_message() @txros.util.cancellableInlineCallbacks def main(): nh = yield txros.NodeHandle.from_argv("self_checker") - # Add deffereds to list to be yieleded on and checked later + # Add deferreds to this dict to be yieleded on and checked later topics = {} # General Subs @@ -34,20 +43,12 @@ def main(): topics['joy'] = nh.subscribe('joy', Joy).get_next_message() # Perception Subs - from sensor_msgs.msg import Image - topics['right_camera'] = nh.subscribe('/right_camera/image_raw', Image).get_next_message() - topics['left_camera'] = nh.subscribe('/left_camera/image_raw', Image).get_next_message() - topics['side_camera'] = nh.subscribe('/side_camera/image_raw', Image).get_next_message() - topics['down_camera'] = nh.subscribe('/down_camera/image_raw', Image).get_next_message() - - from sensor_msgs.msg import CompressedImage - topics['right_camera_compressed'] = nh.subscribe('/right_camera/image_raw/compressed', CompressedImage).get_next_message() - topics['left_camera_compressed'] = nh.subscribe('/left_camera/image_raw/compressed', CompressedImage).get_next_message() - topics['side_camera_compressed'] = nh.subscribe('/side_camera/image_raw/compressed', CompressedImage).get_next_message() - topics['down_camera_compressed'] = nh.subscribe('/down_camera/image_raw/compressed', CompressedImage).get_next_message() + topics['right_images'], topics['right_compressed'] = add_camera_feeds(nh, "right") + topics['stereo_right_images'], topics['stereo_right_compressed'] = add_camera_feeds(nh, "stereo/right") + topics['stereo_left_images'], topics['stereo_left_compressed'] = add_camera_feeds(nh, 
"stereo/left") from sensor_msgs.msg import PointCloud2 - topics['velodyne'] = nh.subscribe('/velodyne/points', PointCloud2).get_next_message() + topics['velodyne'] = nh.subscribe('/velodyne_points', PointCloud2).get_next_message() # Thrusters from roboteq_msgs.msg import Feedback From 4ee4c768b85d71d1f62ac151073061185ea8534f Mon Sep 17 00:00:00 2001 From: mattlangford Date: Sat, 10 Sep 2016 23:58:27 -0400 Subject: [PATCH 089/267] MISSION: Update start gate mission to basic thresholing --- utils/navigator_tools/navigator_tools/image_helpers.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/utils/navigator_tools/navigator_tools/image_helpers.py b/utils/navigator_tools/navigator_tools/image_helpers.py index b15d1972..779c2c34 100644 --- a/utils/navigator_tools/navigator_tools/image_helpers.py +++ b/utils/navigator_tools/navigator_tools/image_helpers.py @@ -9,7 +9,7 @@ from os import path from cv_bridge import CvBridge, CvBridgeError from sensor_msgs.msg import Image, CameraInfo -from sub8_ros_tools.init_helpers import wait_for_param +from navigator_tools.init_helpers import wait_for_param def get_parameter_range(parameter_root): From c366e4ec0373a4793651fc0923136e203ef4b895 Mon Sep 17 00:00:00 2001 From: mattlangford Date: Sun, 25 Sep 2016 23:52:31 -0400 Subject: [PATCH 090/267] MISSIONS: Update vision proxy, add internal publishers, add lla bounds --- utils/navigator_tools/navigator_tools/image_helpers.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/utils/navigator_tools/navigator_tools/image_helpers.py b/utils/navigator_tools/navigator_tools/image_helpers.py index 779c2c34..bec0c604 100644 --- a/utils/navigator_tools/navigator_tools/image_helpers.py +++ b/utils/navigator_tools/navigator_tools/image_helpers.py @@ -89,8 +89,9 @@ def wait_for_camera_info(self, timeout=10): rospy.logwarn("Blocking -- waiting at most %d seconds for camera info." 
% timeout) timeout = rospy.Duration(timeout) - start_time = rospy.Time.now() rospy.sleep(.1) # Make sure we don't have negative time + start_time = rospy.Time.now() + while (rospy.Time.now() - start_time < timeout) and (not rospy.is_shutdown()): if self.camera_info is not None: rospy.loginfo("Camera info found!") From 1eed8398d914331a4ce12e5b6de7833a75b142a2 Mon Sep 17 00:00:00 2001 From: mattlangford Date: Mon, 26 Sep 2016 15:14:39 -0400 Subject: [PATCH 091/267] UTILS: Add conversion server for points in lla, ecef, and enu --- .../nodes/coordinate_conversion_server.py | 128 ++++++++++++++++++ 1 file changed, 128 insertions(+) create mode 100755 utils/navigator_tools/nodes/coordinate_conversion_server.py diff --git a/utils/navigator_tools/nodes/coordinate_conversion_server.py b/utils/navigator_tools/nodes/coordinate_conversion_server.py new file mode 100755 index 00000000..11804c0a --- /dev/null +++ b/utils/navigator_tools/nodes/coordinate_conversion_server.py @@ -0,0 +1,128 @@ +#!/usr/bin/env python +from __future__ import division + +import txros +import numpy as np +import navigator_tools +from twisted.internet import defer +from rawgps_common.gps import ecef_from_latlongheight, enu_from_ecef_tf, latlongheight_from_ecef + +from nav_msgs.msg import Odometry +from navigator_msgs.srv import CoordinateConversion, CoordinateConversionRequest, CoordinateConversionResponse + +class Converter(object): + @txros.util.cancellableInlineCallbacks + def init(self): + self.nh = yield txros.NodeHandle.from_argv("coordinate_conversion_server") + self.odom_sub = self.nh.subscribe('odom', Odometry) + self.abs_odom_sub = self.nh.subscribe('absodom', Odometry) + + self.nh.advertise_service('/convert', CoordinateConversion, self.got_request) + + def __getattr__(self, attr): + print "Frame '{}' not found!".format(attr) + return self.not_found + + def not_found(self): + return [[np.nan, np.nan, np.nan]] * 3 + + @txros.util.cancellableInlineCallbacks + def got_request(self, srv): + self.point = np.array(srv.point) + + # self.enu_pos = navigator_tools.odometry_to_numpy((yield self.odom_sub.get_next_message()))[0][0] + # self.ecef_pos = navigator_tools.odometry_to_numpy((yield self.abs_odom_sub.get_next_message()))[0][0] + + yield + self.enu_pos = np.array([0, 0, 0]) + self.ecef_pos = np.array([743864, -5503829, 3125589]) + + enu, ecef, lla = getattr(self, srv.frame)() + + distance = np.linalg.norm(enu - self.enu_pos) + defer.returnValue(CoordinateConversionResponse(enu=enu, ecef=ecef, lla=lla, distance=distance)) + + def lla(self): + lla = self.point + + # to ecef + ecef = ecef_from_latlongheight(*np.radians(lla)) + + # to enu + ecef_vector = ecef - self.ecef_pos + enu_vector = enu_from_ecef_tf(self.ecef_pos).dot(ecef_vector) + enu = enu_vector + self.enu_pos + + return enu, ecef, lla + + def ecef(self): + ecef = self.point + + # to lla + lla = np.degrees(latlongheight_from_ecef(ecef)) + + # to enu + ecef_vector = ecef - self.ecef_pos + enu_vector = enu_from_ecef_tf(self.ecef_pos).dot(ecef_vector) + enu = enu_vector + self.enu_pos + + return enu, ecef, lla + + def enu(self): + enu = self.point + + # to ecef + enu_vector = enu - self.enu_pos + ecef_vector = enu_from_ecef_tf(self.ecef_pos).T.dot(enu_vector) + ecef = ecef_vector + self.ecef_pos + + # to lla + lla = np.degrees(latlongheight_from_ecef(ecef)) + + return enu, ecef, lla + + +@txros.util.cancellableInlineCallbacks +def main(): + c = Converter() + yield c.init() + + yield defer.Deferred() # never exit + +txros.util.launch_main(main) + + + + +# if 
srv.frame.lower() == "lla": +# lla = point + +# # to ecef +# ecef = ecef_from_latlongheight(*np.radians(lla)) + +# # to enu +# ecef_vector = ecef - ecef_pos +# enu_vector = enu_from_ecef_tf(ecef_pos).dot(ecef_vector) +# enu = enu_vector + enu_pos + +# elif srv.frame.lower() == "ecef": +# ecef = point + +# # to lla +# lla = np.degrees(latlongheight_from_ecef(point)) + +# # to enu +# ecef_vector = ecef - ecef_pos +# enu_vector = enu_from_ecef_tf(ecef_pos).dot(ecef_vector) +# enu = enu_vector + enu_pos + +# elif srv.frame.lower() == "enu": +# enu = point + +# # to ecef +# enu_vector = enu - enu_pos +# ecef_vector = enu_from_ecef_tf(ecef_pos).T.dot(enu_vector) +# ecef = ecef_vector + ecef_pos + +# # to lla +# lla = np.degrees(latlongheight_from_ecef(ecef)) From 7c49902bedd9e6df33db66eb18f565e0e1051ed5 Mon Sep 17 00:00:00 2001 From: mattlangford Date: Fri, 30 Sep 2016 13:20:33 -0400 Subject: [PATCH 092/267] COMMAND: Integrate bounds param into mission runner --- .../navigator_tools/nodes/coordinate_conversion_server.py | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/utils/navigator_tools/nodes/coordinate_conversion_server.py b/utils/navigator_tools/nodes/coordinate_conversion_server.py index 11804c0a..2ba0ae66 100755 --- a/utils/navigator_tools/nodes/coordinate_conversion_server.py +++ b/utils/navigator_tools/nodes/coordinate_conversion_server.py @@ -30,12 +30,8 @@ def not_found(self): def got_request(self, srv): self.point = np.array(srv.point) - # self.enu_pos = navigator_tools.odometry_to_numpy((yield self.odom_sub.get_next_message()))[0][0] - # self.ecef_pos = navigator_tools.odometry_to_numpy((yield self.abs_odom_sub.get_next_message()))[0][0] - - yield - self.enu_pos = np.array([0, 0, 0]) - self.ecef_pos = np.array([743864, -5503829, 3125589]) + self.enu_pos = navigator_tools.odometry_to_numpy((yield self.odom_sub.get_next_message()))[0][0] + self.ecef_pos = navigator_tools.odometry_to_numpy((yield self.abs_odom_sub.get_next_message()))[0][0] enu, ecef, lla = getattr(self, srv.frame)() From 1b8d156f448115e16788b31ed3f211ac6d10d5df Mon Sep 17 00:00:00 2001 From: mattlangford Date: Fri, 30 Sep 2016 13:22:32 -0400 Subject: [PATCH 093/267] TOOLS: Add tool to fudge tf values on the fly --- utils/navigator_tools/nodes/tf_fudger.py | 48 ++++++++++++++++++++++++ 1 file changed, 48 insertions(+) create mode 100755 utils/navigator_tools/nodes/tf_fudger.py diff --git a/utils/navigator_tools/nodes/tf_fudger.py b/utils/navigator_tools/nodes/tf_fudger.py new file mode 100755 index 00000000..12192b6a --- /dev/null +++ b/utils/navigator_tools/nodes/tf_fudger.py @@ -0,0 +1,48 @@ +#!/usr/bin/env python +from __future__ import division + +import rospy +import tf +import cv2 +import argparse +import sys +import numpy as np + +rospy.init_node("tf_fudger", anonymous=True) +br = tf.TransformBroadcaster() + +usage_msg = "Useful to test transforms between two frames." +desc_msg = "Pass the name of the parent frame and the child name as well as a an optional multiplier." 
+ +parser = argparse.ArgumentParser(usage=usage_msg, description=desc_msg) +parser.add_argument(dest='tf_child', + help="Child tf frame.") +parser.add_argument(dest='tf_parent', + help="Parent tf frame.") +parser.add_argument('--mult', action='store', type=float, default=1, + help="A factor with which to muliply linear components of the transformations") + +args = parser.parse_args(sys.argv[1:]) + +cv2.namedWindow('tf', cv2.WINDOW_NORMAL) + +cv2.createTrackbar("x", 'tf', 0, 100, lambda x: x) +cv2.createTrackbar("y", 'tf', 0, 100, lambda x: x) +cv2.createTrackbar("z", 'tf', 0, 100, lambda x: x) + +cv2.createTrackbar("roll", 'tf', 0, 628, lambda x: x) +cv2.createTrackbar("pitch", 'tf', 0, 628, lambda x: x) +cv2.createTrackbar("yaw", 'tf', 0, 628, lambda x: x) + +while not rospy.is_shutdown(): + x, y, z = cv2.getTrackbarPos("x", "tf") / 100 - .5, cv2.getTrackbarPos("y", "tf") / 100 - .5, cv2.getTrackbarPos("z", "tf") / 100 - .5 + p = (x * args.mult, y * args.mult, z * args.mult) + rpy = (cv2.getTrackbarPos("roll", "tf") / 100 - np.pi, cv2.getTrackbarPos("pitch", "tf") / 100 - np.pi, cv2.getTrackbarPos("yaw", "tf") / 100 - np.pi) + q = tf.transformations.quaternion_from_euler(*rpy) + + print "xyz:", [round(x, 5) for x in p], " rpy:", [round(np.degrees(x), 5) for x in rpy] + br.sendTransform(p, q, rospy.Time.now(), args.tf_child, args.tf_parent) + + cv2.waitKey(10) + +cv2.destroyAllWindows() \ No newline at end of file From 04b469df98573d365527f338265bc326a34d8081 Mon Sep 17 00:00:00 2001 From: mattlangford Date: Sat, 1 Oct 2016 15:07:47 -0400 Subject: [PATCH 094/267] COMMAND: Update bounds system, integrate that system into pose editor --- utils/navigator_tools/nodes/bounds.py | 38 ++++++++++++++++ .../nodes/coordinate_conversion_server.py | 44 ++----------------- 2 files changed, 42 insertions(+), 40 deletions(-) create mode 100755 utils/navigator_tools/nodes/bounds.py diff --git a/utils/navigator_tools/nodes/bounds.py b/utils/navigator_tools/nodes/bounds.py new file mode 100755 index 00000000..5f3ab1c5 --- /dev/null +++ b/utils/navigator_tools/nodes/bounds.py @@ -0,0 +1,38 @@ +#!/usr/bin/env python +import numpy as np +import txros +import navigator_tools +from twisted.internet import defer +from navigator_msgs.srv import Bounds, BoundsResponse, \ + CoordinateConversion, CoordinateConversionRequest + + +@txros.util.cancellableInlineCallbacks +def got_request(nh, req, convert): + to_frame = "enu" if req.to_frame == '' else req.to_frame + + resp = BoundsResponse(enforce=False) + if (yield nh.has_param("/bounds/enforce")) and (yield nh.get_param("/bounds/enforce")): + # We want to enforce the bounds that were set + lla_bounds = yield nh.get_param("/bounds/lla") + bounds = [] + for lla_bound in lla_bounds: + bound = yield convert(CoordinateConversionRequest(frame="lla", point=np.append(lla_bound, 0))) + bounds.append(navigator_tools.numpy_to_point(getattr(bound, to_frame))) + + resp = BoundsResponse(enforce=True, bounds=bounds) + + defer.returnValue(resp) + + +@txros.util.cancellableInlineCallbacks +def main(): + nh = yield txros.NodeHandle.from_argv("bounds_server") + + convert = nh.get_service_client("/convert", CoordinateConversion) + nh.advertise_service('/get_bounds', Bounds, lambda req: got_request(nh, req, convert)) + + yield defer.Deferred() # never exit + + +txros.util.launch_main(main) \ No newline at end of file diff --git a/utils/navigator_tools/nodes/coordinate_conversion_server.py b/utils/navigator_tools/nodes/coordinate_conversion_server.py index 2ba0ae66..7cfccb96 100755 --- 
a/utils/navigator_tools/nodes/coordinate_conversion_server.py +++ b/utils/navigator_tools/nodes/coordinate_conversion_server.py @@ -8,7 +8,7 @@ from rawgps_common.gps import ecef_from_latlongheight, enu_from_ecef_tf, latlongheight_from_ecef from nav_msgs.msg import Odometry -from navigator_msgs.srv import CoordinateConversion, CoordinateConversionRequest, CoordinateConversionResponse +from navigator_msgs.srv import CoordinateConversion, CoordinateConversionResponse class Converter(object): @txros.util.cancellableInlineCallbacks @@ -30,8 +30,8 @@ def not_found(self): def got_request(self, srv): self.point = np.array(srv.point) - self.enu_pos = navigator_tools.odometry_to_numpy((yield self.odom_sub.get_next_message()))[0][0] - self.ecef_pos = navigator_tools.odometry_to_numpy((yield self.abs_odom_sub.get_next_message()))[0][0] + self.enu_pos = navigator_tools.odometry_to_numpy((yield self.odom_sub.get_last_message()))[0][0] + self.ecef_pos = navigator_tools.odometry_to_numpy((yield self.abs_odom_sub.get_last_message()))[0][0] enu, ecef, lla = getattr(self, srv.frame)() @@ -85,40 +85,4 @@ def main(): yield defer.Deferred() # never exit -txros.util.launch_main(main) - - - - -# if srv.frame.lower() == "lla": -# lla = point - -# # to ecef -# ecef = ecef_from_latlongheight(*np.radians(lla)) - -# # to enu -# ecef_vector = ecef - ecef_pos -# enu_vector = enu_from_ecef_tf(ecef_pos).dot(ecef_vector) -# enu = enu_vector + enu_pos - -# elif srv.frame.lower() == "ecef": -# ecef = point - -# # to lla -# lla = np.degrees(latlongheight_from_ecef(point)) - -# # to enu -# ecef_vector = ecef - ecef_pos -# enu_vector = enu_from_ecef_tf(ecef_pos).dot(ecef_vector) -# enu = enu_vector + enu_pos - -# elif srv.frame.lower() == "enu": -# enu = point - -# # to ecef -# enu_vector = enu - enu_pos -# ecef_vector = enu_from_ecef_tf(ecef_pos).T.dot(enu_vector) -# ecef = ecef_vector + ecef_pos - -# # to lla -# lla = np.degrees(latlongheight_from_ecef(ecef)) +txros.util.launch_main(main) \ No newline at end of file From 9bdb72045218f671c596d947dc27a80dd8412177 Mon Sep 17 00:00:00 2001 From: mattlangford Date: Fri, 7 Oct 2016 22:57:12 -0400 Subject: [PATCH 095/267] TOOLS: Add print function with formatted time --- utils/navigator_tools/navigator_tools/__init__.py | 2 ++ utils/navigator_tools/navigator_tools/general_helpers.py | 8 ++++++++ 2 files changed, 10 insertions(+) create mode 100644 utils/navigator_tools/navigator_tools/general_helpers.py diff --git a/utils/navigator_tools/navigator_tools/__init__.py b/utils/navigator_tools/navigator_tools/__init__.py index 74113d95..8b91231e 100644 --- a/utils/navigator_tools/navigator_tools/__init__.py +++ b/utils/navigator_tools/navigator_tools/__init__.py @@ -4,6 +4,7 @@ import threading_helpers import geometry_helpers import rviz_helpers +import general_helpers from init_helpers import * from image_helpers import * @@ -11,3 +12,4 @@ from threading_helpers import * from geometry_helpers import * from rviz_helpers import * +from general_helpers import * \ No newline at end of file diff --git a/utils/navigator_tools/navigator_tools/general_helpers.py b/utils/navigator_tools/navigator_tools/general_helpers.py new file mode 100644 index 00000000..d07fab10 --- /dev/null +++ b/utils/navigator_tools/navigator_tools/general_helpers.py @@ -0,0 +1,8 @@ +import rospy + +def print_t(_str, time=None): + # Needs a rospy init for ros time + if time is None: + time = rospy.Time.now().to_sec() + + print "\033[1m{}:\033[0m {}".format(time, _str) \ No newline at end of file From 
44915fddc913172e8568093eeeec0324be38a5ff Mon Sep 17 00:00:00 2001 From: mattlangford Date: Fri, 7 Oct 2016 23:24:58 -0400 Subject: [PATCH 096/267] Remove depricated packages. --- .../navigator_tools/geometry_helpers.py | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/utils/navigator_tools/navigator_tools/geometry_helpers.py b/utils/navigator_tools/navigator_tools/geometry_helpers.py index 1d1912ed..5f43739e 100644 --- a/utils/navigator_tools/navigator_tools/geometry_helpers.py +++ b/utils/navigator_tools/navigator_tools/geometry_helpers.py @@ -1,6 +1,7 @@ from __future__ import division import numpy as np -from tf.transformations import quaternion_from_euler, euler_from_quaternion +from tf.transformations import quaternion_from_euler, euler_from_quaternion, random_quaternion +from msg_helpers import numpy_quat_pair_to_pose from geometry_msgs.msg import Quaternion ''' @@ -19,10 +20,9 @@ def normalize(vector): def compose_transformation(R, t): '''Compose a transformation from a rotation matrix and a translation matrix''' - transformation = np.zeros((4, 4)) + transformation = np.eye(4) transformation[:3, :3] = R transformation[3, :3] = t - transformation[3, 3] = 1.0 return transformation def quat_to_euler(q): @@ -39,4 +39,10 @@ def euler_to_quat(rotvec): quat = quaternion_from_euler(rotvec[0], rotvec[1], rotvec[2]) return Quaternion(quat[0], quat[1], quat[2], quat[3]) - +def random_pose(_min, _max): + ''' Gives a random pose in the xyz range `_min` to `_max` ''' + pos = np.random.uniform(low=_min, high=_max, size=3) + quat = random_quaternion() + return numpy_quat_pair_to_pose(pos, quat) + + From 4d3a41d2dbd9054fb286501bd6924fb5adcf8e90 Mon Sep 17 00:00:00 2001 From: mattlangford Date: Mon, 10 Oct 2016 01:05:04 -0400 Subject: [PATCH 097/267] UTILS: Add rviz waypoint move commands --- utils/navigator_tools/nodes/rviz_waypoint.py | 35 ++++++++++++++++++++ 1 file changed, 35 insertions(+) create mode 100755 utils/navigator_tools/nodes/rviz_waypoint.py diff --git a/utils/navigator_tools/nodes/rviz_waypoint.py b/utils/navigator_tools/nodes/rviz_waypoint.py new file mode 100755 index 00000000..37b67376 --- /dev/null +++ b/utils/navigator_tools/nodes/rviz_waypoint.py @@ -0,0 +1,35 @@ +#!/usr/bin/env python +from __future__ import division + +import txros +import numpy as np +import navigator_tools +from twisted.internet import defer +from geometry_msgs.msg import PoseStamped + +class RvizRepublisher(object): + @txros.util.cancellableInlineCallbacks + def init(self): + self.nh = yield txros.NodeHandle.from_argv("rviz_republisher") + self.rviz_republish = self.nh.advertise("/rviz_goal", PoseStamped) + self.rviz_goal = self.nh.subscribe("/move_base_simple/goal", PoseStamped) + + yield self.rviz_goal.get_next_message() + + while True: + yield self.nh.sleep(.1) + yield self.do() + + @txros.util.cancellableInlineCallbacks + def do(self): + last = yield self.rviz_goal.get_last_message() + self.rviz_republish.publish(last) + +@txros.util.cancellableInlineCallbacks +def main(): + rr = RvizRepublisher() + yield rr.init() + + yield defer.Deferred() # never exit + +txros.util.launch_main(main) \ No newline at end of file From 1390494ca976a170fb38321c9dd24e8a84bc6284 Mon Sep 17 00:00:00 2001 From: David Date: Thu, 6 Oct 2016 03:53:56 -0400 Subject: [PATCH 098/267] TF: improve tf_fudger tool interface and code structure --- utils/navigator_tools/nodes/tf_fudger.py | 59 ++++++++++++++++-------- 1 file changed, 40 insertions(+), 19 deletions(-) diff --git 
a/utils/navigator_tools/nodes/tf_fudger.py b/utils/navigator_tools/nodes/tf_fudger.py index 12192b6a..cbd0074b 100755 --- a/utils/navigator_tools/nodes/tf_fudger.py +++ b/utils/navigator_tools/nodes/tf_fudger.py @@ -15,34 +15,55 @@ desc_msg = "Pass the name of the parent frame and the child name as well as a an optional multiplier." parser = argparse.ArgumentParser(usage=usage_msg, description=desc_msg) -parser.add_argument(dest='tf_child', - help="Child tf frame.") parser.add_argument(dest='tf_parent', help="Parent tf frame.") -parser.add_argument('--mult', action='store', type=float, default=1, - help="A factor with which to muliply linear components of the transformations") +parser.add_argument(dest='tf_child', + help="Child tf frame.") +parser.add_argument('--range', action='store', type=float, default=3, + help="Maximum range allowed for linear displacement (in meters)") args = parser.parse_args(sys.argv[1:]) cv2.namedWindow('tf', cv2.WINDOW_NORMAL) -cv2.createTrackbar("x", 'tf', 0, 100, lambda x: x) -cv2.createTrackbar("y", 'tf', 0, 100, lambda x: x) -cv2.createTrackbar("z", 'tf', 0, 100, lambda x: x) +# slider max values +x_max = 1000 +ang_max = 1000 + +# tf ranges (symmetric about zero) +x_range = args.range # meters +ang_range = 2 * np.pi # radians + +# tf fudging resolution +x_res = x_range / x_max +ang_res = ang_range / ang_max + +print "Distance resolution: {} meters\nAngular resolution: {} radians".format(x_res, ang_res) + +cv2.createTrackbar("x", 'tf', 0, x_max, lambda x: x) +cv2.createTrackbar("y", 'tf', 0, x_max, lambda x: x) +cv2.createTrackbar("z", 'tf', 0, x_max, lambda x: x) + +cv2.createTrackbar("roll", 'tf', 0, ang_max, lambda x: x) +cv2.createTrackbar("pitch", 'tf', 0, ang_max, lambda x: x) +cv2.createTrackbar("yaw", 'tf', 0, ang_max, lambda x: x) -cv2.createTrackbar("roll", 'tf', 0, 628, lambda x: x) -cv2.createTrackbar("pitch", 'tf', 0, 628, lambda x: x) -cv2.createTrackbar("yaw", 'tf', 0, 628, lambda x: x) +toTfLin = lambda x: x * x_res - 0.5 * x_range +toTfAng = lambda x: x * ang_res - 0.5 * ang_range -while not rospy.is_shutdown(): - x, y, z = cv2.getTrackbarPos("x", "tf") / 100 - .5, cv2.getTrackbarPos("y", "tf") / 100 - .5, cv2.getTrackbarPos("z", "tf") / 100 - .5 - p = (x * args.mult, y * args.mult, z * args.mult) - rpy = (cv2.getTrackbarPos("roll", "tf") / 100 - np.pi, cv2.getTrackbarPos("pitch", "tf") / 100 - np.pi, cv2.getTrackbarPos("yaw", "tf") / 100 - np.pi) - q = tf.transformations.quaternion_from_euler(*rpy) +p = q = None - print "xyz:", [round(x, 5) for x in p], " rpy:", [round(np.degrees(x), 5) for x in rpy] - br.sendTransform(p, q, rospy.Time.now(), args.tf_child, args.tf_parent) +try: + while not rospy.is_shutdown(): + x, y, z = toTfLin(cv2.getTrackbarPos("x", "tf")), toTfLin(cv2.getTrackbarPos("y", "tf")), toTfLin(cv2.getTrackbarPos("z", "tf")) + p = (x, y, z) + rpy = (toTfAng(cv2.getTrackbarPos("roll", "tf")), toTfAng(cv2.getTrackbarPos("pitch", "tf")), toTfAng(cv2.getTrackbarPos("yaw", "tf"))) + q = tf.transformations.quaternion_from_euler(*rpy) - cv2.waitKey(10) + print "xyz:", [round(x, 5) for x in p], " rpy:", [round(np.degrees(x), 5) for x in rpy] + br.sendTransform(p, q, rospy.Time.now(), args.tf_child, args.tf_parent) -cv2.destroyAllWindows() \ No newline at end of file + cv2.waitKey(10) +except KeyboardInterrupt: + print "Final TF:", p, q + cv2.destroyAllWindows() \ No newline at end of file From 0683209167864ce8286337f6262f9ba12f82fd0f Mon Sep 17 00:00:00 2001 From: mattlangford Date: Fri, 14 Oct 2016 02:00:18 -0400 Subject: [PATCH 
099/267] TOOLS: Add ability to switch between displaying quaternion and Euler angles

---
 utils/navigator_tools/nodes/tf_fudger.py | 29 +++++++++++++-----------
 1 file changed, 16 insertions(+), 13 deletions(-)

diff --git a/utils/navigator_tools/nodes/tf_fudger.py b/utils/navigator_tools/nodes/tf_fudger.py
index cbd0074b..5547d9bb 100755
--- a/utils/navigator_tools/nodes/tf_fudger.py
+++ b/utils/navigator_tools/nodes/tf_fudger.py
@@ -12,7 +12,8 @@
 br = tf.TransformBroadcaster()
 
 usage_msg = "Useful to test transforms between two frames."
-desc_msg = "Pass the name of the parent frame and the child name as well as an optional multiplier."
+desc_msg = "Pass the name of the parent frame and the child name as well as an optional multiplier. \n\
+At any point you can press q to switch between displaying a quaternion and an Euler angle."
 
 parser = argparse.ArgumentParser(usage=usage_msg, description=desc_msg)
 parser.add_argument(dest='tf_parent',
@@ -52,18 +53,20 @@
 toTfAng = lambda x: x * ang_res - 0.5 * ang_range
 
 p = q = None
+q_mode = False
 
-try:
-    while not rospy.is_shutdown():
-        x, y, z = toTfLin(cv2.getTrackbarPos("x", "tf")), toTfLin(cv2.getTrackbarPos("y", "tf")), toTfLin(cv2.getTrackbarPos("z", "tf"))
-        p = (x, y, z)
-        rpy = (toTfAng(cv2.getTrackbarPos("roll", "tf")), toTfAng(cv2.getTrackbarPos("pitch", "tf")), toTfAng(cv2.getTrackbarPos("yaw", "tf")))
-        q = tf.transformations.quaternion_from_euler(*rpy)
+while not rospy.is_shutdown():
+    x, y, z = toTfLin(cv2.getTrackbarPos("x", "tf")), toTfLin(cv2.getTrackbarPos("y", "tf")), toTfLin(cv2.getTrackbarPos("z", "tf"))
+    p = (x, y, z)
+    rpy = (toTfAng(cv2.getTrackbarPos("roll", "tf")), toTfAng(cv2.getTrackbarPos("pitch", "tf")), toTfAng(cv2.getTrackbarPos("yaw", "tf")))
+    q = tf.transformations.quaternion_from_euler(*rpy)
 
-        print "xyz:", [round(x, 5) for x in p], " rpy:", [round(np.degrees(x), 5) for x in rpy]
-        br.sendTransform(p, q, rospy.Time.now(), args.tf_child, args.tf_parent)
+    rpy_feedback = "xyz: {}, euler: {}".format([round(x, 5) for x in p], [round(np.degrees(x), 5) for x in rpy])
+    q_feedback = "xyz: {}, q: {}".format([round(x, 5) for x in p], [round(x, 5) for x in q])
+    print q_feedback if q_mode else rpy_feedback
 
-        cv2.waitKey(10)
-except KeyboardInterrupt:
-    print "Final TF:", p, q
-    cv2.destroyAllWindows()
\ No newline at end of file
+    k = cv2.waitKey(10) & 0xFF
+    if k == ord('q'):
+        q_mode = not q_mode
+
+print "\nFinal TF:", p, q
\ No newline at end of file

From e214ff037ce9662209be23a63b634436e288874a Mon Sep 17 00:00:00 2001
From: mattlangford
Date: Sun, 16 Oct 2016 03:33:44 -0400
Subject: [PATCH 100/267] TF: Fix the fixed TF for front stereo

---
 utils/navigator_tools/nodes/tf_fudger.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/utils/navigator_tools/nodes/tf_fudger.py b/utils/navigator_tools/nodes/tf_fudger.py
index 5547d9bb..d87ebc1a 100755
--- a/utils/navigator_tools/nodes/tf_fudger.py
+++ b/utils/navigator_tools/nodes/tf_fudger.py
@@ -69,4 +69,6 @@
     if k == ord('q'):
         q_mode = not q_mode
 
+    br.sendTransform(p, q, rospy.Time.now(), args.tf_child, args.tf_parent)
+
 print "\nFinal TF:", p, q
\ No newline at end of file
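One detail worth spelling out in the key-handling loop above: cv2.waitKey returns -1 when no key is pressed and, on some platforms, an integer with extra high bits set, so the code masks with 0xFF before comparing the low byte against ord('q'). A minimal standalone illustration (not part of the patches):

import cv2

# The low byte of cv2.waitKey's return value holds the key code;
# masking with 0xFF makes the ord() comparison portable.
k = cv2.waitKey(10) & 0xFF
if k == ord('q'):
    print "toggling quaternion display"
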
From 4b93de9d8ebe1bf1ee35d1a3925d754c542f6004 Mon Sep 17 00:00:00 2001
From: mattlangford
Date: Thu, 20 Oct 2016 22:22:56 -0400
Subject: [PATCH 101/267] PR: Fix spelling mistakes, remove debug prints, make TF_fudger save to tf.launch

---
 utils/navigator_tools/nodes/tf_fudger.py | 51 +++++++++++++++++++++++-
 1 file changed, 49 insertions(+), 2 deletions(-)

diff --git a/utils/navigator_tools/nodes/tf_fudger.py b/utils/navigator_tools/nodes/tf_fudger.py
index d87ebc1a..53b2dc61 100755
--- a/utils/navigator_tools/nodes/tf_fudger.py
+++ b/utils/navigator_tools/nodes/tf_fudger.py
@@ -13,7 +13,8 @@
 usage_msg = "Useful to test transforms between two frames."
 
 desc_msg = "Pass the name of the parent frame and the child name as well as an optional multiplier. \n\
-At any point you can press q to switch between displaying a quaternion and an Euler angle."
+At any point you can press q to switch between displaying a quaternion and an Euler angle. \n\
+Press 's' to exit and REPLACE or ADD the line in the tf launch file. Use control-c if you don't want to save."
 
 parser = argparse.ArgumentParser(usage=usage_msg, description=desc_msg)
 parser.add_argument(dest='tf_parent',
@@ -55,6 +56,10 @@
 p = q = None
 q_mode = False
 
+# Used for printing and saving
+tf_line = '<node pkg="tf" type="static_transform_publisher" name="{child}_tf" args="{p[0]} {p[1]} {p[2]} {q[0]} {q[1]} {q[2]} {q[3]} {parent} {child} {prd}"/>\n'
+prd = 100  # This will get replaced if needed
+
 while not rospy.is_shutdown():
     x, y, z = toTfLin(cv2.getTrackbarPos("x", "tf")), toTfLin(cv2.getTrackbarPos("y", "tf")), toTfLin(cv2.getTrackbarPos("z", "tf"))
     p = (x, y, z)
@@ -69,6 +74,48 @@
     if k == ord('q'):
         q_mode = not q_mode
 
+    if k == ord('s'):
+        # Save the transform in navigator_launch/launch/tf.launch, replacing the existing line
+        import rospkg
+        rospack = rospkg.RosPack()
+        launch_path = rospack.get_path('navigator_launch')
+        launch_path += "/launch/subsystems/tf.launch"
+
+        with open(launch_path, 'r') as f:
+            lines = f.readlines()
+
+        last_static_pub = 0
+        tab_level = ''
+        np.set_printoptions(suppress=True)
+        tf_line_to_add = tf_line.format(child=args.tf_child, p=p, q=np.round(q, 5), parent=args.tf_parent, prd="{prd}")
+        for i, line in enumerate(lines):
+            if args.tf_child in line and args.tf_parent in line:
+                tab_level = line[:line.find("<")]  # The tabs in front of the tf line
+                prd = int([x for x in line.split('"')[-2].split(" ") if x != ''][-1])  # Get the period from the tf line
+                lines[i] = tab_level + tf_line_to_add.format(prd=prd)
+
+                print "Found line in tf.launch!"
+                print "Replacing: {}".format(line.replace(tab_level, '')[:-1])
+                print "     With: {}".format(lines[i].replace(tab_level, '')[:-1])
+                break
+
+            elif 'pkg="tf"' in line:
+                # In case we don't find the tf line of interest to change, we want to insert the new tf line after the last tf line
+                tab_level = line[:line.find("<")]  # The tabs in front of the tf line
+                last_static_pub = i
+
+        else:
+            print "Tf link not found between /{parent} /{child} in tf.launch.".format(parent=args.tf_parent, child=args.tf_child)
+            lines.insert(last_static_pub + 1, '\n' + tab_level + tf_line_to_add.format(prd=prd))
+            print "Adding: {}".format(lines[last_static_pub + 1].replace(tab_level, '')[:-1])
+
+        with open(launch_path, 'w') as f:
+            for line in lines:
+                f.write(line)
+
+        break
+
     br.sendTransform(p, q, rospy.Time.now(), args.tf_child, args.tf_parent)
 
-print "\nFinal TF:", p, q
\ No newline at end of file
+# Print out the tf static transform line with the fudged tf
+print '\n', tf_line.format(child=args.tf_child, p=p, q=np.round(q, 5), parent=args.tf_parent, prd=prd)
\ No newline at end of file
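For reference, rendering the tf_line template in patch 101 produces a static_transform_publisher launch entry. A hypothetical expansion (the frame names, offsets, and period below are made up, and the template itself is a reconstruction, since the original string was mangled in this dump):

# Hypothetical expansion of the tf_line template (not output captured from the tool):
tf_line = '<node pkg="tf" type="static_transform_publisher" name="{child}_tf" ' \
          'args="{p[0]} {p[1]} {p[2]} {q[0]} {q[1]} {q[2]} {q[3]} {parent} {child} {prd}"/>\n'
print tf_line.format(child='front_stereo', parent='base_link',
                     p=(0.1, 0.0, 0.3), q=(0.0, 0.0, 0.0, 1.0), prd=100)
# -> <node pkg="tf" type="static_transform_publisher" name="front_stereo_tf"
#         args="0.1 0.0 0.3 0.0 0.0 0.0 1.0 base_link front_stereo 100"/>
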
+ print "Replacing: {}".format(line.replace(tab_level, '')[:-1]) + print " With: {}".format(lines[i].replace(tab_level, '')[:-1]) + break + + elif 'pkg="tf"' in line: + # Incase we don't find the tf line of interest to change, we want to insert the new tf line after the last tf line + tab_level = line[:line.find("<")] # The labs infront of the tf line + last_static_pub = i + + else: + print "Tf link not found between /{parent} /{child} in tf.launch.".format(parent=args.tf_parent, child=args.tf_child) + lines.insert(last_static_pub + 1, '\n' + tab_level + tf_line_to_add.format(prd=prd)) + print "Adding: {}".format(lines[last_static_pub + 1].replace(tab_level, '')[:-1]) + + with open(launch_path, 'w') as f: + for line in lines: + f.write(line) + + break + br.sendTransform(p, q, rospy.Time.now(), args.tf_child, args.tf_parent) -print "\nFinal TF:", p, q \ No newline at end of file +# Print out the tf static transform line with the fudged tf +print '\n',tf_line.format(child=args.tf_child, p=p, q=np.round(q, 5), parent=args.tf_parent, prd=prd) \ No newline at end of file From ae8309efe19e0e69bdcd420b903a72021af6ffd5 Mon Sep 17 00:00:00 2001 From: David Date: Mon, 24 Oct 2016 19:46:46 -0400 Subject: [PATCH 102/267] TOOLS: make print_t() handle printing without rospy initialization --- .../navigator_tools/navigator_tools/general_helpers.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/utils/navigator_tools/navigator_tools/general_helpers.py b/utils/navigator_tools/navigator_tools/general_helpers.py index d07fab10..792f34c9 100644 --- a/utils/navigator_tools/navigator_tools/general_helpers.py +++ b/utils/navigator_tools/navigator_tools/general_helpers.py @@ -3,6 +3,10 @@ def print_t(_str, time=None): # Needs a rospy init for ros time if time is None: - time = rospy.Time.now().to_sec() - - print "\033[1m{}:\033[0m {}".format(time, _str) \ No newline at end of file + try: + time = rospy.Time.now().to_sec() + print "\033[1m{}:\033[0m {}".format(time, _str) + except rospy.exceptions.ROSInitException: + print _str + else: + print "\033[1m{}:\033[0m {}".format(time, _str) From f552eb9cd700a97df27ebaa36d37495e908bfd4a Mon Sep 17 00:00:00 2001 From: mike Date: Sat, 29 Oct 2016 02:25:25 -0400 Subject: [PATCH 103/267] PERCEPTION: Rename package to navigator_vison also fix somethings outputed by DDB -Wall --- perception/navigator_vision/CMakeLists.txt | 205 + perception/navigator_vision/README.md | 1 + perception/navigator_vision/calibration.xml | 94 + .../exFAST_SparseStereo/GPL.txt | 674 ++ .../exFAST_SparseStereo/README.txt | 38 + .../example-data/calibration.xml | 79 + .../exFAST_SparseStereo/example-data/left.png | Bin 0 -> 160454 bytes .../example-data/right.png | Bin 0 -> 156177 bytes .../src/example/example.cpp | 144 + .../src/sparsestereo/CMakeLists.txt | 26 + .../src/sparsestereo/calibrationresult.cpp | 55 + .../src/sparsestereo/calibrationresult.h | 28 + .../src/sparsestereo/census-inl.h | 59 + .../src/sparsestereo/census.cpp | 109 + .../src/sparsestereo/census.h | 30 + .../src/sparsestereo/censuswindow.h | 237 + .../src/sparsestereo/exception.h | 29 + .../src/sparsestereo/extendedfast-inl.h | 47 + .../src/sparsestereo/extendedfast.cpp | 232 + .../src/sparsestereo/extendedfast.h | 77 + .../src/sparsestereo/fast9-inl.h | 6016 +++++++++++++++++ .../src/sparsestereo/fast9.h | 37 + .../src/sparsestereo/hammingdistance.cpp | 24 + .../src/sparsestereo/hammingdistance.h | 80 + .../src/sparsestereo/imageconversion.cpp | 95 + .../src/sparsestereo/imageconversion.h | 37 + 
.../src/sparsestereo/simd.cpp | 29 + .../src/sparsestereo/simd.h | 150 + .../src/sparsestereo/sparsematch.h | 54 + .../src/sparsestereo/sparserectification.cpp | 111 + .../src/sparsestereo/sparserectification.h | 49 + .../src/sparsestereo/sparsestereo-inl.h | 225 + .../src/sparsestereo/sparsestereo.h | 57 + .../src/sparsestereo/stereorectification.cpp | 121 + .../src/sparsestereo/stereorectification.h | 163 + .../navigator_vision_lib/camera_frame.hpp | 223 + .../camera_frame_sequence.hpp | 118 + .../navigator_vision_lib/camera_model.hpp | 62 + .../include/navigator_vision_lib/cv_tools.hpp | 177 + .../point_cloud_algorithms.hpp | 70 + .../ros_camera_stream.hpp | 267 + .../navigator_vision_lib/visualization.hpp | 24 + .../nodes/camera_stream_demo.cpp | 39 + .../CameraLidarTransformer.cpp | 199 + .../CameraLidarTransformer.hpp | 69 + .../nodes/camera_to_lidar/main.cpp | 9 + .../navigator_vision/nodes/model_detector.py | 314 + .../nodes/scan_the_code_lib/__init__.py | 0 .../nodes/scan_the_code_lib/model.py | 100 + .../nodes/scan_the_code_lib/model_tracker.py | 121 + .../shape_identification/DockShapeVision.cpp | 2 + .../shape_identification/DockShapeVision.h | 14 + .../GrayscaleContour/GrayscaleContour.cpp | 354 + .../GrayscaleContour/GrayscaleContour.h | 82 + .../shape_identification/ShapeTracker.cpp | 76 + .../nodes/shape_identification/ShapeTracker.h | 24 + .../shape_identification/TrackedShape.cpp | 81 + .../nodes/shape_identification/TrackedShape.h | 27 + .../nodes/shape_identification/main.cpp | 115 + .../nodes/start_gate_manual.py | 308 + .../nodes/stereo_point_cloud_driver.cpp | 502 ++ perception/navigator_vision/package.xml | 35 + .../ros_tools/on_the_fly_thresholder.py | 117 + perception/navigator_vision/setup.py | 11 + .../src/navigator_vision_lib/cv_utils.cc | 656 ++ .../point_cloud_algorithms.cc | 142 + .../src/navigator_vision_lib/visualization.cc | 151 + 67 files changed, 13901 insertions(+) create mode 100644 perception/navigator_vision/CMakeLists.txt create mode 100644 perception/navigator_vision/README.md create mode 100644 perception/navigator_vision/calibration.xml create mode 100644 perception/navigator_vision/exFAST_SparseStereo/GPL.txt create mode 100644 perception/navigator_vision/exFAST_SparseStereo/README.txt create mode 100644 perception/navigator_vision/exFAST_SparseStereo/example-data/calibration.xml create mode 100644 perception/navigator_vision/exFAST_SparseStereo/example-data/left.png create mode 100644 perception/navigator_vision/exFAST_SparseStereo/example-data/right.png create mode 100644 perception/navigator_vision/exFAST_SparseStereo/src/example/example.cpp create mode 100644 perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/CMakeLists.txt create mode 100644 perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/calibrationresult.cpp create mode 100644 perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/calibrationresult.h create mode 100644 perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/census-inl.h create mode 100644 perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/census.cpp create mode 100644 perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/census.h create mode 100644 perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/censuswindow.h create mode 100644 perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/exception.h create mode 100644 perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/extendedfast-inl.h create mode 100644 
perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/extendedfast.cpp create mode 100644 perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/extendedfast.h create mode 100644 perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/fast9-inl.h create mode 100644 perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/fast9.h create mode 100644 perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/hammingdistance.cpp create mode 100644 perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/hammingdistance.h create mode 100644 perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/imageconversion.cpp create mode 100644 perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/imageconversion.h create mode 100644 perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/simd.cpp create mode 100644 perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/simd.h create mode 100644 perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/sparsematch.h create mode 100644 perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/sparserectification.cpp create mode 100644 perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/sparserectification.h create mode 100644 perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/sparsestereo-inl.h create mode 100644 perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/sparsestereo.h create mode 100644 perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/stereorectification.cpp create mode 100644 perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/stereorectification.h create mode 100644 perception/navigator_vision/include/navigator_vision_lib/camera_frame.hpp create mode 100644 perception/navigator_vision/include/navigator_vision_lib/camera_frame_sequence.hpp create mode 100644 perception/navigator_vision/include/navigator_vision_lib/camera_model.hpp create mode 100644 perception/navigator_vision/include/navigator_vision_lib/cv_tools.hpp create mode 100644 perception/navigator_vision/include/navigator_vision_lib/point_cloud_algorithms.hpp create mode 100644 perception/navigator_vision/include/navigator_vision_lib/ros_camera_stream.hpp create mode 100644 perception/navigator_vision/include/navigator_vision_lib/visualization.hpp create mode 100644 perception/navigator_vision/nodes/camera_stream_demo.cpp create mode 100644 perception/navigator_vision/nodes/camera_to_lidar/CameraLidarTransformer.cpp create mode 100644 perception/navigator_vision/nodes/camera_to_lidar/CameraLidarTransformer.hpp create mode 100644 perception/navigator_vision/nodes/camera_to_lidar/main.cpp create mode 100755 perception/navigator_vision/nodes/model_detector.py create mode 100644 perception/navigator_vision/nodes/scan_the_code_lib/__init__.py create mode 100644 perception/navigator_vision/nodes/scan_the_code_lib/model.py create mode 100644 perception/navigator_vision/nodes/scan_the_code_lib/model_tracker.py create mode 100644 perception/navigator_vision/nodes/shape_identification/DockShapeVision.cpp create mode 100644 perception/navigator_vision/nodes/shape_identification/DockShapeVision.h create mode 100644 perception/navigator_vision/nodes/shape_identification/GrayscaleContour/GrayscaleContour.cpp create mode 100644 perception/navigator_vision/nodes/shape_identification/GrayscaleContour/GrayscaleContour.h create mode 100644 perception/navigator_vision/nodes/shape_identification/ShapeTracker.cpp create mode 100644 
perception/navigator_vision/nodes/shape_identification/ShapeTracker.h
 create mode 100644 perception/navigator_vision/nodes/shape_identification/TrackedShape.cpp
 create mode 100644 perception/navigator_vision/nodes/shape_identification/TrackedShape.h
 create mode 100644 perception/navigator_vision/nodes/shape_identification/main.cpp
 create mode 100755 perception/navigator_vision/nodes/start_gate_manual.py
 create mode 100644 perception/navigator_vision/nodes/stereo_point_cloud_driver.cpp
 create mode 100644 perception/navigator_vision/package.xml
 create mode 100755 perception/navigator_vision/ros_tools/on_the_fly_thresholder.py
 create mode 100644 perception/navigator_vision/setup.py
 create mode 100644 perception/navigator_vision/src/navigator_vision_lib/cv_utils.cc
 create mode 100644 perception/navigator_vision/src/navigator_vision_lib/point_cloud_algorithms.cc
 create mode 100644 perception/navigator_vision/src/navigator_vision_lib/visualization.cc

diff --git a/perception/navigator_vision/CMakeLists.txt b/perception/navigator_vision/CMakeLists.txt
new file mode 100644
index 00000000..220eb2cc
--- /dev/null
+++ b/perception/navigator_vision/CMakeLists.txt
@@ -0,0 +1,205 @@
+cmake_minimum_required(VERSION 2.8.3)
+project(navigator_vision)
+
+# SET(CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/cmake;${CMAKE_MODULE_PATH}")
+# set c++11 as default, override with set_target_properties()
+# if needed for specific nodes (cough..cough... PCL)
+SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fopenmp -g -pedantic -Wall -std=c++11 ")
+
+find_package(catkin
+  REQUIRED COMPONENTS
+    roscpp
+    rospy
+    eigen_conversions
+    rostime
+    image_transport
+    image_geometry
+    cv_bridge
+    message_generation
+    navigator_msgs
+    std_msgs
+    std_srvs
+    geometry_msgs
+    sensor_msgs
+    tf
+    pcl_ros
+    tf2_sensor_msgs
+    tf2_geometry_msgs
+)
+
+find_package(PCL 1.7 REQUIRED)
+find_package(Boost REQUIRED date_time)
+
+catkin_python_setup()
+
+catkin_package(
+  INCLUDE_DIRS
+    include
+    exFAST_SparseStereo/src/sparsestereo
+  LIBRARIES
+    navigator_vision_lib
+    sparsestereo
+  CATKIN_DEPENDS
+    roscpp
+    rospy
+    message_runtime
+    std_msgs
+    geometry_msgs
+    sensor_msgs
+    navigator_msgs
+  DEPENDS
+    system_lib
+    image_transport
+    image_geometry
+    cv_bridge
+    navigator_msgs
+)
+
+include_directories(
+  include
+  exFAST_SparseStereo/src
+  SYSTEM
+  ${PCL_INCLUDE_DIRS}
+  ${Boost_INCLUDE_DIR}
+  ${catkin_INCLUDE_DIRS}
+)
+link_directories(${PCL_LIBRARY_DIRS})
+add_definitions(${PCL_DEFINITIONS})
+
+SET(EXFAST_SRC_DIR "exFAST_SparseStereo/src/sparsestereo")
+add_library( navigator_sparsestereo SHARED
+  ${EXFAST_SRC_DIR}/calibrationresult.h
+  ${EXFAST_SRC_DIR}/calibrationresult.cpp
+  ${EXFAST_SRC_DIR}/censuswindow.h
+  ${EXFAST_SRC_DIR}/exception.h
+  ${EXFAST_SRC_DIR}/extendedfast.cpp
+  ${EXFAST_SRC_DIR}/extendedfast.h
+  ${EXFAST_SRC_DIR}/fast9.h
+  ${EXFAST_SRC_DIR}/fast9-inl.h
+  ${EXFAST_SRC_DIR}/hammingdistance.cpp
+  ${EXFAST_SRC_DIR}/hammingdistance.h
+  ${EXFAST_SRC_DIR}/simd.h
+  ${EXFAST_SRC_DIR}/simd.cpp
+  ${EXFAST_SRC_DIR}/sparsematch.h
+  ${EXFAST_SRC_DIR}/sparserectification.cpp
+  ${EXFAST_SRC_DIR}/sparserectification.h
+  ${EXFAST_SRC_DIR}/sparsestereo.h
+  ${EXFAST_SRC_DIR}/sparsestereo-inl.h
+  ${EXFAST_SRC_DIR}/stereorectification.cpp
+  ${EXFAST_SRC_DIR}/stereorectification.h
+  ${EXFAST_SRC_DIR}/imageconversion.h
+  ${EXFAST_SRC_DIR}/imageconversion.cpp
+  ${EXFAST_SRC_DIR}/census.h
+  ${EXFAST_SRC_DIR}/census-inl.h
+  ${EXFAST_SRC_DIR}/census.cpp
+)
+
+target_link_libraries(
+  navigator_sparsestereo
+  ${catkin_LIBRARIES}
+  ${Boost_LIBRARIES}
+  ${OpenCV_LIBS}
+)
+
+set_target_properties(navigator_sparsestereo PROPERTIES COMPILE_FLAGS "-O3 -DNDEBUG -fopenmp -g -Wall -march=native -msse -msse2 -msse3 -mssse3 -msse4 -ffast-math -mfpmath=sse")
+
+install(TARGETS navigator_sparsestereo
+  ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION}
+  LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION}
+  RUNTIME DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}
+)
+
+add_library(
+  navigator_vision_lib
+  src/navigator_vision_lib/visualization.cc
+  src/navigator_vision_lib/cv_utils.cc
+  src/navigator_vision_lib/point_cloud_algorithms.cc
+)
+
+target_include_directories(navigator_vision_lib PUBLIC include/navigator_vision_lib)
+
+target_link_libraries(
+  navigator_vision_lib
+  ${catkin_LIBRARIES}
+  ${Boost_LIBRARIES}
+)
+
+install(TARGETS navigator_vision_lib
+  ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION}
+  LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION}
+  RUNTIME DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}
+)
+
+install(DIRECTORY include/${PROJECT_NAME}/
+  DESTINATION ${CATKIN_PACKAGE_INCLUDE_DESTINATION}
+  FILES_MATCHING PATTERN "*.hpp"
+  PATTERN ".svn" EXCLUDE
+)
+
+
+add_executable(
+  stereo_point_cloud_driver
+  nodes/stereo_point_cloud_driver.cpp
+)
+
+add_dependencies(
+  stereo_point_cloud_driver
+  navigator_msgs_generate_messages_cpp
+  ${catkin_EXPORTED_TARGETS}
+)
+
+target_link_libraries(
+  stereo_point_cloud_driver
+  navigator_sparsestereo
+  navigator_vision_lib
+  ${PCL_COMMON_LIBRARIES}
+  ${PCL_IO_LIBRARIES}
+  ${PCL_LIBRARIES}
+  ${catkin_LIBRARIES}
+  ${Boost_LIBRARIES}
+  ${OpenCV_LIBS}
+)
+
+set_target_properties(stereo_point_cloud_driver PROPERTIES COMPILE_FLAGS "-O3 -DNDEBUG -fopenmp -g -Wall -march=native -msse -msse2 -msse3 -mssse3 -msse4 -ffast-math -mfpmath=sse")
+
+
+add_executable( camera_stream_demo nodes/camera_stream_demo.cpp)
+
+target_link_libraries(
+  camera_stream_demo
+  navigator_vision_lib
+  ${catkin_LIBRARIES}
+  ${Boost_LIBRARIES}
+  ${OpenCV_LIBS}
+)
+
+add_executable(shape_identification
+  nodes/shape_identification/main.cpp
+  nodes/shape_identification/DockShapeVision.cpp
+  nodes/shape_identification/GrayscaleContour/GrayscaleContour.cpp
+  nodes/shape_identification/TrackedShape.cpp
+  nodes/shape_identification/ShapeTracker.cpp
+)
+add_dependencies(shape_identification
+  ${catkin_EXPORTED_TARGETS}
+)
+target_link_libraries(shape_identification
+  ${catkin_LIBRARIES}
+  ${OpenCV_LIBS}
+)
+add_executable(camera_to_lidar
+  nodes/camera_to_lidar/main.cpp
+  nodes/camera_to_lidar/CameraLidarTransformer.cpp
+)
+add_dependencies(camera_to_lidar
+  ${catkin_EXPORTED_TARGETS}
+)
+target_link_libraries(camera_to_lidar
+  ${catkin_LIBRARIES}
+  ${OpenCV_LIBS}
+)
diff --git a/perception/navigator_vision/README.md b/perception/navigator_vision/README.md
new file mode 100644
index 00000000..01d81d7c
--- /dev/null
+++ b/perception/navigator_vision/README.md
@@ -0,0 +1 @@
+rosrun navigator_perception scan_the_code
diff --git a/perception/navigator_vision/calibration.xml b/perception/navigator_vision/calibration.xml
new file mode 100644
index 00000000..fe6a79f6
--- /dev/null
+++ b/perception/navigator_vision/calibration.xml
@@ -0,0 +1,94 @@
+<?xml version="1.0"?>
+<opencv_storage>
+<M1 type_id="opencv-matrix">
+  <rows>3</rows>
+  <cols>3</cols>
+  <dt>d</dt>
+  <data>
+    702.1922 0. 476.5935
+    0. 699.2620 298.8085
+    0. 0. 1.</data></M1>
+<D1 type_id="opencv-matrix">
+  <rows>1</rows>
+  <cols>5</cols>
+  <dt>d</dt>
+  <data>
+    -0.1701 0.1220 -8.5339e-04 -0.0012 -0.0304</data></D1>
+<M2 type_id="opencv-matrix">
+  <rows>3</rows>
+  <cols>3</cols>
+  <dt>d</dt>
+  <data>
+    704.5182 0 476.1918
+    0 701.7624 300.8320
+    0 0 1.0000</data></M2>
+<D2 type_id="opencv-matrix">
+  <rows>1</rows>
+  <cols>5</cols>
+  <dt>d</dt>
+  <data>
+    -0.1770 0.1433 -3.1992e-04 -0.0014 -0.0434</data></D2>
+<R1 type_id="opencv-matrix">
+  <rows>3</rows>
+  <cols>3</cols>
+  <dt>d</dt>
+  <data>
+    1 0 0
+    0 1 0
+    0 0 1</data></R1>
+<R2 type_id="opencv-matrix">
+  <rows>3</rows>
+  <cols>3</cols>
+  <dt>d</dt>
+  <data>
+    1 0 0
+    0 1 0
+    0 0 1</data></R2>
+<P1 type_id="opencv-matrix">
+  <rows>3</rows>
+  <cols>4</cols>
+  <dt>d</dt>
+  <data>
+    702.1922 0 476.5935 -140.4384
+    0 699.2620 298.8085 0
+    0 0 1.0000 0</data></P1>
+<P2 type_id="opencv-matrix">
+  <rows>3</rows>
+  <cols>4</cols>
+  <dt>d</dt>
+  <data>
+    704.5182 0 476.1918 0
+    0 701.7624 300.8320 0
+    0 0 1.0000 0</data></P2>
+<Q type_id="opencv-matrix">
+  <rows>4</rows>
+  <cols>4</cols>
+  <dt>d</dt>
+  <data>
+    1. 0. 0. -3.3994384384155273e+02 0. 1. 0. -2.2540025138854980e+02 0.
+    0. 0. 5.4581318104785521e+02 0. 0. -9.1620188198129178e+00 0.</data></Q>
+<size type_id="opencv-matrix">
+  <rows>2</rows>
+  <cols>1</cols>
+  <dt>i</dt>
+  <data>
+    960 600</data></size>
+</opencv_storage>
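The calibration file above follows OpenCV's FileStorage XML schema; the node names (M1/D1 for the left camera, M2/D2 for the right, R1/R2, P1/P2, Q, size) match the naming convention described in the exFAST README further down, and are an assumption for this particular file. Assuming an OpenCV build whose Python bindings expose cv2.FileStorage (3.x or later), a minimal read-back sketch looks like:

import cv2

# Minimal sketch (not part of the patch): load the calibration matrices above.
fs = cv2.FileStorage('calibration.xml', cv2.FILE_STORAGE_READ)
M1 = fs.getNode('M1').mat()   # 3x3 left camera matrix
D1 = fs.getNode('D1').mat()   # 1x5 left distortion coefficients
Q = fs.getNode('Q').mat()     # 4x4 disparity-to-depth mapping matrix
fs.release()
print M1
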
diff --git a/perception/navigator_vision/exFAST_SparseStereo/GPL.txt b/perception/navigator_vision/exFAST_SparseStereo/GPL.txt new file mode 100644 index 00000000..94a9ed02 --- /dev/null +++ b/perception/navigator_vision/exFAST_SparseStereo/GPL.txt @@ -0,0 +1,674 @@ + GNU GENERAL PUBLIC LICENSE + Version 3, 29 June 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU General Public License is a free, copyleft license for +software and other kinds of works. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +the GNU General Public License is intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. We, the Free Software Foundation, use the +GNU General Public License for most of our software; it applies also to +any other work released this way by its authors. You can apply it to +your programs, too. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + To protect your rights, we need to prevent others from denying you +these rights or asking you to surrender the rights. Therefore, you have +certain responsibilities if you distribute copies of the software, or if +you modify it: responsibilities to respect the freedom of others. + + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must pass on to the recipients the same +freedoms that you received. You must make sure that they, too, receive +or can get the source code. And you must show them these terms so they +know their rights. + + Developers that use the GNU GPL protect your rights with two steps: +(1) assert copyright on the software, and (2) offer you this License +giving you legal permission to copy, distribute and/or modify it. + + For the developers' and authors' protection, the GPL clearly explains +that there is no warranty for this free software. For both users' and +authors' sake, the GPL requires that modified versions be marked as +changed, so that their problems will not be attributed erroneously to +authors of previous versions. + + Some devices are designed to deny users access to install or run +modified versions of the software inside them, although the manufacturer +can do so. This is fundamentally incompatible with the aim of +protecting users' freedom to change the software. The systematic +pattern of such abuse occurs in the area of products for individuals to +use, which is precisely where it is most unacceptable. Therefore, we +have designed this version of the GPL to prohibit the practice for those +products. If such problems arise substantially in other domains, we +stand ready to extend this provision to those domains in future versions +of the GPL, as needed to protect the freedom of users. + + Finally, every program is threatened constantly by software patents. 
+States should not allow patents to restrict development and use of +software on general-purpose computers, but in those that do, we wish to +avoid the special danger that patents applied to a free program could +make it effectively proprietary. To prevent this, the GPL assures that +patents cannot be used to render the program non-free. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. 
+ + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. 
+ + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. 
+ + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. + + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. 
+ + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. 
+ + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. + + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. 
If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). 
To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Use with the GNU Affero General Public License. 
+ + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU Affero General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the special requirements of the GNU Affero General Public License, +section 13, concerning interaction through a network will apply to the +combination as such. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. + + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. 
+
+                     END OF TERMS AND CONDITIONS
+
+            How to Apply These Terms to Your New Programs
+
+  If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+  To do so, attach the following notices to the program.  It is safest
+to attach them to the start of each source file to most effectively
+state the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+    <one line to give the program's name and a brief idea of what it does.>
+    Copyright (C) <year>  <name of author>
+
+    This program is free software: you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation, either version 3 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License
+    along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+Also add information on how to contact you by electronic and paper mail.
+
+  If the program does terminal interaction, make it output a short
+notice like this when it starts in an interactive mode:
+
+    <program>  Copyright (C) <year>  <name of author>
+    This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
+    This is free software, and you are welcome to redistribute it
+    under certain conditions; type `show c' for details.
+
+The hypothetical commands `show w' and `show c' should show the appropriate
+parts of the General Public License.  Of course, your program's commands
+might be different; for a GUI interface, you would use an "about box".
+
+  You should also get your employer (if you work as a programmer) or school,
+if any, to sign a "copyright disclaimer" for the program, if necessary.
+For more information on this, and how to apply and follow the GNU GPL, see
+<http://www.gnu.org/licenses/>.
+
+  The GNU General Public License does not permit incorporating your program
+into proprietary programs.  If your program is a subroutine library, you
+may consider it more useful to permit linking proprietary applications with
+the library.  If this is what you want to do, use the GNU Lesser General
+Public License instead of this License.  But first, please read
+<http://www.gnu.org/licenses/why-not-lgpl.html>.
diff --git a/perception/navigator_vision/exFAST_SparseStereo/README.txt b/perception/navigator_vision/exFAST_SparseStereo/README.txt
new file mode 100644
index 00000000..752d37c7
--- /dev/null
+++ b/perception/navigator_vision/exFAST_SparseStereo/README.txt
@@ -0,0 +1,38 @@
+exFAST and High-Performance Sparse Stereo Matcher
+
+This source code is published under the GNU General Public License version 3,
+of which a copy is distributed with this code. Exempt from this license is the
+FAST9 detector (file: src/sparsestereo/fast9-inl.h), which was originally
+written by Edward Rosten and has only been modified by us.
+
+The source code makes heavy use of the GCC vector extensions to take advantage
+of the SSE instruction sets. Hence, the code cannot be compiled with a
+different compiler without making major modifications.
+
+We included a simple example application that demonstrates how to use our code.
+Also included is one example stereo pair with a matching camera calibration
+file.
+
+Some notes about camera calibration: We use the OpenCV functions
+stereoCalibrate and stereoRectify to obtain the calibration and rectification
+data. The example application loads those settings from an XML file. The
+meaning of the individual data items is as follows:
+
+M1: Camera matrix of left camera (from stereoCalibrate)
+M2: Camera matrix of right camera (from stereoCalibrate)
+D1: Distortion coefficients for left camera (from stereoCalibrate)
+D2: Distortion coefficients for right camera (from stereoCalibrate)
+R1: Rectification transformation for left camera (from stereoRectify)
+R2: Rectification transformation for right camera (from stereoRectify)
+P1: Projection matrix for left camera (from stereoRectify)
+P2: Projection matrix for right camera (from stereoRectify)
+Q: Disparity-to-depth mapping matrix (from stereoRectify)
+size: Image size of one camera image
+
+If you use our code for any research that will be published, we kindly ask you
+to cite our paper:
+
+Konstantin Schauwecker, Reinhard Klette, and Andreas Zell. A New Feature
+Detector and Stereo Matching Method for Accurate High-Performance Sparse Stereo
+Matching. In IEEE/RSJ International Conference on Intelligent Robots and
+Systems (IROS). IEEE, October 2012.
diff --git a/perception/navigator_vision/exFAST_SparseStereo/example-data/calibration.xml b/perception/navigator_vision/exFAST_SparseStereo/example-data/calibration.xml
new file mode 100644
index 00000000..ccb6f2c2
--- /dev/null
+++ b/perception/navigator_vision/exFAST_SparseStereo/example-data/calibration.xml
@@ -0,0 +1,79 @@
+<?xml version="1.0"?>
+<opencv_storage>
+<M1 type_id="opencv-matrix">
+  <rows>3</rows>
+  <cols>3</cols>
+  <dt>d</dt>
+  <data>
+    6.5241357652015370e+02 0. 3.4737478196748043e+02 0.
+    6.5434070705887893e+02 2.2621555342556007e+02 0. 0. 1.</data></M1>
+<D1 type_id="opencv-matrix">
+  <rows>1</rows>
+  <cols>5</cols>
+  <dt>d</dt>
+  <data>
+    -4.3763636007329099e-01 2.8006488733035506e-01
+    4.0981386272831622e-04 -1.1888413625068292e-03
+    -1.4136761880997478e-01</data></D1>
+<M2 type_id="opencv-matrix">
+  <rows>3</rows>
+  <cols>3</cols>
+  <dt>d</dt>
+  <data>
+    6.5181248374572181e+02 0. 3.3187853747142810e+02 0.
+    6.5339335949419046e+02 2.2968726339955754e+02 0. 0. 1.</data></M2>
+<D2 type_id="opencv-matrix">
+  <rows>1</rows>
+  <cols>5</cols>
+  <dt>d</dt>
+  <data>
+    -4.3932608881272195e-01 2.9797543665860654e-01
+    -4.7999780141084688e-04 -1.0480857219477820e-03
+    -1.6575380391655467e-01</data></D2>
+<R1 type_id="opencv-matrix">
+  <rows>3</rows>
+  <cols>3</cols>
+  <dt>d</dt>
+  <data>
+    9.9964441516773250e-01 -5.8620392937120611e-03
+    2.6013068240420170e-02 5.8033365360402080e-03 9.9998044230621075e-01
+    2.3315853243672281e-03 -2.6026227329585080e-02
+    -2.1797936586566173e-03 9.9965888381517143e-01</data></R1>
+<R2 type_id="opencv-matrix">
+  <rows>3</rows>
+  <cols>3</cols>
+  <dt>d</dt>
+  <data>
+    9.9993558455087461e-01 5.2798013266097798e-03
+    -1.0047409957402902e-02 -5.3024541086745978e-03
+    9.9998345699189362e-01 -2.2292875470448703e-03
+    1.0035473547670247e-02 2.2824198766964580e-03 9.9994703851263078e-01</data></R2>
+<P1 type_id="opencv-matrix">
+  <rows>3</rows>
+  <cols>4</cols>
+  <dt>d</dt>
+  <data>
+    5.4581318104785521e+02 0. 3.3994384384155273e+02 0. 0.
+    5.4581318104785521e+02 2.2540025138854980e+02 0. 0. 0. 1. 0.</data></P1>
+<P2 type_id="opencv-matrix">
+  <rows>3</rows>
+  <cols>4</cols>
+  <dt>d</dt>
+  <data>
+    5.4581318104785521e+02 0. 3.3994384384155273e+02
+    -5.9573462113779023e+01 0. 5.4581318104785521e+02
+    2.2540025138854980e+02 0. 0. 0. 1. 0.</data></P2>
+<Q type_id="opencv-matrix">
+  <rows>4</rows>
+  <cols>4</cols>
+  <dt>d</dt>
+  <data>
+    1. 0. 0. -3.3994384384155273e+02 0. 1. 0. -2.2540025138854980e+02 0.
+    0. 0. 5.4581318104785521e+02 0. 0. -9.1620188198129178e+00 0.</data></Q>
+<size type_id="opencv-matrix">
+  <rows>2</rows>
+  <cols>1</cols>
+  <dt>i</dt>
+  <data>
+    640 480</data></size>
+</opencv_storage>
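As an aside for readers: calibration.xml is ordinary OpenCV FileStorage XML, so its settings can also be inspected outside the bundled C++ example application. The minimal Python sketch below is illustrative only and not part of this patch; it assumes OpenCV >= 3 Python bindings and that it runs from the example-data directory, and it uses the node names documented in the README above.

import cv2

# Read the calibration produced by stereoCalibrate/stereoRectify.
# Node names (M1, D1, ..., Q, size) match the README above.
fs = cv2.FileStorage('calibration.xml', cv2.FILE_STORAGE_READ)
M1, D1 = fs.getNode('M1').mat(), fs.getNode('D1').mat()
M2, D2 = fs.getNode('M2').mat(), fs.getNode('D2').mat()
R1, R2 = fs.getNode('R1').mat(), fs.getNode('R2').mat()
P1, P2 = fs.getNode('P1').mat(), fs.getNode('P2').mat()
Q = fs.getNode('Q').mat()
w, h = (int(v) for v in fs.getNode('size').mat().flatten())
fs.release()

# Undistortion + rectification maps for each camera; remapping the raw
# images with these yields the row-aligned pair a stereo matcher expects.
map1x, map1y = cv2.initUndistortRectifyMap(M1, D1, R1, P1, (w, h), cv2.CV_32FC1)
map2x, map2y = cv2.initUndistortRectifyMap(M2, D2, R2, P2, (w, h), cv2.CV_32FC1)
left_rectified = cv2.remap(cv2.imread('left.png'), map1x, map1y, cv2.INTER_LINEAR)

Given a disparity map from the matcher, cv2.reprojectImageTo3D(disparity, Q) then recovers metric 3-D points, which is the role of the Q matrix above.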
diff --git a/perception/navigator_vision/exFAST_SparseStereo/example-data/left.png b/perception/navigator_vision/exFAST_SparseStereo/example-data/left.png
new file mode 100644
index 0000000000000000000000000000000000000000..0c5b9a18d2456afba10a70f740f0657bb24c8f36
GIT binary patch
literal 160454
[160454 bytes of base85-encoded PNG data omitted]
z`PJD2;KQHpd+U#%o=%awVFf5OhOt5J@@Ufr;O5=3YyI@pAl+^I)4lc8yCj(i0iv^_ zwJ;`3Nd`C?WBAbzyBO=TDDde&{q0A+_!s}}qfYzjO?$N4%{una|IhF8*UwLW{o;`W zG!hmBLdId~ECo=-EY&uZ1|cuPAVL7>n6^n`I8&$|X$%_F1ZrBdHr~wLDd#RL4U=j8{PuQBpg=!4Z(9t}VVK!^bGALsqZ7D$|EJH# zgTMUtiwB+G{cvA8Ie&3^!a)Osz8q_GW5%=t2&kjt?xxSn`QEK^9oV1E0?NqjDiG=6 zfNXTL#bqIylEnd3h8C-lH)GuV{eSw9kN*3A{$v_;{p0o7)60`F;`XoqGoXvng%%}dc1D+#9bMA1jLqctB+qxse~ERD?M*oC2y+LjMjAAb7uaUE+G zBuNlOP$+{rdTabYBGZuBTI^&7lgz}1vbghtK$u`MfBfYs3__~{tt2s>gO-`K z`A(~>dvB__P)0~rP4{o#9<4}Ix;jwRjAjk;%{B@RNCyhA5=|`umgorr%(j1`o2%<% zDRKk|w=z@}2+>HW*~!I=uU|dbAMX!09BjkxZtoPPgvvKcG9AXri|40%q61~Y%7(?o zNL!Vag^(iAA3b~8=dD2vMF|F3m4hXhRP=;f1Py~%Ki?-%<}!yhv&l|knkU-q1ezC$ zs>A#5KOOJv(XU^epDu6S+;8gp%dZ}Ipbslwy$(q8W%6bKI?5H>QKzHLt@>EjJl&tf zZWyB#pfae;klj)~=%{{d7-%`BsH2szZB@s+yZb->?T0w~=YM&*!+P`moB6lT`?BWY zZ~x={i{JR=Z=dg{Y78f$BeoXH1go+Y*^1(51E#XUqFIG&%68vPjcu@EQA_sn9lK{? zyLL^ZA7kWh_%cWMp@%@i$nP>^4XBo_>*~(BAVow1@6CpT^ zqeZLiMHIn8-#=;b>HVj*G9q9Nkc%Y(fJ}xmpH5$Xd->Qt{P1~e*3)8DIx7oR$W~M= zW4?Uxa7SX!DyI}S+E(RNl|vYnieR6gJUyE%*lUhzh3X8pR7dm_($y=gZw?!{|9FI1 z^;Xq<@9+sHB5cyy;X_3Pi+7*jy+7QIFTQ#CXwQ#7evUAI{`#Cwq~JIr28l3DIT~l} zMp=e-Yx5$dJIwpDngTgh8bpth!;$oCr2^ZAmP1xys3B||EAQ^F@BZ-nPwUB_{SVL1 z^6v2A2YY>~xL?K}{>#n7U$m$H-B)M5W)<0ovWTrE(lHESkUU_ej5d12Txo^^Ar6G) zdKbCdozBw)_hD_@Zh!x~A3v@!R3(A}Ac!b)dYkv14lwz@vYMe(JW#&dnK4PC2|9eZ z$L2)I4Io644_}{n|^chMUW zrX)**C=*1rwCa*oI6=#@f=_+F9d2*e${bnfWI5ys0s=sY^!DJDrKxS8( zYB~~$l89PHd-CPOy|wJ4CKL(?q=he@T5U*UBn$20?A#hBz>IJj*^Jd%u*`^ZrIWtU z2kIUuwXsVx?`lP(D~Gu)=BmKpyk*?~c(vSYkAD5??9`7p9~Ngkd9~xDAr1w#$xMD^ z8#QxBlp_jL70khPjdpfojqRGy`KF0*WHkT=Jc+O}rwFRJy!&aP1f+R* zx07*naRAAvuLo!1aR#;bi@cPNFyIR6S7M2(+s7j&G$f^zDwKMmrtb=p( zK53$#wUg|0z*tR%j!mFNwt-9$91Wx=G;8T&8t}3VbfrwH>$mTh+wK1+2u_?}Z{O0q=XVH%zWmqpSCL?*Q-ff^jS{1r;>jf$&?^a%nYUs)o*=H~k2YS8G)$}*om z|Kopld5#7zdPh`sM^##}2EAaJWO^k^H4;Z#*0mqCjGl+x?QY*?zxDG^wv$bZDVJaU z#ot{Yk}4EHfB+Fbo6R#j%qwf@mHeMkMfn)jJ$Mp5%m8_bTCQ*8mCo{_j3|?ze|||R zG77Y0QHG?fvX;et6e@q!matTyqqqUq0P*UY6W`z|67g&&Y zlUW+YGE`KhMpuoscfe6*6kRfztQ2x6V!n8Edbl1%%0#k|R+bz{6w!}$`Pq}xFozGI zN|7}rt5j7Nj#))FUNC2Z?L#{-MEOlb(Jbn7&+}!CMCKOg? z5hyHLuwQcwgM}4?Duf&$t!$iLeRbI!=~`p3dv{cq4dW2m-FmqU^s1^^Wdf1LHH^bm zwk~hq9PW4V>5m^>w!pD89i|O7i=%gTjHIDT0O;=J!`<0|p@4-AE;^kfw1ynfYm+Od zQG%z7X{gpc6EQfL;;L~#-RJW0!@KXlxgLtiFr<!zb@@#1`4KQV? 
zB8UVuMz0*}S~_Ss9Ph6`e7xURSeA`t&1%!;XFvb(=^4NRzWKNR`NNXQ5(RifY+D3X zW0lrgB~-Ei{u{4imY0=zhRWF$9i>v=-zTC7D`OExq+fo1K{Wwot{%e*wmM)VT`4mu ziVhL(EI9yKF4tclK$%q)%;k`mo5NtGEHAMv8*RwTR7Q}4rUI>GBA7Wiv zWJ+ZcDH>=rk2wI6#a1Yl3`rzXW;vF=`SjVv>~d;Yfh87JRE|3AwO@;fi2&seC^TDv zg22b0KAeTo#;_EyL@@y|G{S!JSNO_S|bkF(j$hQT^40m+%f2pdu9o_$`7!xz{X$zAqk zSq?X^zkPGP??eC;B`*)^yI7G|Bb80xhj@l{o7w&_mPBAoThm;FJP>-4kZc+C0JJc-wU2q zgj_7Q%<@hsnW)3ADnJyA@}edD?921b@PRgfD3n=pctKt&qRdRyFkDuonTp6({lm*$ zhKd5EFsp`P7M2sS5UUxek^@DrLb!Tc9fT}hMM*STQpHhyREZD;6q5kukP}{U;+WC| zhou#fo|Pc72=;Z^K6~*X<}sHlYMBNGa;;hphh=05G|q6Ri3CB+%D^DJK8v`>i3z4!~Vm&zyGiw z1-StLMHU6o0K&Gv_uv2Q@kNDqBjCNTwn`MZ)*yPT6i2Fb_vY8Xe0967BNIMf_>iq> z?(O{H@BYrC?Sw+D|KET3^w7$D`%nNrcpodOCY8rnA%p7sl6#BGtz7eaHwgis;ooxeZQ-- z7oT0gSel2FCIP<8m9W4lPbd~3tNOasI{Lb={do8Nn>V{96EZpg0ssWiWS~ty`5*q- zqf-OT2M5qq7=S`q4(}*s4vhW%o3CHKzw38*YZqgC6^9D7X0nXm|8HKLsGz?8o4@?} zFtR8=-3U^RN+k*ygbJb{h~U3r*(k5Hf;@9Y$H=OpGE}e%h$0GvSaJHGy4FwP*cZbctPX3{WBW!bGLjAg{d=a1W|qOz!=CHw7RIaE~} z4Q_;ifeOGgP}azDfJ?Tk7bndqK*Q6^6C9cn9zCE;<%S%?Ev-TY83c|(hj+M`T?$x#W_E>+fE@+Z}5_MS~M8fGm&z32!g{$DhAA%eD|%up01n%OY41 zRh8&D>Tvzd-@m!Z(C+=+s%rD&tDA$jhRQ1b&L4f=uCEWm?uTE#J&YpY=8#GyKq{ap zs3?;RM*gpYl*t-cE4z&>x_1zgu>_hGMN63h)ae&j@N#7vjwD1D(C4x$tZPjf=Lr^I zGBt9U2(7w)dEcGolB++9c2tWde`}R$IM@qzbmg>PxR2 zJaWximfLr~`sTWK6~uIcCW-(68Q{g~*}wRsPfjcyIWbS9IoC*Y4kQd+S)=;R{XhKj z?OvxBn-8zw-3)O0_|1RuqW_ogYCE;-m)GkGnP8A)5K};bMF9{+xcS8Y zDX}J{988deL=q*F6$qx0Az6|{$v2-qV6!605#=QgLq@?$&`{>g21}S?OcgdL$ohwu zyH4}6Dig{LE`z%#AeG4nixRv5D~Dn*TdAd05nh$8Pbb@{L&Llia;27apGnz-!;x`v zdiLN#>bce!Hp-Djm4?8D927H;I)CksJMImak)S6M?ZZspG{S10y*3| zd**SVXJ7JIhxORkbsX!cj#2D1z>G0H{PEA8o<+%qDYBZSrL1T|$x4>8RImBo^JrPs z^Y-S|-+X(Ms|pZF5wt-9ER#_VKKQ3U|LSs%$u^*==pC_^duo_UUA7+A5C8b9@9yj5 z=_CB`=GDhlm>xg5y_`a$TxAde8bB+WOf-x+0010a7oR=Y z5?N7cBZi~oDjz<}Y3P8Z30hN*!7P;nRj8Q4nSTG~`tGpgv)_Dtbz;zpg!0hFSbOf* z_0aoquXU_BMx`W*WpvW4Ih)OoKKs#=3t_1HjHP+35ETS^$tb2Q+Y-!SYmL5E-hO!X z>sQyy+EZ{9T>u3VqDa8y=fC@}e(TB9!pqxYvMMAHQYp!*TyxpI`n$jX*z@wmX1w`$ zeSP0CZ8m)*+?vyL@>}DZw+DOlbi2I0?E{pU5C{n(0GLDyGWpcn)c8m4G>9yiXha$* zBVZaP2aG0`NpO^K^66zOw^0uB8j(d73^HK!eo%8iiD4=SNs=s5{r=nA-c*KiLnmk^ z2ZIz*N-qec3NQtj)ylgkX(?j3lz>0jc&8_m*+efOhkD4kyXj?{)60u(OAX~fPavxj zN+gC?B$-vmjH`!MwJMZUVNjsT!D<-^0BD)CoR~&3jH3yQ^5WT(ZLE>hpp=cnhda`I zX0>js9D}SZt2KwMHi+X&-}pSwO5T5bzuT|JM_+t$7M{H;kL%ixI4o;F)|%_Eg#)50 zOfZ=vobEPZGS2q!>Z_~yWQ_dC2W|3hT&E#SDku$3$kw7Sv8cE#y~f?`%WuDbf4uKm zA_-Yp895~U|d*7>20+U6ONf`|wC~6ei$+UIjzcll5P^2m-!eG$_R69Ze4OhR-5LOBGHFchsQg=7UUA2xsT z3^1CiG_4lHfOI8T#bwQ!G(|L(Wh;|Q=IIzW-`+_e1Q&p!A<8^l5`2jz-X0@)XpPX%w;T$YNjjS5#`+aJxm2AreR{LrnB+ zMHOTk1Pj*6A%g&l!z)0!Rk)>zBXfTKWIE9>lrlx@G0HhSZL@`7#iEs=YV_Uxei17y zLFe{-6HF{0ukUZy{b55J8o9F4N+=iPY=6O;;GXM5=rQC|`xD9HaN${^s5L*FSt*_BmuC02l-l2vW3iv`_xzk6)a!+2mC$ zKt;8se2^=LsvqxO{q^5}SOGMe8){i@Z*YD(X|1u2fr=+*?`~F1r&qc=bVx|4=$n}Y z&EU(iS4@%1kZ;3Q@XukAQ8cJDNo9ayIE!SN%9#d55j72Q^W>RGS6P`UEs9c-){z}u zt<-1%lB8vTf))Gj+uMPLVW0rA+{&xb)2R|AOQB#UlL(Ws970(0&Eyz2H+ya`Hzq>~ zK_SFZndAO`rE+tA?2n$z+tI2ag@H;`j^xnD(R-8u5SgNxn^`$jSwR#f!L0&BND>E} z#d6RCmQ7LzpFcTk5IK71;2;ND!ld($dkTQ24zxkVA zzppZwZBOP{_qX>SdIU;^f+XMiTH<`Mt()CigorNsw4K;!20Ppz)3!FokiiV(pNVGb z0T!*?hJgYAG!aQko(4EnaDn#u!>vRKAjq;bm7y$;>a>{DYk0*JLNG~20UG;nZ-<;1 zEsHcC?rCanEV372MO6g^wIoQ;CTo+$G>04Y?)A+$dpvPzJ_-gki;+FC?)It9%ZtPD z;=!5EYcpA~R+N}SSVzy^$&v#SDw5ooN)8~S%9)gaxx_LD1wlpu>CPysN!ELN^8B3T zWozDZz{-+7ji4D#Gt8nl6MJG^KlG1dOk3X+^2|?~%vZjBzuO;n_e*CQ4M8esG+KF- zk!*5V1Ew$`e9S6y8Ek|?cK-O2hZswLyL5RUs`_@Z8WMN8yZQRB|KU1KCQO%C zr^k2q*B=&%a5FLyX?8Z-{NUmC!!NJb6a=VX)9HL#LDRb2_JmO?5^g^6&tMV-GxTN@ zC`*Z!QxPZ!k)R2YFd62jPrb~bA_ub>L={p;BJf8^~WMy6p9vQ 
zGNz#!0FEky2!XI7-Q_mf9P`9BVX9U>4&T1noj=}a5ZOIBRd{75cFRF)ao!$q_2|r0 z&LBKfc4Q8r)?8MBsbV=;o`{L*HsAxWBA_zMpiDH7m2{ECp@|J88lWBzKRKN%P`T{; zX1FL%f*m6Z+Xp8!K=6^Oa}|1ZY+nJk9Y50 zzP&jvSq1?F2pA|3q7jPAFMj)vzPOydm8%Wdu)^k0u%WiMp3C+7SAYBKcO`?UxR}Or z*xjrU!#8Fo^JzYP{NTam*(v|(FTY>1Afg1$XQ$23wTxrWB3KmNPdD2Q{}eRA1hL$U z1^_^ba2C8<1!RWgXr{FG^aU6wd3wfh(Mn1{Yog5>!L^Ur(nwGwlc*T?-+xG!lMo=! zP&N}e!8BOu5Ym89Xo|_VK2PNh=0=4PU2p&Ke)DKf6-urViVQ5C``v1zcItKh$>Ys7 zy%10MQw)pQ59mg^d)=dSoR;j|Jz@^U58n*<%rTa z9MYmii%o2{mlqdj4=&oK5y!v!i&uxCG)NTVbfe0w8YmPX7N)a{dEQL?g%L8LqMU^g z2|*$$k_9Os3SM#uoSr_}KoG4PtUy_LNyzM)Cj}Sv-F9v;m76>>nBI?H?|YeylBOgv znwP`Hko@~$0A3Zp~+_Y)p5>np%=f8M;7-^CWC~qL7fnN*0J6lb-r2Oe(0O0 z&mPCLde5BHz`{U=t%Wf#Tm$=~p-m}f7}kaX7fUdq$TAosX{<;CSP}#ardy${ZQ=C% z@x)2VG4=lO{rg^A54mm4eP36fJbh3id2_R0v!=PvAbgUMWv< z?;5pge;l{BySG2Qy1v`@5&^(S7;OskHe0R7rOyA`KeW5cpFF+riE82y6Ald`vDVn# z-@X0z@4o%G9OV<;BG$F@^ue=74<1~cUMS>VqX(GV-CzD+FYlEolPu{l2tt{Nq6pb^ zakkl>pYs=1FcK)3oT%TRC`6Na&GOb_ilD<3L}V)%8w=1RD|BfZbvvu{3q#)kAXR{!wb#|*8zJl{sL_mj__Z(AE!-oLsVV>;Wehl53P*y4G9oN&-fXrpZ{3f#AMeN6KlxXm-G3iH zd-~WJURHx;o*W!Z9dQ3~`QhDn|M30CgTm)HncYs|kH32OB<7&6!?J=hn^3p^&;R+| zZiqq(q@_`mWP|{LhRuWRe0FldU&w`^RGuIWFrgulm;iLTg!Fe#s#}M3MzK+^T7^p-K_3T>1WI{)vuTueD zW-BR{m#4Wv1w^aiD5?x$1_wq6OMCX@j6Ec;()G7LtgVg*oSjq(jJ`ibcpQBRsvz39tc>$&;0h)&cVY$t~eIC250bY>%E_ zc;Ef-kL%-~J~^2Tt(RNL2H=ngFu_8l>K>!t?T$AefB62*{r#Z|gj$=Y3740fjkj5~ zyZx}M?N9&wC-L#?`the%+YBR;X?C1PRL2-&y}rBo@ZI-ce^`98J-aw>wmCc7$BQRh zBW<^bZ^!^c?1%sOf4tg{l!^ryArl0+L6pL_;OVrvxSaW?0ti@zmlxqHf4(cvgsC*;K# zQQiqak!D~La-XiAUDxhkxWBpi;q?!< zcWVdAEY420fwM=Gxv~0TcU-{d|NQ4y$J>86y!ed=b8HtcubfsI%8to?+}+)M`0)Mp zesg|u{$QG_ireMg!>g0&(8gio7Ba_}IQ;Ja{`cSQGTCi_0sw<5d^5)wBIiwDHorWd z`LEdqWu#!DoMMpzhYg6J#|BFgO1^Vvm9kSfb5frMP1!^Sbw>2U%p&@oKG5u)|+ z@_J2{iRNv>>~ouN;tDzmU&0YZNz*_ZCZNL12&Nh^7>0Gd8HZ8K4K@cXEuiCZe~)Ps z$NQYmPERpGms=#rZT(^V!C&Oj+-iTzU0}fAqpB}6J!7Y zAOJ~3K~(L{^RGUM*rd!07%WP;&*T2EJMm{?otz zdS9h1m8{Il%IMq6c^c~oH8X2AUz{}lsU)XDOjZb1)1ZtA89kXH$_N%^8fNz3M`2=m zp=pFohgCGK5$Q{mnhMB59$^bxsd^ZSL|Jaf zx?%(d%fK`Un$krOWVsR&jWVXYXr^jo;K`>aXiLiqtjCw%4DD{mS!8cMaeDfr%Pr<5 zZ+`VQX(tc2xIb8%Skq*>JU^rL<^6|o6iYBUlGP#&(LMX*?1~W*;0v8h8CjNq$soeu z@YBZ+FZS2p{o^t{`i(EIT5TZ8GN6{KP_I}-?xzph{{Fky7e9NxY1U0>IojTa7A=QWy%)0~Tv2O3-tGIk z@WsV;UDJkR$tBM=m)pYi|NHO0y&p(aq9`K?0^6&LHZ)3ZMziVSY{Ne*7_4A`SWPS# zQ3kMFML7h7bbGLw}9a2vKeS$4O*BsA;Y+vLmc<9$t=w%!iAyg1tw zZ0eFHur`{dFnAnruER~e7$~}d*|c9hZJ1>1oDK?rRTN+ZtE%WmTf+rcj8@=k51wC` zQR~FQ?GLYx{n))}IpSo>c=pxR)P(i!yYG)|^ZC%%A<&zy0Oba{2lj|Jn1? 
zaP(MWEQ`2|KB{=k%`_a|kvaDLE_bmWUR>^9eVC@q zXWkv}S!ME!oUvIxUf*FlsojFOd~h-kns*Gcae$z^7ytbCubS`w>fe2HH>yx90S1r_195RaSt6A+Y}!0{wBesCl7%uy$tW+f8JbHHSWH*2n%4&l)L}L#~l^|VKZ2vz&aQ~Pp_|5l}{j_@CCySvNXjwts~(Su}_M2Qkb5+sm74sX2ooV}O759tt< zE=%-&`@X3wE$!m<%WV{rs6EYF5mQKWCLBtm#B}dfN+YavnJneNdB0yQF@!@XjN6ar zGLG%MaC0?ImN8ZIBs9-fhCwAgqN_!Zl4vr*2A0}|J}&_Z=_yY`)TH&SNQYoT(t?n; zLNh0Zri?OudTAxB)0@v9zP>wjt0>uEURSG6Zib4;)3+ZEJ$&^d_jA}V64r-{HO;%46 zd&PoLzIdv%(uN*AqsOAXR)^V!#ZYVcsbjkN~k1g%y)ohW9b7gve~t?fWz5gmrD6eSXavq?tm6GS~vZED1nj z7L#N^)-+i6(ij{%cs}mtKsD0JTx@rDuW{gf#IPM!Q&$ z56`q1g5l|!wy(#B@4q{)U!Xjjp1(H7R75&cW|0_@YD7%UO{vd?WO1b*;zy7PIb^cHP$G5u{3>uJ-fiNN*g`lK_=ue-&9QkwW z26_^Z$pnH-3v*cj45x*ROpB7%Q7+%CJS|I_#IPvosWc*q%(;222c>}q7%Zg9Il0{5 z9v48_F+BV1m+GGP}MC} zd)Rn$GeptZ&ARTsdw;|-vH%>*rn0VW;IOp)yT|);y_ilj=$<(YEKfiAWaY&C@xxgm zCNe9Um2^@vL^{(UfR;q+*)x1N^bC`VAV(Kra$Y_k?%%%8%@eI|I9*M~t2I_=1!S2$ zYe$lqUFeZyK^vv)7mBhuwvC3xD~`kEcenGHli_KYQ~hS7ZCn z|K~T45r9af2opho0%Z^gS-E_1UHNwa0ZRfVOBf}R$P$E@bO55Ir@(;f=98zi=196M zkc2EbNMy+*lOU`!91MpMg5Z|v z0tJtAP{-Z5S}|zVmFK%VjH4Zn8lPOcB8O$Y29UEYy%g8rE(}fcvQ#5zV&-V2XGVLR z;b{QVS`i45mN8HWrv$;$`_Q7Q6bU84KD~UpZEPl4o=9Oj~|X|Ngn$(zlG54~ByWLcvb zOzf4bC&RldG1U3K1hM5t_EZ^up&0!1hYz|s_!)-gAfVPd0~=9VmKo@xDj|L)L{ zp{8;3{7;`+&GaYhqhSSZl+woEB>{=a!u%e7d+~bTs%_mficecjLu0=R%J-gy~V)@kRgnuD=DV28vzpV?g$F&H(O^( zq)V_2Qgty4%2G?8Wn~%!&|vcP4zmSw-u-yUpkhH?KmF{}3R)35s#H>zM7Y>TWv@$7 zuO!+q*ObtfY)k+6uw2~KR>@wZN}WGGa2RcO;A;D9?Io9vRdIE+e(qLF;RIxndb2_| zCPOI5v|01Rp(Fvoli^VTrKPi`()!S8LP~PNNh2i#%H{KQ0TZXgha6T_%j1Hi`Q~Ef zeb0r%O8IgB{l{?{SGwoATAm;CVr!Q_z1~3m?%l4bW~Rx)U_i12lh(~3?UX#tvAdhGa{&I5Q=TP-JVd!#8NQ89| zJs!S0bf&TqoAoC@ol3DZF>3@pXv|a=C`s>zFo?Z0p7Yq2SdektE!Qt91sif%rlIX` zms*E7o_w?24jU^Ax>wi2rO(~Gx{rNGLypSC|1ZB!Qtjk`zS|Faa_Tc#4*U zSl(Ql0m&r2*F|GWvNBtGZ*zl{T5HyXU@}M^h-SC%9+Ii1wDIEkPo7ROWSWO*T+-wr z^&!C+LXshXrRl!+b2O>I&^|0r-&70H%Q0729`0CbIqx`ZuBSQ{Bm z_M5v_>*nJ6VtcV&4Qow~?n1MK^#boa=We&#yt=S#zxvg9wNXAX#IcQE4yuW=J7xd) zkH36B51;*Se=+U9J8!S+;WpTmb-P^;USqM#$$$N?fBVn_f)Wr7V1j^eFOUgA6a0B>I#H=JN3N z0m+t`rH)U&czw~T9GS(tBhq`3!T_|Qxz2e$#cA%XB`nS5h3(gsr%Em6ri@vmTCucLDMb~Q;r6)OhnSWQz+hpXQNooqII-xe5?(P;2UU&`^3V*|c1K_LK3$kB3DTvZ$7s1_9a54=rQ!>}Ip- z8iRx1yn0ovs5aCjk(Sn>*ML&8Or3{=?nF4x8=8<>gS_Fd^+U_GClFxJ(Xp=+u_? zkJq2L?SJ*v#U;_okdGbP&#nf~NbF`EfAb%|+m%207oSh3+sF0Q>hSIib`DqD?aCc_ zSYIuF_%~lYE)papEFGkh)HrUL>PSlp0Qk40U}-{054xBFYot*iECC3XCPIWU$5%H- z!I5yLz_bSQ6nZwEYn4N(vT}4vBt4Q5_wODSC{oB_oZft~btD->)=07&x_1ifi}E<1 z&dbs};EhGaRp8R&;mNC!%NeUxFtERGb@2Jr$IItihC7Ycht|Tg&sjAL#S~i3rK*7? 
zBOMy74oJrNe%~A>Mhz72N)%HRYwn3~d3Q#VR7tR;S(b2ZUOXF}9r@wovDaZJ?c5_x z!{AeAZ|=j;_PZY+`%-yV&|C@d53}#v}nZbL=eYGCB^k7g(V~BsBNP~$~ zNf8E1fJUWJuJBF}kTGczir>6+3>lpTC1l;x5T3F0OnP-=oLlj6B9-Y0kn>XaKi-Cd z+1aMCzWCFpR+(wcK^?*F*#mPwpU-hPcZFHV(q%4lH>btvadWfEm~%a*bG|>==s50& zi>n*2XJ)M>qy&6vwU06zEi6trTOtiW1FVik1&QPRQR%$|Em^`5Mc%v*>@)%*s05TS z?}cs&x8TLg>oWSu`tjXE)+b{;oal+N`sFXs>KR5nl6>*&xhj6<#K!7YZq zeE6{MDHw{Hn`L4w3Cms=iY>+^TVy+r&o))(X6bDVRK;+v4GQf~`-JL~SI<_d1bcb* z*_&;S00000002O=9*ZA)qMwFE6jEdF>8&WM58*D9(onh-$~k8}-16zg`L}<(+$8pg zAMTfUa{1?~}OM*e5eEM`!dUuoHVhjt( z?3}e|vf_w5hYv$Z#<`jb~fO(;t$NH)ZLJ{(Sm^CER+9SoKR zGq*%>vbqk3;d->Lj4_6K{J7L-B1Ip`xgk0bJa>#qRBH&a8^6K44JcYO(>*u{2*=r}DhSxt-xYtXE_8WNrhC zMQ6?}=f`83Y5VkMm|7;ITz&ERl@kB}0000iY>nrz6T+56hZbwZ^yA3DL(fje@C^T1r*U%c3wbL5hEuASq8$gkUg`0tHbjNx3A4WTJPP zy}Di#T0AQga7i9&Wb|BCHk61Ki?2$RE25NS3auUP-_Pu+Q2IDdZ$95<0fOq?m$@J3 z-En{FA%|2@Rs>uoRG1ZmnfKW9`4CCUvlFAyO@I*U zaY{`FG&c?ODkKlPeIQl3(}D&|rpW}R$wX1|ZWNO=z-Y=EWq9%QN~Bu<@!_P4i!tvP zSXV(Q;|fC#P@$}Olxi9YdcbHND4jzux3{+~qnB#gF19N~SzQ;&63u4m9C>c%bL>wi z46E&CU8X2RMQ`2bb~>C6$9;kP;_~HK!#MkNdHv<{0ssI2KtY|`Y&~WuJrFZ9JC$`L zybQ@+A*dG%3$rLq9!t;O+q}P%zkU09lQ{3@R;Fog$>%q#4GxDR??1eK_a2{qc6||H z)2i|Hof{Y?*|7H2|dC>!3nrUs?nJSdzhi87@L_g~0%#%tcQ5 zf`TLn3516Q3R4(f+zg}yW73qC%A)4T-dY`*#`z3xtiU7#jkcKE}U^5RDQBnz_6&f9|ln*yA zHq|;?yMLEy;}*x6fIG@K4FhVR!iW-SDk-`Nq>SD%Bs}&X-kma29u?E;i(c!9p^TX_ zmL6SmTiP6_rP(l+It@{iedxYe=KS#RakoEuWv-vUykH+PZK%(F@@fPC0Bs)Tu6DM7 zwdM6N6 zYQvRhA29_c4T@$!%jBGpM^%l&C_OurD8l32{lo5%Heh@C^g3Z_QGNXM+Q(`F=Ou)Y z^_J>gY7Wpwu%rzgwb!MjcNmg&p=j_9DTfwIo)s;7?0@+B@y)Zw&gj-uo@|!eck7p9 z9PZBNf1N!ZoYfmWe_C`qXX%6b{WHbHn_YGt6Ac$Y#yc86o@RLM$FM$&y4xwFWkkimD zjU*xO6lm-e(`X{XWHN@?^Ou{&XbAN55}v^%G8IH3B+z;tNYZMQW(g9ly??ueg@nbp zq8x&hVj?YFN;IWpRs&u)gA0HW26&;FSuTA&K(%V#^=PSE~*17r+&MX=G7wbgDW3TcJ{$z^5x{CWjh za9P*~XO%5i;Dckgfa;ZQ_bJ>9OSc~@3zQ*3JF zNKA#v%pMLfhOT3(bv7ccXzuj$ake_3Sc#fccX}IoyCLiNxx{X;PP#G~o+V@Htech>*57@^Cl|KN?Zmrp>Q8Qb zRB~00k5|{5Y6VchvoG&s`A`4tZb=|PBTO&_A&3G*){!Plh(Gs)XaE5KqL*gr0ZXDt zL3lbOxFA>NF+pNcrwUtpcc%FC(TPT-Jz*?hFKDEzWd>vkq{#>kqsyTI8p*Z zMRdVYDqCNTlNTZ3q)A>#GuNVF3{6G*hdyh)u7^E_iwhX6xO12aBEidIs?{`>}U}u zWo!N6t8d0nw4D1Hh;emuv(7xf|B)|OUB{-~xA9juo6^#;U2Zoowi>`-W-jIM{i^?m zUwoLMw6r8GL5LCnQ6NgKPBF>9ahTE|qbGv~d6HRz7Nr$=7?ng=NiZzxv*#B|8U+PK zA{fk)1f=uQF=Fg<(xdol;^Lw!Z29oRIYUaNSf)jSC*{(RYHk1kAOJ~3K~zDR2xd=G zV6AHOGL0T7160zyf(0Jbn8;fDvTv=eS8+Pk)zd2LP7WMaX3S15uo7l=N}$4VE@9ys zI(Sh$O&!Yf{a%tw3>83~UZXZf0ZoB0x+J%-5ZpCMjPNqPe743IplU0~ggFu*ygRZa z!P=}wu+FT#NOaX^kcEb*E%pB0Lu5pET3xRy$MrP94TZLL!sMKXQ8uS-^uRgqX~l<3v86mMheF7v37GPRl(B^jBt z1N+kRq~-1p-z_h>M9}NFT5nd>6vuZz;>k)0E?*!2^5w=AuuXqIyxtnXz}Xu3aJOy$ z?|;2p5@87dpa_u(0C34!O94~h-&nfj1PGAf4k#UH#V97jvH)QQ7&4*q^wr>!JY1p_ z)thy>&`VjDw89avzFuYrrxsjeHbeaKCy1F|fxSDe8g|_f?cWACC z3_*Z1tPf#=rHOzg0TV0;7-1zu1I?d(dSNn)60npRJdnjgbumx5h0L-jDwBOYkEl!W znJ|E%dan2H=ef_v-Py*=wd#1Wd9t=@rPRS)%)mKvPVGA+WX*sj#a2*bF6H>{os@Mk zq-0`&9H`U9#pQNgQ5dK})nJyIu+{@ntrJWD(vVAGM``QUy_F81%|acqw56YQ_~FBc zhZk2uDMK9#E2-}Lw*RrcT0?gJ=Kk|fH*NT+P$--LE!zzY5u2Q-Y)?JAel%& zAYhc4n|T>}S5o|&G>3%|G{}{55O5_djUh{yW$B=&x|xQjuUC?(smkUtyeB9_lH&8H{SUO#8${du#Y3hbl?cuFjWH(#r zvCGHXZ@xLKUVgHfu~}bVJ{jk(adlA^S1jdx?!EV-PNhd$O{hDfP)Xi>^#N;CA-o&I z!(d@G$L*`v*P1NN*hrB@bUQ_3cZ3|OZ9G`%&J0VGZk((2G+6ZEh^6=W82$Km*?)KQ z=@Y`SR6|0Dv9@^sy*;l(v_HK6^3x~NY#D<-7 zBL-%|N{s>}EeL3Nc)L#|G80)=Mcp6=ge0Vqh|*~=88T&7hky|zfNJJuXvKW?QQjWs z*5%!X>&qHxuyh}+$Q2+HL)K(OHx(!&qRHG-y_j!|=%Y>_PAzE)!C?}Z=3%mcdCJnv z5+IyOA)Ew_Om1IYj_&p39<>vmfTzXSy|btn?&NC32vn{cRAAhjZH~y#3iUM;5 zdi4pGh~>Mt+ZXKd?T?>-_GD^3RP()FUrdD^eO~7C-NXHm-@M&*Nhyk~?pDmr;G_{M 
zNkwbX$-fn8X#|NhKs2NZ&|UPPWO9lW10)&t^s{RQ&(>X{bx{FNPYXyM%x*B4ygMXM z$uMT-LpWRVhNp`r98(`ylm01{qgPNOZ8N7iXI-!<(I;Pm8j zqX=+hfm;c2ww%XnTTXpt&U8vqPN$Z`$P@;&QdeqVyWMq!d4y9;l=LiNCW{)7%fy5c z)@c%HqD*PFzInD*K~}-idS!Fa)GM+!w}5%lQ?gLh>^&d*{vZD9Y4!Te^Oc$VsJfZz zrmP3AR;@HI85YB$s&hoWNa2J;1}zC9)2x?pwXp{?)$80@Oj#l$&bb_Kzq{}szI*Z0 zjXQ%Qn5?sb>hrhnFE9P@yZbL+u2It$mi>HFTO1#k1xe?=tn&|F-}R0ZY}h!5;vQxR zGKq8v657&&Ker?dl8Gun62>G2P@pABgAoK_-K-fd-aPkID$fUFvz}x52tbf5$ShzG z1Zk$QOqOxA8v5?r$A%^c7EOW^*O4wqBUDIMEG@g_Yh10Tx{5YRZ%d(v3cJ2yh z#<*Ga<6*yFK)Q$f#bkgi&8BCc{p8h%D8ZDHor_>|@dQ)F&19kP|Lrf1@EJTp}Rg(s*;L~m8K)q=Ft!Z~$lr^AO2SL+YI zyZDpqF+I~M(h}a3?S~&$tGfT{{AVv#6EdRv!`)cg0`=2}Lp!|v(7VuyNS3X6C$bdlB3XPYriSKcxh8o&#Wu}e75!{zpPQY~b{I+;F4F)O7NhUCUTIo&>n;E@i8(!ezH zP7{#Mz!d30nM$WX${=Axt2fWCES++usSRSd;8|PLl(NT(vuL?LKAxS&xoH9W@-X*I z&wyO$5eiGv!|S-JWFpLbb^XN`&nIivN}1JjUKY$#!F2RaTvpUbzWb+dhO1|yWd>?=JgfI03jg7(Z=chAC{YV_XqyN>%Dn7kE}Nz`r3oss}cRN)~oTG z-@e(Z0ARgo(L2Zl%dEn1mCIQf!^emb}b7mI<*?A7^M#n9f@U`X#LHLv9*U6-y}}O6sDZ3ujPn8Ki?M&Xo(@0$_^w}fK?wWf z_irz08^uz&SX6-|`nv8wgD~Uj#m~Qb(R&t7Jm=G;YPerx0<+&-UD4~Tr|-@$KfT>u zuZUKKY!FqPtpdq0Pd=O<2;KrSHkOskWhHuo8gR_SCC{9WIUoOe z{PgDGo4@|;=U1ewUsjvgkQm$J@7KHb=66?rd)IP1ZgcPVhyA*5U2baSkL@-t|NiSY zbJ~FV&7xowoSw+(tfFls&4s_LFbG-@Dl(9*sDhHLpv6RtRw7*`jkx{dRacU!sVWlO zgtXbp#_GA4u&UaOsMz3^t%}FLyd7z#IZfQ`>?8@>sD%Mw3SswXGchR^8zUjKpg<); zt>CS&ZFw2%{fpa(5o23tutiv~Yn!~g=b@)l+o))2cCo2k4MLVI#rSa8SU?5{06?<4 z2%v3^&PJwSs0=XN6BOYY&tBelQ5G;FrOj36t^nv?ZxvYkB7HE9By8}*l}=N z^6mFO917rsEpx~O0@WP5{o+Vz%E9&1pZ)YYHv8;z9SX|qEvhYj+3lss-PnGZKl$v6 zJ(L(YT-tt7JMYgkj#d5g*LQU|!O#Dv*E_aJkMp4il-A%N&Qk6Rdjl1?1d_u#^jRHK znA3;fe|&v?`Swqre|5##M!=ldP#PP$3?->!lsP z{nd|06ioFi6$5EcRN1KlZ2%=V{t|GwD1io{$(C?40G8}f5k(eMqSCth{M8zo;0&iL zGGNISbGF@;M)O2Vl?9TBBrMiKPMZ{2q&o!$ix#wt z8hAQwc2(_PK3k&|5ANNK4!9cUJmdaF9~d~-R!Q!ta+F3?kk%Hxe7!w9W~pQpQ4J+q z5H%^AOL}&L<>r(TvdQY!#QOTh5>8brdBA1MF;00Ib0#)VaJ_xD9v{!!WvlzoUVC10 z+qUCh-W(DzZMjUa!X(I8cTJWd!iwd^=g-zQz^bI(tc7b^dLtID6HyOu@%nGqwL~61 zy!~*wef_Kr&(rqt;llg#caK2(h{qgO~`{N(~@YR<+M&&Y{UPm?4+uPsas=ohjcfZ@IX|4V4@W9(^(Oju>@T~HNmngs%E{AEc6l!R&yz^njgGmA87P(=gPDr}~%KL2D5xD6By z8**f-RG`!@ZFHsGlC;X%VJqqL4?i5pN_eWU+lvLDKox0*vO0>QX_sm(I%tI(#I(FL z^q!a6KAbLrQoq0Jy>}05)#!840A|a9-Rs>pk=v68)zmAeh?WpS8c^@@=`Jlf`ynwa}ktFlu3iHp0D6}t68TU+q`6L_DpWuoObr>r5!hC6LItDoiUH6 zyqw?t^-S1hl?o+VCLC^k-3djDGR3%g@%)BMi&4AKjy=TQHbgPRDF65azx>-9vBlv} zzy1+dH=kVd;kZ72Jh^H&wrz{Mmk0eXU+yV-y!$in7BVF_Erl3ZU89hNx-25Qn^~nf zImhOF^T+GE%Qqjs{4%m~LTE9e^fr%g{+ux$-`(8ZtbJ{2ZM}RL_kE#zpK%%cl{$R; ztG9@^E1WHBNFbtUqQak=qX0asDAcTrZvPiK&6QZkS02wG0aP`IWXE0Di6_tWm zWu`5<6&UGWWL6|$sLIVgzyHh8g{hk1xw~3XrVW}PASFuxiI}Z~n4PpBu$u_AFxzsT zBj294GT3(ai}nk$q19}lFfC)$)u&fu%N#XkMT=flg~jBMkh5dy%RD}0TFJSLPE1mq zjGhoMWhgXNg%}J&q^mj5Hs|gx<`|dC^E7f=s06rekJ}g)u9muF+iU5uyu78y)A@4Q z9v&`E!)B>yw<2g&V_$a;M-ix&)N=d$`Q3hv)$8h5%VC2vlH6xj{`iKU{{8d4t%ra9 zH9x&+Pg}j4Yo6xw-QhAcqSdqZlfQqd7PCG4@$vbUBNS0VBX^rOb82I?=x7|PH8nH5 zRgS^!{qJ!-extANVmT1I5^Ihzo=)e(`vX?IyWHR2?zFO@uYNQ3cR~x=PkrB7p1%3F zAI@pmFqF+S0GI_Dgh5!)6@OvDfg-?Mz+AE*rL1ZrfHoSFt|*8pmG_^1dQC_p%tV5a zrD><4>^?+9%1Wn9O|c5)@!`z@X~TuG?iLnZVa|YDJ*iX_+>`<2L!F1r}W-K~rHP$B2XnxnYn_5+G$hy?b|IL2a#zch~E#uG;<8GPmQ?cFw1V zbHO&{Du6Ns6LR0L5=uodif(cH>Z|830%~N1F@s1Up$66EPw)Nt-+prC(jWinFQ0tz zY<~CWu?5a+JzI{Yy<49*&p&-})p|Sm%{Mon?WUWOeWElyxf$4PdyrPIVxLaO%#l?C z=fm&cTs?jN`m@^=#Vsbs<@9hl9kS|t*zf)3^78(AjabyP$B*aRU9YBI=i|-N*p9#b zx9=}iD4i*PmX+*fgt7#ny2?szDV%I+DQ^Jw=U|>c8yQx%V8Va z!#0yffpF2`NU;K5c54teNUI{n-HV@n`D|TcTejxPb&Gn`tC=5@(-`})cE$F zp1%C#lm44OJoLNqVZHujf5PrQSd3+NecdZ~{`K)E_sHzi?JN!?D!dOjVTQqMiIyyF 
z6Nz!bAy41`X}(>){=9Q~yNvVYcsOhsR@JzySM$g5^7*dC(p#w7yVKRxLaO*!&(=P4 z`u1Pm9Lh!kMG6M#FvOrjEkcSA_)CM5(`<&LM6gH$FzHZnYl2Fgw8>Jx|NM%!p|a^L zASN{EM5#DX;iSvuLfUp|GCeoBz*&He{k|~?8x%@y1Wd4c1xvLb-3Yb_p-C+q$(hhN z$84AF{k*-sZeoO}rRXZS!Iy?Q)&B0Ka*lM((ZV>{V;xWhP(;+q-8>y<&ZS64GS`Wi zX0Qa=Z30m?<(OgEfU7FX9@#ROZJ;U0#gb#D$9($X`!UK9jidVge%HJ2ua|K-ZihKf zkEe3c1Xu;pGzk{k)ha-itqc&r^~<0CN0Y^`|Z1@pZxS5+iCl7x*X=50n&}WocHVbhxq(@7ri%yrH8}*#!I8l@2~gE!rZ?3 zm%kpDjf{fq0xV^mF5RK?jgOaio{96i@M95xii3P2VLq*Y-#8L8qj zZJBLWLK`?n*y2wwsUZ zP2BIi%xgY;zfo7~1^wpwM&^%y;3v=5%ZKR^UF%$xYh}O<8Od>dp#cor=YHI#Q?thV zUwyOu{MO^+>2&mT2o4B=JY`=`KkQ%Kx89bJ%Io8CU8AejVcTD8p)cS5%lF4&B8zQV zno4Vz*eFnpqOzAG`Ae%vDGjs&;5M5HKy)=12$`BuZ4f|JjmmD=P{~SwlrvzfT(X>$ zVFW8lX0S*`NG#K~U080r%8p68r+b+}NKgaWq!1($(lU+a)Z~^-Cdt#O_V;%z5#pr< zb_ph-G3zpYdw)%iF=wj`jwrV+P!kJKED>@Yo;1hGWSbOCz^ra2+LSdYDo0FK#YDo$ zo&hy6f{hJLP*9Z_+xdLT{P6z6sbHWXXTQJc*W&)B=QtjZW6aajL|N%5Bq3QyLFRJR zWJgg6nE=7-&%gTYUZZM5Z&6T0j9^SXeD{tw|M120Xxn%H^{>lk%Zryc`&!$ST#p|w z5m&LO1wt46?(x;D8=oJ>5>4Jsa$r}q5hw!H&oHubv{Ngdc^>UB-u&)&?JjC;-kJj; zw~PvKp69MTy}A4JCZcnXNcG2$yR|Q?FAs6Qct@VT{patG6J>MRU9Zv3T6zN@0s){} zIn2p_L^D95ShAEPWhf<}4Tn-OphCnn;PjlCq@z?OfL0Y~R<&|cb^(9@Xof8m9T{>g zMnXoq676mQQFN~;kZf*nSCCDt&YobD)kd|RQbx`>oi!bp=lSB<4lyWKWS~(c#{f?A z(y#7srR{ReHdPf&B%S0?OH4U1*UPz-CJ^Z+mK;+maMD|o6KIzINA1{}AIUFZl22_UKK&Z^((w4ogqF4l( zmP703Kl$VyHWvwQf}j90kKexG?brWs(;I&L?XPor_T^XmWhpy~+P1kCno+{ceDjy9 z&t5f;Q;G0?#8^iIGdSgf&T*t}2Ta>h6vkK5a~e|g^)k|60$8m*$Z%yGc3e|LKR z>AtaRX;5*zTh}$Z{j^;5Xx;O-|L>m<8;jZc-To>x9U+<3NP}S~IV<>&2#_Yqf@HXH zN~2paXDd)ikN{$`=;=9%0tkw*P)(o!7h%x003rhh0hDPHVbe62a2nd(sHha!RLvb+ zm6RqK2mv76xiMyg5*jV1W;z_nNdRbWvN|G!1%YNoS`bO}hR7YrIX%beFW>xdsgH;X}q6(k>cx$7dpvJURE_3T8VLg~=29(I?iw3dM88y| z!m}_`Q&SnD8yW0Scc@{hFrxyhfPkQu?wtcMnS~9PynOe&zkIARKvntrdcTjp-0XDR zo{nQ~=i{6-l8$mwBmh{}vMdb4BnX2UHhSz|{oU)UtI|1W6&e-LsK?)Z@bdNlcz+eD z8i!k5Wt+fUGG)){ak=n(86M+!`t0wn#a@R==31n-Wm~L~m9`5C8ZM3Pbolt;{PuVn zm(u*mUF!(a859A{03y$)xIX;QKDoOV)Po(&`Y>;=ck0_^z1>|eOCA5e|NN(?IYGyI zf3t+JAp!&vWoKs1S>?%J0PY0=4ueDzVGyR^Y6eh3F=)amm_`8*lZ2*>08eQca~tDQ7duf5wf*&{&yF8X z#p*6e1r$tPa(FJ+kIXhbJtVXf>Sp=Qm9 z5+cUhRC}R?AVAoP+OUmw9G4$|`^W79Geelz-|u!y)6Eqw=i}j=^L)9aLBIvDXwd`! 
z^xe{CPeC@J89>oZx37Qx>AK4$SxqKN+|}vf4}V#%UjO}e#gK2Y6)OA<_d0Wu*nz?+uhB+@A$a9y6(Yo{OAAck56-|$lcx5j`Z0Lo>k?V>9U=) z!CwM&SVctHf+|n}CXFaJ7TSoSi%k=N;EIZBiHc$Y0?c9{l7&dvVm24)jBM4?q>Dw< zFbd{GySujnT>&>`0H~0ZG89Py1Cbtc;5f@D%1&m%q}JQ}=kx73Vf3UJSuQE(+)m>H zq3!SXoiMen6Phl=S(-FUZ0rmxlP)7rC8&CLlXvKXs9p%Wnc+aV!+SS1v@l!FC85zy za7{gOR9@cw;m=Q5LZMC9{`z{ocHCUSAGgDCtIKICr^@ zh|tX!Uw*zuYZrnLmWsaR@z-zW^%sBhv))M|(e2zWmE+9Ko6k$7YPKzTKJb6|>B@}U zvRG1a!mNusv83f3K5Xxgj~@?*9Ja_Bt>OH+p>ZAn03ZNKL_t*E`*lr_k?tmoL^%0) zX}b^apTE58Z8k2QQOc*o^A}gw`*EmG`ZAaN_|O04Pmfu!x!*Rboo2My z-R!Qr!>X`ZoFxyyblHv!11;T(LPHe$Vzurr<%*aB%!&mHiA(Qpxz@5Z=CrBNFpGng zwN+|V8}GmU&@@KZ_Dey{n;*>dC_5+=P~j!i!OJtRR+&_G3TNF;mf&z5e~Nro^!lE9gc^G%LUjJBxB=@nA-=peMc|_G*y9-)Wq@F_5Sqoe}s;V?5ycOxUE>;;(sOvn~k z2+KAc6`XD+3`a$o6{WHO0#pJhS_R6sA;^|Bfs|>$f=%o8#je6+&$-ViATCV^WO%}J zVvIRmFh$5J(Lf9?67kK}%QUp*fs+4x>vYk$Yu`aROv+btL>X{WtWFd{|;^+^899CM80Wg4&vV}?u+FyW=`6joISTABumgsFD((=R@|3NbEbkXT24{Ouof z{mIY1S{BT)T?#X2+O62IxMMJRK9Ab6YX1D^YxDW{m%AlgRA*z`IC07GbbR>u@wja) z1|Sv@8WRE@AG_PysDdmDKn5J!`Pk|$KfAw*HWtOcExlhJE}y;Jt>fWvce7)KAAj|K ze)}}i07#H3!hiu2K-mNgfd431rD`Zyazzz;QihbKDHcK4h_Yya2B88#i&Z`8lBz-p zTe3l-AQlV)FvwELhC2!wlit6&>J`}v5L3jY04sCm@CC?{i(yy{KvST>t#X-OuI`_= zhldHKEQ^jTOyoAs=h@rU5)yX;ors!HZiPf;OsLeBmEzj4vbmX1s6yaeMX*Omts%Hn z^r@t+|Zk~1+m`1r?v|9*@j0J?a$zuL9lulu|lj+bq2m!pv|9IjM# z(@+u6vhK=gMOjs%4FUk!?!Wl@lYO5+Nfu+%+i$*~{n=Na@0FRG-HeRLz!k_qR*t86 zIGv8ZiTL{K+Z=!SL;G~!xUZPY*7H>3;e32}xSUcdaWx6d3OG57tfz-HYFSMOXERu& z3DwiV_3rwU`+cKYU$3rP#O251tGgE-Kk$CNW~n4SW73S0$s$46BxyR?%ApcmoV@}WFi-*=X_$&BD?ueKKrR}f;h?8T6Hp1M zRPCPMv?P^LQBWbKYhsM7ER!l!Pe`U2v!`rDx@Jrex7V+Ae3w<#0TUH95a%3q+^l}x z*9HJHJlZ5CYF3#EqM^Cp?0a+Xz8WN!QZZEAvzA3-v=Gq@N2!>pG|JfMVnEe;ay2+u z899eFOHbc^^ZnDr3`ZsUdcD6^EO+c5w~xnx)9Es4CQ&NGArqx?c)zCrLM4ZRBtTY6 zDxQ7ulb1JPrJ5^J^ZjpsKlWFze!6Qm-MmypH8|K5XF13FHy;mARq^azFTdja@b>AG z&v$Ee#}=oD^V8+=X}e653wp?4%S_D@21Wqe;i>J1T8C9`P_bH4sqx`-IlcJwdfl(= z>&+T_#QCou?_b{G{nPE$x`c7s{^dXa?&C~NgSMgo(11ypKp7?s@)wPh${<;=3Z|P_ z)C5%B07)l_5-peo79ql*S*&nyz;syTB&wwy1ZaR3${u(iNFk*RaJ9t+c;{yy1Bac<#-{i3^Re6 z<$)Q-Dq(&7l7(Ikvnv-w3x$@3Xk@dhN?^m3w>FF^%~GlLEm{V{YydWtD@$h#RXqIg z-@p42x~GZCuV-D4H`3n7f-VzWj7m zvyBDmdi>WvZf*DZs~5h?@RGfu*fzIXJbawkz5nLn#Ix4BmzQ=`d5)`3K3&bcT#gUt z$MeZEyIXWMiAuR@(u2sVlEyrIxY~KNMzt~%J>{6P>-_FLU%!5SeZ4m9uv=Tk`OV|i zt1G|5^SE(f>he$j`|m%_LBoJSngmnj&Ez+{pnD^&d=L;{cl01ao* zQA!p?(glDv3DPhDh@&G}L`*0zoimpP{81u3b>TMr&^^;fL z5Ku!$S8t4##A4YHibXhMD$A@C(H2#73@uf{SXhHhBliz05V|$Mx>(Oef`BJYgbjv1ncy#e>krD zPrtZZdPzRFF33;kMx4j-V11H*`fwgSmiucR9Lp~Dw+m0l%jMy?%?$U@ilsX3ZuAw!jI=j$+Ms->+YPad)-bXRw7<)%oyl_i~qSuWxQlhV$@m z|LNNggSPEr1V}2R=nS$>PaBdZ$Y0WAn<@w;RivnjGE0zW0E9|FS%xA(8$kmF2sWsa zP)vXU(@4Pq$~FW|x}_yJ!;XYhs`AaV{RGmNoH-lZr);u-RzV|GQW2JwYDqwVYcycG z-QPdEyno_c25nAO4$tAa4Yiw_yIq^}G$N!>u?^?pG-hev)z^KlpIFu}w{KqhR_|aohiA+c*{HzV-?q=c{Oo24(h)nA#=9rJ-W`wkw_F`tuhXyo<2R36(k8Je z*o58?5*q#S=`;<~B!5Z5LXj+is-lzx7#RhWA{k{ua50KDDHupn21^)?N+Vp&00CP? 
zK$gMT65S=JiYBsEz;L+VESXzX3gxDZ3KP{P$e>(gRiBm;)s@v$RE;QOGHza8>+Qp^ zwlI) zJ-WRET8wop6RxY;*38K2lb4_)T~&$7!C5Fj{QK|TjmctB&a3@??Y&)HYd(EE9=9<^ z!UY&G3@~iaaEsld5~Cm&RWe{x1}w(%?2Dg1yDqk>eO|Wz{+r`||M^$9`z71Qw|{)P zyQ{;Arz~%1So%-;1L0_`Gv?aH>O6e^ZWE$&Ra+u}49NtTh^mp8oT$pJ0~kL(-ritj z4^h=PqoiH7rSJ>o3-Y9ntFw+%7+yzvv(1`MN-YwEfe6{N3XPRkYbCA`k%t z0p(@ON&vL@O9WubCQMLMG7uCp87?AbvjMkaHQF#_(izAo0g$5xTZ%@YR8TROf?14$ z0238(l15mz5?CTpEO*G&$=R}L3&$m(&_wr)QU)MopHwwtB3%<~`{ys~&HKwluS!<9 z2$(as*>=0zoiti!t8kR7P2|*mzphJfdsm}cQOtlN0p%!7vVzuJyB9PEhFTO$v$3F3 z)VakpOPG=JWYy3du8CZ>$KU_`mlU-wPdS8e-vI3Bibj&#v5tSX40QbnWf zR#62KfTJ|Sj-rJU!Ts%*U%c+E+?785=HDLIyH`K^yhm*355Io9G+kyb5lzd|`t`Er z+%D-=EFzZ7I=y|`v{Ua4HkhG=X0I5OU88u>gq@dW)O>Tg*{!FssKF5c+S>ZQ1>)ns z9)5E3IB#A*TcR~;Wnz4Px!oUbZmwciZs7U9{cpehI4ha30gwPG69nv{4FDznLZl%U zD>777YIGoDwrmLtEE1*#mMRJ~p=t%wjzWnV6g4df5GRsq&R%8pbeOXh=n@K);a04m z$uyyuv6ybN57CB$AW+RCfGESFX;zi7!7i+~&z|YWH!}l}6-=;H>Fs=K{c7m|+T9wv zv7oM#vCPn3frUhy2qD{a&qCRjHwJrY7ge1NY$y@gFdFF$Ycy*Rvty_b+O#RFP+P`G zSo!qle|vixoFtf7Zu-^Q<9gpN$EVZNDd(uN3RZz>Aq;~6Vz)HNMwl=GtE?m~7(p&C zzWn0Vf`*uS`pvH%c6Tp-{>8Ge#=BpCFL0^ldbjJnw`DQgah|smz!++6UYbvbDli)M zV?o$hhANV@;J% zeSf3vv)i?ShG|q5eTCYg$f`r9VN7MsVF0~g;iXPRzz@eE(yeJ1_vu- z{{IBQSCehml_lme#$0RfbMj>7&1+x@f-D#6kfKA6tp|m=+UQk(T%ib=?y4dIf&@tr z_L_9Z?6u|``aYL|jAS_h7iUy!w|95Ezt18gEeNP$RgKe$j;59C&E55$eL)Cvs}8rC z`zq!-FzFFtRP`Y(Kv%VZ2nVe|tpi3gDzKrK>6q@=+?d0Zr*xSyQ1*q1hU1^U`rA2O z1X$G$`~9-he!m`1$LEK6-YNwn0kDc904PCf-$NEGGhigh0zjc69fX2v>c%)p@7W{fvO!ujK@$K7fd7dIxEd--5#DFE+kSUD>f#Qo@0t_UDjp2J**Ldl3OK&n87lqqm~nkXtfqY>Qs#VM4Jny zgN4AlK?Dp&AuFf?l(Wsg$(w{4O_!?VaLuwJ&VT;;yZbZ>ghoFs``xPLYS}Ii&!^+q z%4Py683su>5(Z=4w}LV~_vhO~?+PG<$VPK62@r5HF{jH&%u09T>D|2D1#9zi zx9isp09sPU>0f{T&F6Pv=er-rn~z?uA+~z=aIa5a?HhAyOPqiH@Pzl@ygzPP7nf;R zLnKH71OTu^{%2t}ffZ9Mg4IigP#{3sD#1iJ2>=#d%>*LdrV13GB2zFd$c}Uc$r+&O z>V}Fir-B(3oNQ%>TP4C)ttiSFEEZ5O%n58W!k{CC29QB;GW*=sa{cnE?$5|J06|2Q z7K3VPZRxFgTY*Xim`Zs@xt~GSlIRs;Z|G!_ok_IH5KLFlY*jKKx7jkuc3DhqWxxE_dy^gJS~h%K?5{`~&w;rQV))36H=02K)UG*|@xGXw!K zX;`SRs-e*AiXveug9Wr?ibaA3gjNX$V7r&0f&iwZNn6Q~P1~ek8t#c~VZwddL>Q5p zy-)&F0dImPP?#}WrK}E3+kw5?8 z>xVIDfUwrvzFW9n?@!yq{rPcD7l@=)bU_q~O2S(DLP4lzRG<(u%pzPep%S^i{?%tM z_c7+z|MItfef#;}U9T}Q-~Di2wwi=>d%e|i%>B)BIez!JeEH?mUmiPhzkYbjSBKSM zG|n#2ETgEFWgw@gD+|V`9^>?OeRI>6u6laDx{Fq5Fil*3{@wE1*VhY>%RK+`bNls+ z+a7xO>6h1UBF<+!R9iQ6*&dAg+aG>9=d8310ww^^0AP^(uOoyKX$M#WfrKEpAr@h& z1k4tx0tplVLJ*Rc0n&geR5O4w2_Te7l`@(EkY!djQ{jZNIuMlsluZ;0NEBfe0oe?J zQZ@DpQ$agQm^_hY@aFKU-Md=n8d`f|H(*77?-e1ZFN5sRZ<0r%FK-R78;|0+wkIpo8nnUw!_n`|_v%_LsPM`SBO8 zdP_dv|FC7%0+9P&T+y%U>8E#3?c?9R9QQvyAM@_x`#<1UcMGlPfM%mJ5&~irU=LSS z#xzau^7;MiPxp+B8uzEeOOWVg6Wh06fBNgYW$$vQx3@p))7Q5P-o1Z1+>p=O0&4;$ zRr6|B$FG0)?(}%hQMOP~RuL>30KtD|pair*hG__fP|_r6#%y2;*-bb`xM%`Evx;U$ zS&Rx$gQ%TRSREx8mL1EDA@n1pFGX1^>NfC#$}1`ExVD~#$W z3TXl{mr7__6`Px<*)}ArX0fWU&0g~3*WdhnD!^!!TK3n=!u7D*&JV}O?VLy=g*0Us zNwSEN8xGN30*P=k%7Ce&2_OnZs`cAnfAP}(_W$}Jb~hh?_GTBCbKL(p(%OR_p~0)G z-S)S4m%;t38*G>3w#`pI{_y+y)kjyNn}DF3r^7MBWjUQSkp>}YZTSB9$=!O#sLe9TNg9lA^-0L}f!oDlCU-qo*2Z29}D{Iko?6C9@Gt>INqUl17s0 zKB!nEiD&^5tOcoJpK2IXh0zm5&c-6eYMuc=5ULOKArVQDvm(0(&?ew`UYvH(H9Z3( zT^vC@eEYkfPKkm5Wxw9-yOyi&)AQ5md7Bf2VX)+cXaZKzT$VLT1ZB50Sb`0juqrqy zGW+uK%a32o@Bi<=aCf--^_#2Zag5`070rIx=Gn2n`FQ^6`?E1Y6?Q_!Cog{fx_$ifo{nSWEUtGqhyC*6s`E7GeE8}2-@My~EP!S@O+yC3{{kyp!3o zq%;Fb5)NCYMI~5ZDL~jvibb?U$!(yIMhHj?GzpejFdRn825e1E1~M{OAOz896hU_r zki!rX0vryJ4UG&2tIIM*ZI|)##R~+{-6bv>m*Cjm`i~&Q??NG^`4Yn(5c+g9=VP2B(ZH0F7mc3-din)+=0@sIf8%Y7rm0fmdgv;YcOhKUxr zHY}XoAAa}p^S(<16NMkocUMaYF4T9wyZX(mtF~CRu+KSU?PtyPhll%=t9ZTBuGkEE&S+2s5KymKk<<9R$Lh>^U2a2xgHk22^y@ 
z1QAu82C`3CM54{fc3C)y=_G0{Ir=%77{-Q+l9hk^=DX*qLN#c{^`Y;2^eb*p56AOq zBmgrRHWHK!t4uofOF)K71XwWa#uS*$2!o|!u;2db)#bl^JJ#Ld(>H4ym(zXimt}i? z&Kg@Pmdr6O9%D@O+ZXEF?vwcLFZkli7mP;Il3*2zF$I>?(3<3yDpg&6{Ndw|c8tqn z!{hPo{&wF>E?)llhfltI*{;HpsGN@vPvbJS=cm(zf`-glg?RPR>w`wlOP(*ep8oVW z)QehcyuaUy0BL~#84^5Ng)F!_%TSbDOmx%bW;Yig6IK+#6xfm|02aw9pd5D4NW`Rp z678gkNtLQ4A-cH&j0#eAI8?|+04BXuwRxyUsf9u%bW|3Blu)6Hat(Wq_Uc97X9Ffs zHI}6{nv{cLHWyQ*!&XJQWiQc!m%(Bmv$t|&pMi-WN)%J!sxU!1s@740xn5|x7#pCR ztp-Y^&2{1A8FK>>+&=vNuMb02VKU=pzw527SKdyK&(G(LMz*OGY#VGg0mE#|8c`q# zngm<~OqNWVhQp?V{l!Q5*FTMYTR(cWGRNtd*j=^L^RX^-^AMX|m6uu7Ufp=&=CkcD zKk@Uwd$CqDh-6e$0c3PieSu1>0#Hake3M`9*KmeMN1ooDKVJLdv_AaXyWf6xy+)vu z0YCrwhx_w@gw|TD3UF{k$Viqdfne!5}5(7{QRDSsCj~}uM z8G>=W?60)y5SP=_)6;gw6z!sHz>o!&W|Xw{PO<_-GR#DURVsi043Z#9wY%y+|8(ig za{F>g=Vjy;%WgeA);6~}*qede=1i|2AI7FvZ;pR_$1i^Sx}zavt%=A`wcgd&Rx)x6 z&(zo-G2i|9_TvM|vH+zY-rd|R!AsQJ|K{I(e0?C$9c}W@U;Pj$yxd2)o2D|WP{U;~ z9UDj1sA$-X0A}6)d>mzi0Kor3kcLeLQLWr-hN{7Uk{K3|0>}zyr_;1m(F_|#Bs5xO zV1_Z9p#lKHRE0s3f(ijd1r5>yN^?m;XthdgY7whzfq{%ZXuy(-l@pw-2_jX1W>p^* zFK&_d13lpg=JjWXCBrlqmx$zYW-V~c3bYX!j1)_TictpLE3!GG$chYL-71_dT<9Yz zxGk9O#nF9PhL@AC2N^spZ7F&f?azO@}kupt#B^oqo1di_qTf4bybBuCja@XgabIT1<4`6I_Bd&tLtaRCT*!6xLzkm?JsqW+X5$a?47rce~`AW7?;8A1(K^Xa!~}fY=%>I|@<~N|`ZNXZFnIc3vj!s659dwY3pR&R%e#P*Yn= zFL?jg-~X_U5*Rku{h?i-Rf5R2`= zaMGg^+myC?`Zhm%v9w|s)f1bKkJkr;_4vCVfBV_ZvI=yt)x*DjUz1nYN_WV^+huxM zqA}824Wm$so11>l@m#r<&%Zp+G+_b!kKF_ag96E7aHvF!AYs5!C`bb`F{Ls&dPdqR zRI$Dc{V3?6aq12Sss>xQl zp#<2lc|K3FTWDB@Tdy{^vsd-hqRq3r23!N6o6 zLc{E?HkO292At8V#A-=^s(`i-XSbPhdASf6iJJXfgbk5on${@0yrGc9>D#Y29s}-aF{NKlKKD}v8=%a`0``fVnp&8xP;E9t$ql1g9|6v0L+nh~{r%&J z>$?~I@%?eLrguwkSFb-`+z2#Js+A1@sAklfluL4RMoT$n8J=JE?cGBAgvyfx0?WeE5KPAeds@6ll>T5eh9iNU*r!yv9VE_db z9f05h5WTMmn#Cx=X3;cgm=*{F1i%Dxj$<~&>N(gji^H@TvBPPyEp6Z9_~EE8ezX1X z+&|)XZ!i4lAAfzjYgfy@bw*>1nArwRz;n2qTcmwC|NQeuAMYdksFt+AJpHu1=&k18 zes}YCFR$4s3gG!qe;DU{`C_RY+t{}0WF>MAXH`L0uqhhMnZ@2JwJhVuW5%*;V(@=A zp~;d|aDXrfrQHA%X{zjEsc3-(gb~#ZP;eNE4A2at!j7Vpa6>s90X4R^R+l1Mu;j*a zB%uwKfuvmaWI5w%>A9rAfmWqUA?AvxhT4LxZiKhz_fMPlFYlJeA0MA;SZ#N6IK26M zPnLFrZg6q*EkrNJ>k%$BGcC0ZB-qk1NeLwb%=HqgW<-r1X&@?%0=HtZZOwwrxeTFz z9Ev&nIgU<_;S#XK{ORkzJf>ko6U$-UT`k%ze11MY9xt;1mRtahqGSUG=%w!j0WwXM z2|E%HtRkuvz>FZ0rK)*M+Th2EM_MWS-1!DXETy8zEXFu!GoAD_IeS3NoTUs4eT zm#i|H2~89Uc_ON+A!UMqkO5khB+zasvO46HY!)4`!;~mBMt4_JqpFpIj?r5xo83&E zT{EMc1yms$)6ii@$eO6>wPv218umgFa6%4&}(WMThV1N~uY@V)4 z3{PWg8Iuv^SpD7azj+*H0cNgOyWP6rx_f(md_InCS}a4Q(ljL6GKV z|KmU2s5M7ar)L6my2nN75pyeTD3l+6e0=kAiN1wx_Dh8~|8hDUmJk1ufB(t46BrP- zx8ME+v#!@|x^isi+?<(4i?%LHB`e?oLr((2gTn7I+Oxa0h&}9-% z0f?d@7!=(n6$J}{0IC6MNhWAo*(wSUgot27Q}YV4sZ2R!q8OAdEwdU3cFgAw)5UPe zXl|v9^VWI`(WEn(xSYMa;$B1*D-sc=bG*GjV}E$npZ@ss$+V#T;ge5(`%;xFCr#3t zJqd)R5v)LwUNRdiDss7S$BPgs%WV@|a$;%{=4`3i9SqnMm~yU@X`)WtfI2JD$5JK? 
zS>^rA`+xoB{YWS6mOAWqyQS|JoKFwO=Tl};R-u?tMF?bpFnYftM1Y`81J#m1hE-%i zsK{`!h`F6c#QviPaodSe!IKn!}ssztljP1&8mmD@8WRPM)JjSI>zm%UwpD> zZwSJ`g~N>z$f&B?mfXB)!rS@X+xGDbF$&Zck!8g3@!SsIe{=JXw^xdQfa8aUZ{Dtx z`+VMV;^J9ZFx}VNLyz2M6>1Jl7l6L3^_*PS!!_T%+sfMaIrzUaz~lf}j0zM10<9P& z3T;YJ0-^wjD23-T8^8>}lqGj5xTAVSgGI|`E4dcaY`K)GF`^{rQe&6ZE|2$CTdD&w z8ke*Wv5b*ys{ zWKLB}I;x@=$d+rt%07C58oR>8GTUs(sBqDjnC(KFWCI{K!ac2QNXP6K7#^3v#6%Em zE|N)ZMVfAT|MgctXO^c!<(u8QZ|!R7PnV~s=VK;78>UMM%Ys2iO1q^AV2Pkn1Z^2) zX@a830<2=u=i`a7fA#vXw#$0Zld9z(zi|c@gJjdwP24Z*JFONtgnIh2!%FeE;FspIt92AUYQ zZ3^Rf&RIqQn9+CpoBhJemY$hoOWPgdy4G>SaU zJoE7Zkr}Oz{dwk2Z5*+?Ue-e@lSNcy^vv!2^P_e1`fB{!4<}GG`_=0&KYM*;lA=Wy z5JO~CdE}llK&S`ZCZ6_bk+4eWF?%RhSpR%ZB*Z3c(jEWKhZh$}vVNj-6 zEybdWAgl~CLJGE~6@h9XWzG;HLpB24oUJp#CPOUB;8az`V1~_XWNmOy^=!^IR1nJN zr~A2%(fV4us-U?&Uv94VhmLH+Cb#aEF>(L)w8i1}F2DKq7!E+#fBNwkA6)^#mK;k~ zz#2<3qGqUFI-nUbnoD9?+>}72iCQnxG6gi*Y7>%-1Ps&=w&vNkV4pjuN+rxChina$ zb&eX1s^ee3`pY@13Iw?9_PeDoyMEa|oSx2?VgWALi7F=r3WB07%VH7kBkWcv(%@_< zLuLlyD7l!(aeBJ6INa=ZD^O>tKh3MFJbri@`=FM5K6}nABCa?tlUFZa|NZ}bv-5a< z`{TPuu4}KS?SJ^E!-9s?xm-NOrJc9aW$yaZyW`>2RgX5BWv_*;j{owvkN)ZA%4%%( zt#!26&Zmj~W|6M>{M`p5#C6C21)@L* z5NQSh0WwX(&454^8$yjcifoY?5NONI6j~zCbJ?22W~ooKN}+NiDhUox?RzdsBYVzn z<5G(jju;IYX?nKp`Tn9BIP7*6HKlDl;IP~6uQVGH6QRVdDgNI6gvS(2&^1mn_DgW9aA*%%~x z1*qXZOe8%hPmX?xO9>U(HU9YfKbDB)S@VihEYVX*6J z=?DRq0hI-qa1o#qG6`sg&B^Wfv_7V}HZM2Mym@}&|ecUb=`QXQ2o_DVf*8x(*?6dUl-`@V+ zXD_-G3FP~4htBV}51Z#mRJ4$GSJ(a8V>^ya*Qm?Tx^%7T9#7|K+jX{km9dx_c_4KX+D(0E4?rm;g+ns+ouck+MeLO4f9w*vtk5XjDh`3-*K6Y6jBn7#Z$v z2+3fp@@@k(9G)(CMh$6BBCU;->f7J{`C*n(($uos_XE~tKhDSF^Ya-4&a9dV28#{` zDog6?+M|PDNoD~iR49avm<^UukYQsxJ!Q1MTd%LK#@pwZ313F0%R+;gTaL_0?CKmF}#`S{BhH!Utde_g-%^mU6;+PG+B`Elm6-OS7Ve9)(FmR`%$Y{bkT z|GWO-^-Tvbx8wP7Gd?^%eVDZsxTv?j9>bIhFOzOG_K@O(UHVfX5e%kJv< z^usR$Qbh2d3#3ZbKtmSLLQU$?Bm|>Klmk$PmGm+cLtv6dlDqX`OB}Yu`lp+?XAy3C+jHBYmE6edU}?h?miC_x;e!;vlVTlkoZe z{*1%^_4?PZ-Z~VF!|NBjk3YM1F&GXwCdn8AN=%YD!|4jxg7-Ees--MbRZD7Xo?;n9 z+7ufXE5gOF8!s%i(~@xxbC^S^X${)LrHyQ-pML-6=MzvQ;eNI3cI;P6UY?)MPv@E# zHOmDlXc1%)ji!o`CFqmS2h++kc1;F0sQ?W0%pMG}7AOCnC-FW_RoJbgC z+sElV)0o!op!1kSUqAVLxtwabd2@SnxaRW@+uhx~z3RwTTMP`>`T3IEDCW-}_Iz${ zK3bRAH42s6w|~9*^6mxOJU`w)XJ6~`_VM!J;-r?f?U%!TS!13a=g1n{WkQDqpn7O~ z*oF~TzdEp8^Xc*Dx0fJA@IL}3rH&>>YmLo8$pE5gaW++UBxwS4xRH z(o7Aau~nXD-gaD2b1k){PQf~FyM0TI)8mH?hZlFNFsknfC74l#0GmHA|lA_H4CvY;f4M&xS>6r&Tf6q)pDoQX<2y+*-9^jX3u?HJu()vp~H2 z=9`CsN)co`EPYSzud%&9A0IDct183A2%03B)1#Ug4a>R@RM0Ytj#9y48m2HCEf)<} zZrfnree~PA_Wbre`T^VDzIzFzw2_I!W+ zss%^i;xawA=ga2hlJxlY{?&DC+udCkr$^>x{O$Sm&Fy-?`TcXv{i4%{xAXFNrefJG zH|y1|M>{_~&kZxTbJnu&F`~I+JD!utet2`;!sGer{y9le#s5R02y_yi$%e{sASp~K z8{BJj!mg<1GEkT#3MN9Op*@#gA^?QSj%}Rku=XNn0T$@e^y&W>!;UClD+$R<~3n$6h^qBeu&zU@k}mZU%hq$w}Vo>^;_ zsxqpNasix1GCgZ}#KpPwVst$H@!#HUg(|FtSa)le4ttLKozvmcpo2KGl|VP|9^tuY{|APxen``?rZJloH!A2yX-2f0HgsR zY4{+Si8M$BQ9r8}>4_i!qRpzz%Dhc9r`gR~?o(s*ZNIHsrRT!iPv_r0G+%FKvTSnU z^|>AoKGkIv>33y&`tpLfb}Q4};V=z{+RAd>W9zN0dtjO!>^{i$b?a%$I!{IHTkbs* zpuzZmC*>&z!@^7e8RkNoV4#v7Oj#xq37BfSr;`o`(AQg;AKeiMnH=83m$glYYV7^? 
zqB0)_W31P5m)QUP?_Xuk;r$Q4nNmYlv53M(4I9YVE}y~Hn!o&MvAbA?uilq(oaT9m zlCl(#W(jQT^JTMlhp(ZJoTIpODj`KMp6l1O86KKPJ+ zcT#(~UC+0k5F2RbOp2b_)Wgd-PwTqZX&zwCBm*W35EMnpAWxD)_e76vgW3G9NRp~y4M z7;6tH4V23QN+m&3z@T z={!E>HoQFn<8dAj%!mqx?<^_g_2uC^W7Ez;wDHT(z zPe1OM^%O zfKryx_YK5!nxQ?F*L`1a5fG5(rS_hZkzyGwqR`9XG{2enXTKTTnuRw={dRlv0m@x1 zvpp}nBBx}cY}?ymeV&hfJ72AgCD%XTFOJ*ObD)!tk9z&}Woh-W`QhO>mSP9CeO*b#bB^Qw^Z)#1mjibA`meqpG7nA*mt}$} z!MDrHZrOHn>EocW9=OIh)#Ko@W|h@iHIZ%@f?yY34Cl zNoxf#r5V(ng*|CKz}?BP8l{YBBUI$ASP`Na1X+xk6$vthD1GE^jo3U=eG{^IH%P^# zs7*~7(e~@*=YN0MJ4{|`Vwy`apJ!a3UfU+A_jJ-IfXrlcC^()*Nqz6rI8?es3s_pG z90DK%A(Vi`479ciC`apx-m6EuUUwuAE;^AAL77)$wQ-&f-@f~BY=3-w=kOZIR)lO=H> z-?sg-MMx|pGob`PAz;ZMD99uO92CUg6KMe?KxM*ZX7C(9Lgu0)NyshD!cs**RtQWB zhP`gB9wyKA`qRhGa`*o0nOIjGMqHm3bYxfk`d!J}I+yt4Km6LmNUId(jY!JYDM zDJI~Z{q}sV%7hKY+U&0AEcCsuZBr>8nBx25$R(NxAeL*)2w9{c&@^KHX;IN*2~$c-Mc z=Nfyq2muhmgak^SKr#tHAmmmVO_0F0cEz#s^1KcXk9|FSb<%aO7C-&drzV;>ef6JyQ){2i zXi7q1^04gd%eh*H)ycCMksVX zXJGNzVq0FgZ`-~*+yuzQP^R2^(=b=FQN3-$T#IE% zCIpHmCXfV0f)Enf(b{zvd~(ao9G$&ww=L2HnvB+su;N24Z$6CvIM-t-=g&W@tsnl& zdnLFn*X!peKSl&1Z5(A#1%+D2O!<2G+}gS|WrI>?47YBOUh(K zRLDAf*S=nC8nFEIuU7(I>YE?FW?$N!+wGH&$D!=kSG{}0y1MJjzy09~(l~wi>u*m= zn=Jt{y`cuX_wD6^3P-7yAi76a4s)5t00fEJfDCQVubZ9TKHT?@&t$cs`Y;`RfSZ+W zPAE-aA_Lhgm^QEsMKp)1il8xM2`5FsV6vphUYO<*43P=0=!{B`J_W zPz-}9OeH8JEtTr(%etnlm>ER4SKptO4kDR!FU~qo!(nWA_vWC1yuZHu^vgM7{O)hQ z@qS%iUteCYyMb1Qu^g%c6<&H9jH4rY{rvQF%Rmx9vflmXw|C8Q+h6~5**oD?MMOf# zq#%+;qZ@4BHzm@L6a-2l1^9TF$ExdX&6F8`@A5Pwk5f`|Yt`q8{RWG0mrn%WHf%q2J7S`RQMtM1!5a{p)vw zMX43($@ESU@HL)aFbqCgs)vPT?nAG2oD0Je(bPRCMcZySoeq!h@p8^$#m8C)FJv)+ zgrp8-$zDKr_fj!Ho?b+TnQ7RGg1wlvAw|g3fU$e94~>>;$*nfQlXcwS!$yO4gre2q+xBP(6En1I%Wdf*NSRfmvPSdM-J99kb$eZ2vVc(VXsu;-St-@_B}y558WgL> zT1bF0qzUO_FpU8K5}<}`ZC!eZAk2$XbG7|>NfIf@$J5<-yq|`Nx98*CG}u=4<>Tk4 zt1$lVKi}=I=a<{kL}A5zxH}Ak%V|DqZGD2zm3jI2@t0dqQckz=_~F}olkLmp==}8&*cXY^19tQJZn28k9OaSEx zkRkwry->Z%(vY6kotE{qUC8wT`)6&YVvV{^`^B^&R)~V1E7l zZyy(#*T=v5?%v2e5$Fb3kn+afFR$H4zwZ$diHua_FxhcbCL;|1ENf)j+fv8V<8-;e ztoY!ySP>yX7Lec;qoW?6Dhqc@7+@?3%0ndzff|4%6E)q`Y6QC{S`5)e(u*6QA%v7R zU;Ea!?PcB8wr`n?@KOyP+?VC~y!37qX%3pg(5JU=9;ev0%jM;&nugf@WNl*vIhJ|4 zUN)=6r@>n9UMp#$(kV*;G6<3sy)hl3h_N}sZ?A{k8O@bLcObns!!;ptOq zI(hc~{Pg9z1LIc@v8>tC$~f8e)lbvip*jkDekkkH(gyeGVD0w&(@)Eql*#Hiy?y^U zd0+Z+zOIoSGc)Kpy4^i`s_tm58B* z6zpy}H&(H-A!pEloDmQXQ%NdOkm2_J9j0X>zivO>Z9`54bMZ z=ci_tQto^+v^|K09w?8c7ak#4^igEh(VgKd&y5(5Q zvF`ol*FU{nL!z8znC?$U#@>4Ck)oCP@nIg!;N2SNn7fxO+w$ep^V+h55(Op%K!wqJ zmU%EhX--3ee@9gaW&i-eM22}XjV0ug0J_KothzDKS%X3S*HNjab+_9ARZ%+qXFV`=x%>ki@y*q5}y1`188L_o2Hq0Y)TQN;en9@Wr zEeM83w0>PTB?<^L%jgjR1Xc$uSP$d;c*^$6mwGy#Ml;Vewe!pBW2M>h@udX>qr;|m z@7^4_{pmBOad>cob-e$!{^_+}uYH`omTZ0b(;v=T3IvPUSmyy!A`xEhK79Mt>9DW+ z)|RDb_KdDAvv2#>yMjSW(Ubs^9EP}TT{Mx>Fqi58|1TH2ThdihE{P_X$Sl1!%9sW@ z6W)x@4wEO+JktcvzFoG#b@}|`b0f^_;fL=JWm_!wk=Dnx1KA) zGOEdyeIv(WC4t!E-ZR?m^6|^Lkw8zY#f#^No4>n%b6|UYeSQ(Vs{`X#cgy9nqEv>+ zeMd1r4xanUX((n%0I*il5SD7auiLVRfrMMFu{Tgm(uZ*xbzL66o8Emm-2VCBF2}p$ zXikBt+sD`8bR3BN<=5Mmlt_0!eRw;1o-Z-Z#by?0n#)7HuC`wred@Jpv|s=6*QHA) zR<~Np005TA>D^av?&~d=c5BPldP_BCSYt{`39@cL5|RM=p`fkH)*(t9j$<`~zc(<= z(u}aE0heU7Zc4L+Ga#A>2EDs#NQDU*sRS(d*Ik3(UVi>~rObwRKYVpSztSpVyPWsO z-%NtRvh8l!udkmz-FW=n*C!WCKa4I)RF{BAZtZej02*j!#aX>pF9*UbDfI!n(wNC@ z-=Z8JAM$)-@nJ5b6}l4+)-Vu2CBslGdIg9Q#=synCqOYW(8sN8Lu0jKSzz#FIMK0a zYkl3;+umCSGR)`>R3Q7>t<<4x1?;)4!^GIPr(d3L8Ae5#PwpsY4Ucc;L)ouSpH?P% z06Bd#Zu#Dq^eme zhh5Rv*0*(U%ig<`IdstJgK_9C?fc%#!C03t$+fgTR&P(gep$E9l(+Cwy+A_|?vJOT z?w5~GO}4fm=?}+!O&^a_Th_ksdsLPYEt!5e3}h1U$m|Lcp1rlU^$?UMV{xUI)4Lfd z^J4M*rG9w34)^=>laBK^6zfEHefsrzdUKN6dcEBCNC7|jyw`?_o) 
z4%0NcgTIF>s~{DSV3{Z!I{)%>9}mdB?|bWv1VG1lx<5?R!12gXrg0hyx9+2E)8XZ% z_px%^pVof4-nyGRiXfE)7$%4mfM7xaLQmeE+yDll{knAcG#5AH z@5w9xjAB9#QZl9`;N3xn!7Qm{kt7wc)0wbD$t2r)j#_fLyncRaEQfD?cQ+#2W<}m2 zy1hMJuc?gWzV9v8e!HpMA4Vsq@$T&iBL&IRKoE&8bEIe1ASEiKMiV?>#Rxe}*7vRQ9?_vESmh<~3JFmpmNB z4I>K2s9`yFLu$KhmvWko{d&1-$n-li^--5EFS+lh0xNS8pyzHTXqnV<{fRCK#&nCszg@K{^h;Ne~Fy_;4JGo94U0J+<$Cde`@D9Y!uZ?XO$M;M8T! zb=fvq8eEViWGM);ARt24DIg_>L?n@b#54g&m|EoBX>u_{qmF3aoZ=T(09+Yj?#ik0x*Qr%B)UY=5+ zWHjxFw(rf~-j9PF%QTMe21W{~X_QCwzVqJ1hDL#D3|9fhEC>S#rPTztHTO7txK}GE zHf2?snJeouu#e5*pryQ9Do{40S+EQ-q)m05WLNuD@Ob2~ePzNPseor@N`R*TZ#IRf`qh8CO{Gb1V91+0002BwEO!?B2yA+CHg}1;>q7L;UdGc z$RLUqZq1NhnhBOBgCUW$#E^n$Q7nR#b@RR~J1W}x^73-&_5SOxMz8_V)X~EH?YpN> z%>yo2N^1SK`PW|^eZXjDHK^JsOTv=g^YY8n;j2^0v@BT)jqc48MU=dg5?mOz-21*C z9v{6AsxD^%vWT#>L=Ca9L){oDIYk@f4v_m8hVa0uX#KXft*_aGyQ!G5V4%4ht<;8% zgTr7%-~kT8c5QTL<}YJftTKxnCF7VnS?tdHapW+BSapix0Sb`t@}w4|ne# z9_uiacKKib&o6xQ&EfVXM&}Tn6hKC9PnS%}5&&Rw(b!cE_p_g-GLDDCDi?;zyj*+k zEaT%8*XQRLPP8qS+xoJLVG;!@1yLzNi3B78NSYC_06;+i01zV80`r>#K{GUUH;|U8 zbSD2!o`4}y5iStcJ*;GqfGIshr%X&z9)zPCgDSEt3B-0@QP8&6<@wV6?%i<=@3fvN zLz+K+xO_3N7)c>|UoWrqyKfGITXzQ>mS{$0$|7_7^kYAM|9-XtIx9?63PyqmMG>gl zJ$&DD-RqmTRn$nrP$3L0$uJ?)7z~)4Lm!~I$I#r76w$E8-rBzGZOQ0Jl%%s-js;e0 zFD$Ae;&#zpDKTagOZHekKSuy%I8|L2I)-OKxTmuF4$zMd08J>7w3WaRbI-Q1{HY0HVMs0-{6$OaiiG@8X+> z0pMU%Dr81;Ai=`lQ)wPBqX@T7ARK84N^XKm_q3!5lnQ__LpTE(0?2Ha*9PqSeqC=n z5BK**w@7o7ksOYXc_Y1qWuixG>&sL5-8Vz6$igtPTh9>EnC|=f&p+|uZ@wE5RYt(2 zlo=2-WcM^!xF@{bbWOYa#z{HN93)&Jq0764!>tPDDBU6R?%R;Na%;No{nDd_I>QYm zt*}}#O>lEC=R{Eg{mUnggD^5kn3zisC2TBJaWhwMTNBD$dO6tB^4OoA_tpWEXwwni zZh}W@U()&l4sPbXk?WRL-+ws2XsZ3CC!pzWRtzw4U2i)I1R)OJ|M0f7m(L%cqSEL0 z-+g@?OKtLuJ@fhxe}5T&`@8w&(=A@MB-B(`~)OU9y zTHzpHwtd+;(Rg_CST48Q&(}Ua+{Nqi^m^?GLW+a{5y=FSOhST8Vkv;93?dQ+Komh` zPn0(grC2Eh6sDeVRT^3Oe~`=)Bt-(0A#+F(f(1!yMTAJfAqz_iWE#CWC3Use%dMw! 
z-}-IG;rKXsafej`9<=(vJxc8s%3$lu`sHQ%?fY>^NR%*M8&Un>$2>$-U7cY^NMBGcz}1m?P>|#89j)a&1drdhbfn zvSfw3yQ5-|k;McznMw@Z1MT@^LkXAY;S5ItkdZ!!9Rs&~hka>RYsiX-UPVe8}ZP(`| zR3E>2%ora}qa{aSYs-?6vF~+0J`BtG@~3n6@ohPO`gCbYi4h4wgeV9Wpdf${4Ao7P zJOKg_L4g1w!tdT4=2{KeQ<0X;q|YRk9?2ERD#Tur$HsoidA(GD)Jh zK~Mr}QuOP7PGv`L>n#o+-rrfF8Q>`pIULxfni5*D@7wkBgrt;qCb4JfHA-?dWZMoN_CU9;LLd z!o69+@!`YiFb{k0uP>ig&hP*1!=M)Jvfq|yREWdz&Hf*+wf2{n^Iq%0XiVQ;Uv^0# z%k*&k=DYD_yQ<_|hZ6O8H_ivy5ZkTwH8R%~8V@rs%k}xZvz#7Y{^Pl2im6}(1%Lq4 zkd^=?3h-(!3WSI>2mnZq#ru-OoA<-)?md*71-+p}@KugFh)y&FN_8`%cyJFw>%ggfS!|70b z$_iMGv?L*e*q1;3!zbPU@YjdIVL%R0pjTL^Sd$g)@RYI7p?zESczAoaqoKH!932d^ zFqKZKZY3LUYO!B&>#KT($rw&&bz>>4CNDX3I>Xap6sa7jqA=@azkIoLV>TLuG%zU` zr6}n_)@ir}1Ip?P>ifI)<-B|3ZRCw&B~cFVTlBtdStgO!C0@=Moa*c6 zTbG0kNGVW+2oWL#Pykt+E{Y;3KmZIrj`Qf+bT=Qom|N^it`?F7BxH5t-^u`C=meRb z!|o6vrllbu8k1SNydy0|QU;@eaw5_rGh+|6eQTG?Hvi_U!vH`ckfkGis<)fdu@c+bw{72|M?wJutOXu z!6d{qW(@>`9H!Ft06H{;PREp9mL7=f)yK)~?*8lNmniZ?+Isu+%KJxKw|%YOZdZ$P z$cw$ZJD+b|yUjA(x-{8Rbeo8v=}F0r7QM=wc8TK@Duzi9sUFTWeA#H2x_x+mqCr9ptJE1dAailrUi zo$5xIg9Y8JM`!PSZCh_!>#dte(op(fC}m`t!#S9UBG6S*L^w!OR#_NPT8X(2+@5~E z!h)h8(1fH20%3}RUUVvJOQ}zb$1hIgF3*FXu*<92(-~^BL1|#CB=( zx33prI^;UP;rX@%(#D7Vb+0C`@9s-iabt-bq*>L>3$6EldwE%dd#%GfI|V9ic)0uW zYwLaML@pc4V5IVVYe@QGR_k|v{#V2E_Hav}7Cq^Smy zidgnfzuy1maUKa6L9><)21~czuK(*luI0`5fALtyE+K*;nY33J!g_bu%4 zolA+@YBXV8_q{E#wP-*_fomW}x#xi5v|=&`a|B%uCN1k$EQZ|(BnBll?qfq8h4tev zt3zoirpE3ez=Hx4%(SdiX{$O?ayt&^SARHN&aLYfWt4n=^Y--;a27G!%ku?xIAwcn z_ixTi@Hp){otDe3Q(2GG?FAk=oj%+Tt@U0C)C;Q}3WKZz_w4O_eLde$r?C_#jGo8$ zxS(IoH}dGf(%mx2XcE01^TP z$rj~cAVTkcvi;x)M>u3t)Nl14@PjQ|LDD1vf>m{^Zr%QCPLr8?uQfdeQB=~ZWS~cZ z!AwvRt|V!YXTo9L%V0w+5AWtsf<1>0CIv#UNPw9p^Np{U;3`FOz^Uu7!Q2{G4W7X6 zesjF^f+9C9&mR_x-5%$2yV*a?97l#<**qT!*>=y${D|I;V^^!fn_(l|-Fr zZC&H(@yl@rhT>-KS?|8yeRw~oc^RFC%j3M5ln7d&kbV63|I@Drr}4+7mvufravVpJ zeT^kAJ^S+cG++!Sp!g_29ESDX&(A%T00B8Df&xIKi4>Vk07{C4`F6XpK~ovVLRAjc z=;hKv*-mRiBtv9KoF@K90Z1cUCFz!s$tA)<#stk3CRobo-Jx*RU{Z#lDSAQ;ipbtt zKj+=c9VUTd4H-JPvx|XIyA&LQeD&7L}Q1zAI*NdU!7FlFO>%kgeW=XxLtj=jXEl zW~$q}@|X$wNM$>Jn5FS5jx%rboV8lhb`yDAQ#M{rwzfDg)6MJMXsz|=Fl6at9o1zj zng%n>ww#~7JfEAOBn{g?|3yB`fM*3M&JV|Erf!5;HLke%{r~tf2d$4^R*CI_S=zGD&4)H6!)!H z_4Slp>$*foS`X9&rp?6v=qkNHPDxHsrf@+X%+hPf3`wXY1Wyx9NoFgnrFD;zas~k9 zx-Q2DzWC~3HCa1lVF_}wRgk51^<3w-AFsYXZ0bk?D6myPm?9(BPk;Q<_qV_N^`xOw z-sLV0nV<_LX<~}qGqao4z8+g0Z>~2#{Pd6s2y{@UU|(ZVa;PYG-~8fGWT)k{rs#tR zOSpF@EsF@I3rbom7H}lUr13DAYH!`n^ZCp3xGMn#Rz$3)rV@!kK5n+{(k^X1b93mY z9_s$$cVB(@?%f=!74@)TDW2Xh(5B{P-fr4lhgA3NfOc#>r>o6XiSzSwxqfjN zHrZANdxvMYVOXr0o|Yv@rYB)y07Hn%l9U`20i`R! zOcT3h4bV!`G-K&4Sem?<);hG>0`-j`)gY+l^F7~lQ#v5{^vFHnzlX=`s* zHXDm|_h-Mm88VmS=W~SDp}1%yVMuqS*X9VYn?S;X-7R5(G1|Pp+U)K8{P@0aHiHB$ zX#Mi(!=-BFIMwNTyII=$@O;GXP@f;0GW_-Lf2V)=yLXo?72BJ-^n{yFv>s2N&+L=8 zhRx8kT6Rq1;Fr^R9XH$Ssr1iJyt>_vUd5I{D0?Ifp`cY^GnK(=ThcV2R^wXMb>6+) zomY2u>(8LU!MwWIFHh6W%U}KG*LUk5zVuILtwd`2h(vb@5yTjgD zIPh;|Nq8?wLwYAcWXbe2hF}RI+2!Z}v(n&(LK>4GGiauaW-U0!@$qu?&At|c076ir z?Bgs_U}A0O&u{HlSDQMrNJgb`&1mb>PfyXcg!b1jf3>ex$Yu$`!!lWtnF!3e_IYiw z#;QoA>i+KT?&jT_hoqy*2wRrZ8o84D%~h$VCw%*FuCX4U9?!>wjl1o3lo?AmxYFT* z01I@pw1CwvHu2@W z^nmQ;@BhQ${r~w-j|uvI(+bx#+YX8r$79QN^ytH$phH=PvHIz0&dp}C+57Tz8eR;; z=%qnYc=FO)5~6_Ca@uajv68mb9%_jBa-0ueZBDDDD-$VMEG>((cu*bD&*Sys)xY_- zhqr%t$OE62VI0Q3TQh*AIds0X-mGF)8@& zZhL)K`ekl87Au2~H0voz5$Zj9bQRUjVJIaF$p0Ni08j}7vWS_W;2A*)NYFud6EvjL zlMqcT-IH=Qirum^GTVB2JnepYHF&rSY26s+Lw1=EP>=rf)8qZCgV)JqDN^bYb9?&! 
zX-z>YH`l-Vx^hU9C)f!ov2e}kv9`8Gk5Gyk4rLj}``f$g%MYK!NMm{|m-CwHbvq8@ zFqG4$_U&(mrAF~?}ZFdW})Y~=jQ9%1N+D45_POS z0uhM$cwBIPK5t*$OyhREJbZY&R?g?O^_2jx|K0C@z5bW~`eo_B6m6VlCWn&inysTp zF%5Ncw_-G_W$2glin1BUsZRdXuWEG$V3vr6b3_xE9&FDS9QHS3F|XzUXPcjn$MyQf zxL#mF7}KgrNG=R0(?M@z-G1}0{%!g0pB}QFKUuxs_{c;C70LA&OfZO=CQks~B=pP2 zKYTn%BIMmDPmp4oD2f2ZG)NH2ix<;4)xLIi7B34QoXv z!4QUILJ%X1U}7;1GKvN{OcX)Lt(P?Ps1Qi3p2`sVdU|}kf4#3ITm(gCpbWXDo-yy} z001BWNkl_SQ7uNpak3Y{LA_)pKhDk0`0tf&|0)phXFQ)1S zYerFiC{cB# zp<6~WnY|r99S`3gDx8u*!7ObYdxS}d-qxp&UkyFV_4U{B>8BMiL@ZBBhWmb3sqF6k@xeD<_V;(2 zVaWB^*30uV?p|(3IU&M4VBLgf;cF}zD5F~iJJy~<*=+}VTCVwgeWpEhX_~Gw=cxYi}eGR7syZh~5{dxbt{_f$@3l&`gm6@VIrIotb4PFdzdKvFt^&dXX zU3K%-H(y^b-#tt=cpcZM)!OsZc|jK%Gp*0<{G{RPdYc?K#l2g%)AJ>~vTy5H%IIbR zTh*7|np!gq#j0bt`HO#b`~IK43m!kVVH)=Pg9suKSkaga_gt)pc>C$so}Lc0x+Rl0XWEto7=8H!vuO{|iL`1~-@?JZUau ziF60WfQ5-6W(lLqOB$G@3E>uiXE4!;mdpI%@&5I7amx~E)`=uGIfqBLw(9cm-tKS4 zsnh|?l40Jn<#K%gn5eCGJoUwpH(_WgTV9rlH7V{vQ`=V+}os1wnm zpVq8fH}g_T0mPOqRm!kGaDbO?8GY@!_CBjCJK*kK4zIqtdVGIu)-PhIetXj?LkO8r zHvwUcl8}`Z-lfj@(?7pEDFFbMqzE8Q5G+Lq0Avs)9&Wc~03lLj5DW_tf&fr}3>*(v zhf&?F8~@irkc5R1fLvgDcoqeik;&eHmh^O)4J@@llLQdfo!zBGlt|Bt`06#(Nv9KQfWaF|Ll zSB;_~lX)5yXv$)?KU{sad3tkJV4a`NG1dJ9T81$9<+PUF{Wrha)xogzJRQ;33|DP!BGCK!^Sko$P=>L%6Dg&#uWKAn=f_iK0_Hvp zG9eT}x+G)TezHw!~9?Ip~YsjX?H(EbXU{ zD|K^y*km3*K3u&#{`mOe37$OwLXr))fAK5*^N+a7IijQWH8G77jV4$rG|~r7#VeAE z%lXoD`1W@%%lYB+$4ie(+}vMJT9;hbwfD%LT-KyOPCR!oVI&}!m_F=pZ#Q*zlsU8a zf-G(T zNf2Oy5q-Gd4}}av!qOoz34{rdcNr|qrrq_fIHe^2MueqVx)L%|TBdr}y7tVF2ulx; zR1rld+@_svTy!a27M77r`P$awr{%@%U`C@122Yr6qs=B}TOQt??_Sh06mvv|)~0!F zYg1O}p>hkW(oXhc))AfERggQ=_ zhfCzJzq`Jo*;Mv z3^OEU_3+~A$~&=U>eA+R(V`~EdI(5M(jdqHsQB*oem5kTo~G<3c6K^sV$iU9L}{8& z|Mce6k)Q~Z1B3`K(=<3-B$C$AmNw^jwcW^^As0Y|r6-^uC;;+=Svg!?757FY|8tgv z3$je}u9VJeZ;^mx$s{3Tn86aBaM5A49%`YvptQ^k31Z9n_yxB&+foU^q9ihX*tTO( zQair;IK14U2<%ujNAK!A6;kQz8UU>f)jSj}&1sHE1I4j;5-R&jy3!?!ShIJx;pWBd z%k@tmqA#a7Tpy;=&vBklmo=kI`|JIF+TR_Fw7&M`xLT~Ae)z20Z@-=ZCoBgQFhBhK z)4bUf^UeOcEYA<8X+#*=Eb#EltNGl+1;G=v09g)q*9R9@_UOxeSz4^>MzNeY*zyZFQRiwAFSH*CvtIi2SfEOmP|>^8$R+#aemi|Fg|4Cv#V z59`fueo==h>KLP=MLWLxm!EraZN+M1l#EnXY=i$ zM#+rRu{G48Zhcb*#Wc7r8S9r%504EveEniRcBY9)5|%Xi{^rG%p-0O$ugko&E-GLO zKqh1fh(wYiZS(SeKY=MyDFzaPNg^o*gVKtK<9BZ!5i~E`u@)4;#cEYf5k$Jyys#ln zZeCy@BoY7sW*UM(La-#sdf49<5=!wu0WeD;q;^@3JqjvJf(MzFfQb;{D%}LgWrTUD zb#QZ}F<{-8Da~I#kFU3*xm?IBjS(zce7r2Jt@ClUi4YQoWC8-fWFRb&Su)J0;$7>C z%E$tcLT?~wrZVmh)k+YN(JrUfjN{!``>Q+s;g3gdZ(k~w%e7{^{+KX|>q8v=qy5{jaayKMx)k3_3l}$+F*;cD^+8M6E7Vxljr7 z^bE>bNlvVTArVVo89tV}xJS$@RNdW6nQAx;FqYH9vq#%G@W}*@VECCAi;pK~~YE-LYBB`DZKqfXTJ%K=EZ_AHAo;!UUHk(?B z8fE~7G>D)GWwH~p^f)!5q=7+4Fz9IkDo8;DNU(0N4`b~JBmWwf5Rf_@&r6~jjkE*> zSXwubK$<9$a>>$QMtR-rYzV2&WSXdK^XD&zmm70WpsTmY9yz`0-#<6?-kMbhh%~@b zqzERFy$eZNhL@>Cw1}Z-$pV7n+69A=rSA5V1L(1?=N2mTX@7OQy`6se?sL03Al7wW zS`YK>q0$V~b~|jw;mWBBPIfAy;u#m=X-{dFADD ze){~_Z?1hhZ19Ca6;-knw( zuSQ#*=WZqC!H2UXw!I1Z}G+w{F-j-OZ zBw9}(E$Ar*DL~3VS6rTs7geicnf7BDl<$El4;BmmPH8}AX5+t zP?Si*4*MG)6Sk25kwUc7<8!ycoDQQ1m@J?YOj0le3?PU!rYBt_$hsMeTLi*-x3%@t zQ`?yVAt@LL3cml=zkBQ%i^^!Ei|}rUWEv9I*;~LwiNMAk>x^2|XL%u*NT^tVkaHME z*F3LThUx~mxs6vZuRgzd)3Rqu1hSMO!puwBFzlwojh!El^W`%41jyEH`_+H@XNUFj z?jOHDt_cA$E90wQ4%@41UzRVYww&VC&HUK!USDO@{guAFzkWIN`RVd-nK#$NcshRm z>F0+g5Yv8FOT9X5HbejT<{_-^JYvmIX4o~m4qS^Kug=~cW&lcvJ4*Xp^R+G zNMJ;!HCRY}ZSAQ=DhZLWvYkLnf=pwA>Er(O-GFw9dFhvVT@^_ZOh_szQjC%&rkt!d zcefk2u;?|2E+(LK3xm+X2r05J%elkra5Y{RcgoiJ(k#!BO&|o3odQCUv5F3X2~xCf z2(npv%F-YZ&i(#sYay8UzpZGe$78dhl4i8@G$sg8kU*Iv${-gg0-!1Yz~N)HmeJ4@ z5z*GB!AuZ<1gIc}-Phv}k8AI1siPS{CJc%Mm4uktA$MDvOfESPBsl2*OCC+=em@(OO3DO=+bo({|tf_P3s&zx(?y 
[GIT binary patch data omitted — base85-encoded binary blob, not human-readable]
zd+F)CzxwTqQ(aUhqM4DI1K7Onsm6W|G3TfO7aqI3yg5S^2@OSSvJ?xiodt7*6A0_)VEsp~@*%U!X+p&8Xwx-++S;(fc z4wtzH_B_Io4%D`(w%Ne3q&x=UyR@0DDV6(bpf$!gfAu^Bd=q{22g zRbCA4ip3tnPVUR}OcJI?Ge*E^&e_A4Y&;|%07#^6IgtFVUxL`L0jF*4++hc#fH-e2mU5z3m z=#cAR9vb3Y)Mm*|E22YjIqcKTo`O4KjUtkNgvIPM*)guKkC7Tk+C#BMcG?M}%n*qH z07s`HYA7sy`?eq|L@6Ol`RaG4G$dOvEi^kF$R?d~(4B^w9hyM2_j-D2fbAI{ULC+Q z7)ny3Jq?jI&CnQ`3e5_}CD?Sus;|G?wZ?>U^Ng!p6cGMUxii8nmy0xFr=Qty=8LZEicsiyFwu(5hx5IoDV~xp@{YdbTj#NX+m7lj)nt%9U2Of7Cg%L9~TT{Lx^TK&rc&0Y^83F zoBI5ybTM7-SpA-COq%Q?!};NR^mr^|T3vYz|w!*|paG^}By~m!r7| z?h*T^fBpAr4W+UJHtAvKKaV?m>YWrfiybUoJ%l zX~8H!eDj-Aj3QbA7KF~|M2|+&6gy439EfeKd1&DL`27jhot|&s0bAUt2eKYxxW2!xiDtV?t{+f-7bW&(CAB_{^rB0 z%kMcBKY_QS}&9ey&AW%IFw<5uRsx%RD zRyvQ*PccB!a1tDVItX@D4@Zw?8VCg_BKKiEeY&8-006ez!t>v}77dCOZ75I^RCI!3 zG$^D+GV}E8w4F9w>hq_&`}69?eDfx==Vh$qFcLOAC?|4K1fxeGT3g#aDp#Dp|M`mz zNobqWQoepMcV}y#(_R=rMUMmbb>5xZ+)y4O^2P679N^rC8)Pc!C=Wql7`YF&S-nb^ z^Qu_)cRjCveE&>OfBE>}C>|y;`|+V1gv)KhPc?u4yH|%ic&uDm?aQENU)+vx*EZvD zw)SYWX2x-EwH!NRZtS>}BWhy90dsWDx-#iNWCc?Z7Pd5NvS` z(+SZTY+|4w57y)7GvuV*qk)Qr=Rdu21l^DzirE7K&1Qm@K?Ne_vuCfao}Ka}w8S^JH%h=%B%pzc!5%;u$tc1U2@|Fp;Y)x2`Ri)7VWB~tjc4z#>b!XeW82+K2e|=g(qu0|sOvgaVN5=*|J!pUInVyM-<~Ds1C8C_W$;8U%Ktw&ksDEj+{n}z&6m{AKd$X zY0uN)vgS4-Pygxnw|cy9mFYomr)_hyXXn~Fl38#W!rU_$Cz`iz0Zk=jO;2a+p62qBq2zY0n^lrpZpgL`^B;#|6{`j zNK@1`b;+C6WS<$$IeV`)$ME+ZqnQn6L+Kupoo@`2|uaY%S(l8k)%`g&Cult3b8 zo2h`?77D~jGdE=vmkNe%e)(`hJpJK8*O#|nrb6Qe7fbDpF~(5I5#hP};a|MG49%$_ z2g7I`IPY4F&|FvKycc4qdbUdu+dcu%OFuAZ>fr9KXzHT{73{N;-3FjYnVi|JA{~?y z5MWjc(RVm299A!mG(5H6pT6X?M`;okJ=@JM|8%!gJI3jZ_3FrmB75s?-c^cAeYvwP zF5xXY!S(fDU#<7|n-r7DakZ>Xf@8=TJpmAKllrJ+8G>1%Fj%b$4a$O(@gkjl_g>@t_2c`m^W26}G22`nAOIQxrZRegw#^FC6gAtx-LLP@DB8lz z0&8NjdBOr*~(x zCojM2`ugtEDE0j-AGRsM`yKXaqKUE1>)-r%-BtHhLbTk8>^N^gL{#Pyao!6Sv@AzP z)$Wbt%mr8B#aw%3IObXiwHb*lwo-J_P1K1o+#LK^Wa=5!aCl4c?AP14> znlseCYb&g%rc}V&QbrrBprAm7jdpGd1zh|)n6M*YD&pZq66pw;as+@uPr)kHu%-<( zIf4u##XuB_a4X1;slxK~%?crusMrk=0!XtGZ32YXi>u>xU)JUD@N!)0w?Qm%(lSdk9z8pZStGL>IYn8P+UnXR zS8E++V*T#bqgKgD<;Z{xr~Ag3gaI=Z5_7iM-BovQf30c{UOmsa`}LD{w9_drx^}6y z^Vk(eM7mBFzkl=afPKH4)oN`c$nLSh$77F~S-bD6U@y41Xmihd>^)OVT~@Sy0KwT= z!2o4iy!A6?~cas;3@a@IVbKKbc&OC_Kg#{TDj{yfpt zJok(W8cg=+*|3T(-zr6Uh($#{{mbWv+nQCWDxBI{MoUi&^o!$~3KB&irbV~GoB#v> zavzT50IL=Mjy3?o1lTSvgB9kKsw|U&!X(2GVTx=8MF4Ka1{QOg#SqkoJ*V5VR}XfW zPGeiN1zgQhZgMnnG_xNc9P=WVt_#Yd;e~3v(-R9a63($N69cV>2ZqR{`PTebwB1n zA^|MP{S?dfa%pg$=NK{~8o3m(bI!K5F!ns$UoH#bWJQ{oM7RMosQ^(->u^ra6f*1z z+KCwgv$E)fYEij!gy|WaHMaee9zX1iJ#wbzviIbc-7ObSt`#(ubLVl8wfa-?PP@sWGE%hpw^W2HK|p9rc#aX4 zL(a^LBN|eqQX*#%1PV-t=v2UN3NfLPn85V5;k#E?mXNfK0Tw-*Lsp-e6`HubSbATU zLqA?zTGJ0VIweRnKnj4Ul1qfE{ zarNEx90j#B6{b)|xnqcCTV?F>Y-KWB4I&wu>&RMI4u?}n1uAC>shD9frj`PoRcV;C8f^=j``y?5 z(RCvgn5&HO?w>vh1b|j#7y&ux$fZ}HR94K%DZ>mN{^Iqz-)mw*09)0SG*UW@xVUaf z#Uw{q0a#IV+E7$=j5NX%5hKmN4HE_eFp08UO0xk(zj*mr39{gD6alN-pa`U)oFowd zK?RD~wB3K&uU|jjBRs?42*u2p4orugCU&zOx|htQwTpG>i+lEUxjgln_w%%Ip7)!r zLh8j7NYHQqJz$Hh5K;-3K=a$jcek9Nq9z5q%E#{?YE!vKz)DPDas-!>OTC3iBl69Q zYk9Sr!OEDqw;AEKtIJWr8P$p)oHX?1=a2iy{^HdG&M$wytr4w!(f9jNNbbAsiGWIe z_PeJKa;tqHwX{g@iM@iyi$1Asc(X)YT(XUlCZR+Fg$U=GOBz%_NZ3tuny&0K_7+){ zA%v9UV$Yd?lj3YMM=EESIh$q!=OPxkx%Tbre*K`i0gx(Ezy9&lo&{v8S)`&CAiShV z8^vi1N<{;i`P)Anb$?T>%-rctKL_LITYauhoa4x4iVtKPN1u*6%7Z>{}Evp zA_AUG5Gz2d(PTV${xH=|BQe>Eh9jq#q-Qfscs8O@21BCv{r>&UgWp`KnA0?6x=#ZN zyBO6x$Chx_R3M@q!t1&mju(dqmpwhTwSu?v{dr0zgk*v;$wsMF-y=qd%akG<1KY*5{$sbp%w=!;Z)m$oJ=oPvgqV=fHY#{>>?5~DZ^@_7_$Kt6|mC) z93h=`{iq`qW*`s4dzxFL#RQx3Abvn8uAt1MbDAvVPN?DrQu-Ha^(x4v^V zRWs}II$B6=bPe=5)7=D$LCHu*S!C>40R@YHo3H}vHeo8MYH}*f;MJQ;wrUQ*VFzeX 
z3b82%nXyMj1Pz7N)N0Og`{~QoZ;mO>EWls^0Rw>=8ItGy{rPl1XUjCv!b!K@mgVaD z@%4Hgtga?)w{;q}kqIG`;mny*6PS*oLXHY?ZXbU-4a0(LP@Z!x&%Te-Cg(yDGAAr6 zdh8Pi0)XM=@Z#m#C?*u3I3od!1Y&O#s|$9j-LF(~&aZDj4;W8hKF|L3rw>!mrgiQ2 zU-!$0aevZQE)td={P^;6IoBwm^#?8L)b>6|^urdhrJ(5?%+u-~eL>w%SeR=E+Hz>2 zrWCDI8;z8M2#X-$L^VT|A;3}1O4z85N?4@B%BnUqhC2)1c>cU!UuDtr{w*IrI@G8C z`f&>cT0o%OW~+>7jf;Y_rZk4hmRO=Ktv`Kstn;@jm9Z#{BO2hIY{?4Rf&?=_l7gM0 zA&5$;7h`WD!T&(I$gsnJ11vBKAv*Kw^#d|zGAcxxFc38`E&>!m8$?4zI8@CT!#7_( z9e;R)3ZN7Pi)a&F#I(Dr&Y$0($L470W&@KB7ZZ8N%f}C&93Cxg-Fv&+?(g<8flz`o zxkidmV!zmxwYbz>g7%NU+zzlTmK?HY#k231`?))oF3ym004g1OC@n}KyB}XaJ7#WS zuck!+cB`nRs2~TMQDGv4YPH||@^MbYgCD-{JpcURgo)kRa^CoOIZnHWgbZV7`OWLc z)ZLPdFhpCJ))ABs4TQV?pfStI?xkN(~g}qO|dI>PKuSCe@CQD z17;NA07A49NL;>r3WA8aR7?QP%8BhfFM4BSG0H$IH_m1=Vvf4`_@({$h;(3vBtmFP z$|%Cne*f;n7$bX_h{$Y4Q*jI%}E(Xfd^|uYP>eGs_-xEG#!%E~BYpj-6^9 zbJujjBV)nBxw{Mn`x@%kY+Q%4vX}}N*<`PBIn^qRoLv@72M7YxzED8S5E)@t+me-8 z!Qyn?f7{k;EINJq8~5s0|rhG5ZL`M95{Z!L2Y6lb*M~d^?rHWe$zrYU*1|ta})ZTZ5fdU*CSHNi5IbT=(<) zU%nw~L`<{}ZChD#p50=MxxD$!!;8ha?L(rFjcdAS&sp;@TcYMLLnB(t!`wA75y8y0 zE)U1qoiRxF(We>eH4?oPNSncZDQ5)*K#AGrG7(z`!-LHgFjMwe9EupIyRYX5>$2nP zpWeTGb)C&Oe|o<~M3`i{BbSI+dT@0cVp%boCzjxPku8y;_wIq2ql^uqNPK=GHC4*8_&)_ru)?!K0T@moY?`=!`7lBVz@e-rn8qC1+#{CGK|6&& zLKQ(0!?zzl=8I>CVnrdua+z5jl_Ql_oxXg!*(+wy=q<@eMfcn-1%SxQC*Qq%cDXjQ z?)UQuo1@Ky?Sm`b_6#|!UP_d)fBE_IbO^-)VC=K_@c9GZ&+SIXM`749X<%60001BW zNklUT3K%TZB`1<9`o{5o3lLZ!A8CeJel{A2mk`7ZfZ{EH$l=0nfuj!BPzKu8y ztQl_W-Q7leS4m9YqrLp?v*n_wF>8vI%e(|@Z}tYTtGi|)xLLl;h4-FA9cW8m(T^QO z)DjSlZJiycEO6b4l-rCx_AWAKgxqJ^00b%9D6m0Y)6EJq;GBi?*So8g^ZxDIhi@LO zi*WwuPkYNlw2mdRtvd-!g>sM#uk5qYi^*j!qO6*-c7(AmWJFP}O?8U(L6qC<1A^D= zO^$*{wW3V3=#(L__&)~@K{w5tjiw2Mgc4Tb!OMr~v_Xd*2-^yt>-_B={oyrAMvNd( z&4}vJ+kX1;zFxmvP%vGLiRl)MNwBD(x6|$CZ)b<>2ojx)hHg#FqG7ar@bu-2$5)Be zUiVwE5K~H!4PJIc&IqLx5TNbr+b;t_SIHz)vv~FVI#2hfFWZABEfqyr0;*jFFfk|_ zarNp|qLZi;vxKZ>&Inb+Y$$}6os|gF#rp8`r(srn|HJov-u(J?)A@|cMWY|)m-9yK z8pe_s1K<7b%`wc=lr6w(^)>HzHJ8ODqv_Tyq#CU+r0P^qKV%%+=*Ns)D@y5ZOe7T5 zP^rcdA=+*xph~T~qk07dhbx^52$iujC8syzy4|Y{j*#Ejt_U}aS&T| z4Pq+$@Y>8kW+Q117IQ09(GEuB;=`7JrZMPy9BPwPmls{REmlfwORq$Mn9Vbsa$ttC zpqO*=e_PoLp7?q*1cD}LLdj~rc=HHOgF=dk0sx24=l3`4j}LQ~)1-nx2yE-a+_uj@ z`PrKVVH*(3m<^dDY*!*V=K1vP<}?S?fN*9*^d`4CR3l(A+x4?o-#^Yv#60i&-05;S z;SP#+U}m@#qb$siKYuA3WyJ&(s>$Wm<954kH}{v18wrfCp>kgIa1}i#BObhXu{z8M z+sG`j8i<&17%*C~qQ#cg%hKn!A9osf`ugb+r+1&vo9<(6n%S`3++#`=hu&)2=Yv1I ze6-G?u>l~=maF$sYC-}ccyL&|wzxQC8g@sS=#v(fy}~UXNH#Yqi}u!lbF9wM?^IcPIYG zKm-cD{j@`LSV1@d5OMkHK~o|$olpvW^u+!C`RDz~Z;mOT z$Ra#FGo74?a&YFrweKA)Ur#k2rmR;@3d9PZEKJo-gPrDZ5eii-opDq$M&@YVAL0XhZKNI=LL zkm}%=k%dYn3{*tz-+q3-kx>^vzB%su*Uw+?!^J39?{(VF!(5T!v7NVvzyICk#d&Tc z_HdvPOOs9`NEf5GWuA=2WG<+f+dU_FJYMJE+^-Hxs&CB*^@$W_xEEtGqEcZfXI6%C zYcgr1s3;{s*Mc%QyDOB)YR2%qyP5shfBxn9-#osmbmaN|4t)urtx`*H2KRQ(F)`rb z2xep>8yW7iIY9{Cwp26}Pv^m6n-zOM_E6i?Hlnu`&I&^@z&4|hqA(1N4;%k?(kWzZ zAMYs#jWB5dZn}Q?Fa)F+0)sOFqH@}X8yz_fy15Ae5aRCpkH2gWe!S?mLCS*VQZs4F zESmBd=l%Y?%Rq{;Mlv!{p}k7BfsD(iFJ3>sifB0B-k;qUkibl`6b#w~Y;)@N?dNmI zHl=B>ievr$@rAxf)tZAWR1}Ldjz`e5P!RI!$0w^JLQpV_(JDp5sT5$-jZa(gPweHZGmYHG?nvi5yHpZBXj{P>Kq>kiYwAbSSqd8kns{kWKC zUv+kxZNqu5QH|?*MB+ZzgFDT%;ihULL13~^MjPRofilggr67YOH2fBoBwCCxr_%zUC8uJN)rO_8=f^MnUtAecZP`BG8I%!WnlQz*u108_qX>Y zG7y=GY>RqvqQ(XY2rL)RU%z;G5l!>h|Xkrzn_Z*Gw35_NUJ- z$7ydy0vMa=UR8 z$JG)!fTG;iwHHliVGjnaLev&0PVVnN^M569EM;t;P9qdR2qZ*;xOjEVxkv9?pREPl ziUdR>)P~Fq&>#((A*DTTKfk@t%V$p_D+GjQN&^Z;z=SI1J}^ve_h(H+00aY_**mLg zKMl*_K(5!XUO&CONNR5L1R*OxuzF!8O_|Bs?|*%_mni|($O={Q>5I#_nJ^ONsB{JU zqRt#KTxAX}SFgWo0X3?Tqj?gw7OZrj(50LPtR<@gobP`5x`$;y|M6Ms?&fs=@q9O? 
zuPzUB%Tz(+X_wFY$RB?5-2t_YEzTE*!v(|Vu?@M8ZZ55_o3ph*7?cS_kpLMR&h4W2 za5d26bc5VSTFr#}InyJfX792HSJ(hQ{4}0j9ePLZo9{}OE4w>ov}}>>eBO2hp$Nx= zC(qwJKKAq+=W?LLGz6j*;AI5QS@%D`I{`B)sNsO8OHZFa=;u2%D<(-M*JvrCFoeAX zFrGYr(ur>6Xhe8#h+?V9hpiiog6X+{N(=MD+pidu@Z>KZuDJVjvw!<~j|3m|=&^0Z zh;2V(>ZJDS&5I+qxowg0uwTTenpIQ-*%<6Guv8F6w~-T28=XS!`*K*YI&9ERu?Uc} zg={uh>eWgNw;d@(0ru@5-(S6YxVE70Zr<~7(I57MXY`R3me241^&j6p_?sV}EIok? zV+#ZWAQ5I^EbwGV%t8*UA|?SqNmxn8JmfB?XpMc&K!yXN5EfZLHLOd}?j!7+0;XVT z?ii<=o6ny=@IR-F4Egl!ONFR3b7oP25Q9Iqk2H3j~nPesAgsNhfEBE&J)uXkKNcRDl zQ2|&W)jb&i$0E`QL?JhQ{N>}cAzc3MhnVx1o74H`loc%wR~PZ=tgvuC!+S+O{>_U^ zYfc4v9x@Q4TpdXVp~Y>|>?KE-`vFrB5&}=><+v>96=ha9H5Mx1pa38$Sbfr3XB1eu zvjWz~KYo1t{CRJhzrI_Zt%HY0M9d6f3U%}T|Nf)@^4~tbTB6Zz29aSog;XVrlM2{H zMnN1wJ5Vt}li^m3xtx0&t!)-mZKMZaO32}q)FWE==!puK0+YjI`gC{t_~p~x-M$t7 z6BawtbH4vf0I)M)0(Qj3tLw!ks}=)@N)ut0#;#~>Ogr15L(C9rDMyX@@u#nw;bvrH z%SPmE?zA}RpowBy%F2ROW1ka=AX3Rj)PCA#^hO3nw)NSIA0J*W4Raq;R?yXKwH$C% z-^2GGem+$|n2_ZVYWl(V-({W73%Rz=NX%Y_CS(te@-+GA)su^{*ojIv@*GhBvk25W z0JrIE3RT4D*3UnG8=y+-55GIc?WYgtyGh)s_kPiRvmw$x@8mc!u7CLM@e*@(q+7;B z64##RZwCZM!Wu{0lX4*ii)NPsyiP*Bj^Zh@4 zdieU;s;~cbfAi{Me#PZwTV`PN-M;(skN?ltH~;yB5w2A@? z0#ycRxmhNfDx*>))wqR2D5gfrEE|;~Wp-yocqV{>LJg0b^XJ!wo)ywM)02G}ei&TWcO`CAYISa8Iv(TK1!ZJOvefs%JNo25PB-03^ zr$y`l5wKbyMsm_%>AVk1i;ZNePoKA~g3&Tzhhw?^{)g9B7YUC!M=?0vl!Z1?&1Ik8 zz1yZhU|LZoYR;=yj}JU~Y3ObgQK+2U;lhwzdz*gv;zh0rSC>=d-ba%ip-{v?&UAwb z%4jZq{FiSdD|R2=yty<#e)zQW3SUQG57d4}YcWSD`u%y=#hcd;j?p+Wp<=D!Y^{Fk&F!CbyXo6mx-Rbs#;x&OIlfJTK?}_V-_2 z{Ptn{@Q?YI-(Th3*UPJ=A*34f+s(iJ{Z9}7-M@KyOg2PRp9v^pt~2*$11vd(-I*Mi zbPV@BV`MQ^Sy(C%o(M<5OjvY5a*mkcbTo$#p3Rc5$DG@K`|a0{H}~6KyNeP4{>KcU zSJe3SUZf!e3EFAr<;%wnPSFO!h*AQG$@458bDn2gT1y8NM5O`q{>#swwUd#pgH34> z$qrG1#H4eEQ{*H}xwMa=WY}h;S*UnkD{2%)N*aC#2Han`%f zw-SgMCBc1q&dcXdkJ(Kut`rKPN)JN=-o;rI`}5})ixX6Ef+mnNATX*|OlFZ@hFeA2 z()*u3?rbIGvp-x>Up{}l8nTCQ+i!_vS_JphtO>K`r_cA6 zqGYOALCtY^@$9lQm@vemj7nE=fO879hZN7hf6~kd8l-GsDJjCB$6@c2P&CEJTE&{5 z-+rnBsQKbA9xVIKueW#Z2iS{d?I2?XdaT&K-R$xF#l!1mog9D@)xe4@8Rs27ubUcq zb;xR!L%r_4e!Sb3r{6s^M~!woCLObvmXdn7?S$LtGeZhMbQb35Oi3H28mRj45C5>g z{9*sc_V50?%R_$sba~way8H9HZ~NPK_mBSO-#%ZDnP%FE(ZXZhBM^Ivb{0TQQWG>r zD^h#9O;QwzWm*A-_B5-*;>>f05LRXd85k{UZgqeA>ErvaciW}`3!-7uq`?1_guCE* z_kIV`HUOYk*t~lA%n<+t1!y?36a}0MYHe=Wd+RW0=7<0Q&riR6DZ9H>1nq#skr4>b zNdgp200eBp0hd(!5HXTH_WNzxNv1_bnn=ghcW>T2eb9Q+dcfKvTs|WAtVg)(yIuIKavleya!X{_)r2qt}1);%aG1W>NjTc&^2;V4+W9^m7`MGo2?A zoaqTr2`SP}&&3|hGOaEGwgON@HO}ZGB#qO}*LNR3-kmm;Ts8n0v`L_X{~2ah67%-M zaAlDWnmJh!haaBy%Go&6lvA`5f_G@svDMkfSk;gs0n!9!eS7<@%y7Wzkd5XZ0a~gQ z0LJJt%mBbhnz4WK>fSP@j?1p^Q_e6UnxvF+S0yAyTbG)V zF3ucLu@c)h)#3SXfB$8BaI^eR|HGp_{qE-8#n_LJpPJtN;`P&K`N!XW)Oy%q7;d?; zZ=sZpoH@veVxVGjq+Ov*}{x za#NGFC@p3YnFz{dQ{Ai(Odtuvv}wp=%qc|7c{dyxpe-v*gTY+RpMLS}r)SHio4c7g zk>yrZH0Jf|31s(AFp1$zeCBw*RpWKNdnoUG|8sy%!4u(b*qK!AuzQ{F=R z$|j=FCnp)H$?eluSA~L9`|mv4;q>-;JERgMpqd7vqmFNuhwgQYwJ!hZUoXC!&wlh|k4}0foMTt0HnVLG zLT2SD&Dfxm@@?kBw#Khi=g0;M$J*Sx2@C|gaN@J5^$m|PhY+K{C=xX?{8;T zDg+dX^!3dqFg8jsQRy-2`HP3k5^#f~G{Zn4CZy8z6gHRJF8v)X7 zFw_QI9d_r?yVswJGeO7G^DN!2QAAkXnT?SPTVko>oA>Lwo`3Jw|BYE<-xhRty`S!c1wk_7@Xlkg8~jY7!ZnvO?403(@M>8 zJQWh3{{9#5>-;Z%`r;g(Z*LyJ>I%pxH@>3^-83bF1lXo5%#Rh%f}2c9^ky@ba@S&hpK#-xWqjL_y&?P5kI znjE{e&vtixvk&j~ZRfi>vEMr+rVbV5HhV!Z(-do|KvZysW-nAg(`$^F+q#`{j<5gj z59{ua|LVI3M7?{xe~3KOrty#e_4tEt%^!UJFc%syfe65(&nBA=AXc-cQjQ9#%%an^ z$VdU5QZtZivqy7IA2Nrgz5Y8z`^ciib#i9=6Xk&Mdf2Di)K-4wxUMW^UAor|*oKvGZ* z1-n%vXTV4ay+o8ebs9CQ8mV*uFad=Df~}oD`TWKC`~Uv72sqUcWS0>X37ZqPf$8CU zpFP_rn_+jUP!aM3SY9QS1}1fW@$g=6qwmMA04fQ&F)NMAkx{|XV;a3;>eJQ9c19C~ 
zU2oZAK}SQ2+Qo>Jr*iu058vD!PyEj3PoBoj*I(Zb$Km<6E;45?g0^bY$n@;l5Ojdk zzRGqw9MG+G8*zToR7=cOiWy=nI7iQs#F~whWPzS6vy?|_qVDc=tgrvu?~eJypZ@TW z6PLd^K0beNKF2!$@gF{Y{v*Hm;q&A!D+iHqbxL%#7>FqaBn4&;*#wA+!lGEafZaqZ zRv-w3S=N{nb*xX9uiszataC_)00S@x7&I9qd+!Zt)Xm-G|6yAe1zoK(jZgpt6@>Pm zJ?qS^$3&`9Yz0F~swTA*+*(C|3^=Qq6_p=;e+4+93WUJ`g&j?zSO8%F1_7i^h=7?A zi5XK^*SXC|%1o!>RDy8A=OJ%5v2vtL+X8dt^jwnT)_9EEx${-IRk)Ma+motw$l|eg^|St*t{ zefs*{?eT=qzy18)ZoT~KYMA?n-#Ke?1}A207!?iMP6UG_6RIs}oJq^9?YPf-jGf9Y zYWA^AfG~6R%HgyLQGLc{I?Gk%%^2(5@$Stpe|w70|J{$znzp-Ne|7JB&ku8KQlb}c=a?&6w3<|JoTZh1)9Zpuz z2oZ)^xmym84#%4r6={RC!>AsHAqoY9!aW`ShZQGU`Buu z#L}>2vMsx_2LWaUn%W4?L@12G!VJtjCDeP_!7yvtI0JL3sqWdzdH3$k^=Z|!@4md) z#ohal)lT{J>3*^LMqz_6z?^y4%i3l3X1Lw;jbtNS+tzpYzz&5A04NY}_tp_bqs@#V zhg~_S%2BhORP*lY>YHD@KKaE@e)J^kbou&s=ih&_8>j2}+h2aT`0?59kG^}cYqYwZ zM4<>`Qn4-;D`nOs1wuuZma>Ai8yOYNw!EUvnGmtvee?A<@2|IY7G&6f2onH6I5OA| zyG*iJl;+&FllZ?$RaLMo+6>WjC`{28FCLvK02siGd7MJt5? z2Zvc`RjQ~QlslAy0*<+qT5JiidQCFw?$g^3+euHq{qoTgHQr{fb@u2`3C(F)C`2%H z?8nxY78KO6Dd})a<8<2feZ*2Z8>a(oRawi{SQv{^l~Hc1Gdxh1r?GMj-F&=#{m9S}-`_8AcSA z2}-?kM1#Ys#Y$5O0m?CFSu{hi?FbtbNitenI-yVmHzOGer%+A{1r$t_Haz?6!IDWp z&A=!qChdv2jd?R_MD_-f4mgkjr;(U>|J$GK(h~+MM@~fEzyypquRO@ z!4XkJO#0oM52rCNe(>4Fy~Ox5&grLP9 zc4@+Lo0)5JHo3Pk1IXD6rU)1lvkh*=I&tu7y}Ww+t2ZZH{K=2+`*`{3hdwb!{um0in z^&B<@8-PK>1j*Ky7Fd#VHb!aIm=nXcVE!~gz-bsHjAolKQYr3VJn0d%9VX-?3{;y0 z#~2Lejx49#%uK~h2%p}35|oDME)WLYA_>`65U_GYO9>^w001BWNklX?-W`rV05F|S(%F4qRpY2j!m%$>0p}N*L8dMCEi{e|>#- zH_yNS*^{)|=b)Zce~93&^@1M;UHW12cC{QZK zoFr3XJB0`P8a1uyx$?AaI9cmh?{2SdPsAVn<%_et{>T6K54RgEg92ax0FjwX?pyS2 zf~Xpvm?hDI1(^JIWWXS-2s<1BB@If%?%DGm21rm5<)}6l>LO;96^F1Xrh>uIQmLxr z?=FR;XgbQ_fVw-<;v`{1L_!HQfmta*DUdKjfWvLiiSn$|HboI>SRixhy9|Vqqj9te zf{K8gD7YwF07_e)zj(1@C)=>iEvE{1W4j%jMl^xZB8XrBbg(gE0%^swS7&{&U`&QM z$+k6w;6%43X6_QB&8)BqSK5TqChd?vy!$k+x5F3Tzkk2eGA%AKhWiF?u{niMErp$% z66&Ni?QID-dag(9cYCDUbc?Oa3~~0**lS=nku%}h%OFmO=`pl=oAdVS)0;299+kVD zrd+a30|p%i4uAF^zkksVG0tVFxuQ>E1~#hpTQ~(NkGo^6bDiU;S=%<&+f&`#X}h@{ zQ!VgEfBoHi?VtYc-#%>xKs0QcAfxqN&&Dukt)4>@vSAoDV8GyC1;bF#tYq3jF-(&N z`->-?P>M~}*T7sVO$Bl@>*UxihFLp^VI%>guD||hG14;VibhfFnVOV>v9$sLCX9J?r|J;_T~Hk{(BetyT5yVniBz7 z01)iGwLQ~_;aO`9TLNqt01Oj0&A&0T2s_LG2!zqL02qb#{OMjfSrKUwArk;AE5>P5 z^tLow2?9*!gaKc@{wR>dOv400n6acdSX8qdMJHi~L4XRAs#J(616c0wW`UA#w{=jA zrRIvh^uq-jqa6;ogT`DXnKTQ;lp)wI7QAZt{ONg%APmhq&C7Y5t7sw`Is!&eT|u&% zx`P3k(0uUu!+ja;bf^++I5A=5yO zyxO*!YKL4~8>cC%KmX!?{L#a!|MCxSPc{G>jOfd42g@?#}ib9UdspRm8H_&5hcw-#3X;=#dbVorvXKum{wWdv1< zwUb1})~FDQ4V5wIUQv#|-FtwxQp4XwxHhc~Z3tYw=d06>5N(gXnJ ztN-+uFD?R&ZDe&|X1Psn6(!BXrm!YGrMkO%|BEj#k7G!V*dL~PZRNo7@X4Ql@8RWt z`Q?XM6wP4YExiLZG-|`N1qwD`8m0jg07xYGvrqtLfHUEM1115)q}t1;dmxk>(NF;~ zCz;cBZClRVvv&d&2)7b*CUpJvr3F&00ya=B9Bd&FBV{nGpiClV1Yn2ITv6&S&ldL& zZDYXVtkj0=?e_XOa<_l9%MK7rr(&zAQ9wx)r5Tu6QN@Zm6i=RBL};5DbFH#Pn_?hj z2NQOeLlKS?u+Rxv6X)N0)WS|~ic$t=R+w%~Bnwio4ROG;#Z({+P(+S~9Kqwq54Y=a zKK{|OM`z2LYbK(a00TNzNzqhpGpeS_$C`7U^R$jRw>ey@{@{=P^zqWHRt;JP_>6@?Kema(Hlb|WUGz}mCg7N(4fBotKBf6|KkAV^4xxd>L zg*HK+V60=@y!-0ww^zsG?OF!%-l0acY8e$+{^TbQ-~ZpgyqU6rSa$o2^4z9NMmYqC z25bNT5HLwJN;ZFnG%Eo*ARwGFLsXl@^8DFSO-6*Mka}fKPCzv`fW*GJI}|~O#b_S9 z`syRaCYq)IXUs$k!ldo6XuzQW6%}-n#SjfvgW*+#@5is;*3Nyql)>eF@{_kaBI;vpkuP8poR>A<#1t~oXD zJl9&cS;rbRM@`6rqatzktv~tR{T5~=+~hujVwOl?Bq6cHyNtQb5mTFL%=yi4e)Db& z!2$@vfB}OBNC3eHKl@+4b(YOWjfM)dF^PWKdCOUC&_2Zp+s$wP_3L9%cbC_*h`e|- zZz5V_71sFEpM3WA|N6I^5)9@p!4Fc}1_Xc|TSoJ9u^M%Y;aAW+SA z(Ea7pT`@~&<){cZ_bpaa8tyPhzbmN zT`D4WJ3!;z`}J~e=ihtz?0nJ8RkZ&n^wr!nb8)K}SHxE~#U>kIEA|oRE+2=p{ ze0QFl0|6$@8Eqoe6xxzG#S-nZOF389*tXlZzy7yRnh0@azP+i#z5V5z_csN^#TWDasqHXps`AG_fARMJ`ORGw 
zXhZ-IVAzHMz%&hmh(H7Zx)6$;PXhl#kus7%m~wY0l`qbiZqzvHh_wP-T2Ib6R8*sD)f>M!opaLv1MSxuaXNz4dd$w`);kI`79#2L0 zgflmetLvJ@opJW;@xEH2A}c_GN7IACRx>>0ZKO=XU>PwNHg-1^#UgA2Ee%j)80A2i zMp7LaFvQ{I*=Gl$VdR`Kdxgwk&IA`Tr^2JZ@&EU(`l=sX&SU?zyJV(Ah`R%&wlu9+4mkwG$@>g9cga}&Q|O6 zINg5vufM;k{fj65@YS17tHFy`_4Z`Igq`30>1SX4f3J@=2*addP^JNxCJ;gZ1ONcR zMzTyvoj-?ZIzWYKHCPcA6QcX`mkV6I5V28#aB?hjV4HE4QMs)HXFJl0*%&aReEZeq z41;OdBtd}cLC~mjq(Dc=2t}e*v|Y5m+u@*2*5%h9w&lrvsd?mS2c>m+wUy&u8;2(k z_7SHNBM=PBRau&tmdk5Z*$xVj1EC#~M9ulr;NID7)Il5-jm{9YE;=1|j!@vH^r>*WkzkmAh z>BH5v9J_lu4G?4!e)1Rhe*NWL$sw2u7=R6ffQbxJFl+-PTZ<~zi zq+lmd5$(y#y+Fv#5QQL@HRPeK_0~s@ZRCf;4}}baoIyB>VKyX9$jYi# z4be_5GJ5Q@_oZbEgfm+C^4(X*<=L6woP!+E<Ds0FUr?OPTq z9m>>5@$UB1wpxu{2)4snTWZm~dUtbL?`|CAkUb|AGHlT4tOhm*kU^5^=AANwEieIJxJ`^1lNE-~G8bX;=La+@4T9AkAeV;ASBAoyh(TMrY z?>{Wh?oCl;(n{Cu_1sib@$&P>3x_vr8BL*%fDCLZORCerM2tW&O6kZ*Ds9uS5snEp z(gCsAW<&uDkMI%>hr<#ce}2yH<%A<{Iqh8)Td>!8?I>!Y38y^0-ksLFJM8v}G-5g1 zB@BG}aCJJ}-QEp3EW)86nh`_>5J3haoMg0C%pQzrV5Gq=w%yYg4-T_#zqx(!qU}x# zlt7F&JKCnEsDRoUq^2sixz?>ey#Ms(^{3-e1vm@?FlfRAK+wz=|KYE`b#I3@!#h<_ zp_sj>R@_YAwG(bWeEH8`9m~tvxo%Ij9`BCga=vRVg;_P%_W8rF-fjW_2qb_Anb3eWQEde)Txv;YoL~YDqFx7EVQu8gosurXXfHg6OmeRtR)p5;T$s(_yEg+-yT! zK+Nn}7=G}rdp*lJPR9z65q3H!=rQ|w+k-^d%5B`OckA7@oGlp&9rDnlIPTtF-mUZc zv=&H{iF6oKbanCBK2t=ftAcRMG@QcR zmoQt+>C$O#>*c3UZ@>C*v(ADEn5F>(pb3B=`1r5>$svn!y+0TS)ei*umDFdb^Mm6aN02U9J|#qcho!eoG~Aa8$v zSqdO$I!tCMNoPkRiJpDg(YXY|&FH9I2(?oLIMIE0N6y#di3%^ z?p(_lh2osjCZ8y7adJF=dY~F|a>kga_3m`q^4>nnIE!riRN?mR&E0l7j;-imC4)qe zG*j7NHZn3Jl*m?IMi-DF6tfqCXOCW8?6KWmzq|kH9s|+W2Exvf08JPs%P3kJb<%co z`{DBPcOR~fQ>JMX6im|u2$*!7|LK4J{N>8UUjNIluU0t7 z!>+gO_H^}W>q~e_wgKnE!}Up~L*j0Kw(q6un-%Q4Fak-`DlTW|`<|&_%s@nDxC{In z!<5rT01S>KXaG(#p1kUcP#9FYvZ%z2BCTOsnbDe4P=r#wqFUYk;d&MWVUvMKQ9C%! zypKJ5M?;$n0W{8J4)!q6M5iOIIp6(%Wx zYj_k|FDYNVJa6cOF*wS1V>{g*_lL8#Idk#uu-J8Z{oy#q=G77dm5eZmWP+Kg?8tz7 z1w=Y%f@Ih;5DZ6u^5p4xj*oXA+82)wn5`V{>#h*$62Q?%BiB|XOs|{PySwX8@4tR~ zb+^g@4VW+h(4Y|xk>}s}>0iElxFbPlL%CaKud+EQZ$r12zxn6?emw{vc8~8L?mm3F z`nVdT!Jrjp=d`IX9QQ5`EitY?tuxxbM>3QVWhbBdGz7p;dzIPCWFJAU^AC7v?L~`rDBd+ zS1=Y>YcOmJh$v?T3qgmHRxm^~g)Li+h^7Dyly|S5EKTVVE?vL-u*KAAYrDQft?k*X z`yJ}j$89|c%=NN#SemwmzJIVZv&J`<*Q17JnV{211ewW3L}ONhZW7TTQ9&@Fu8~ZE z%Kh2%m*<7sk8k<;(z|}{qxqvXf{zxnIL8by~U=R)5q7p_|1ncoObLkE>73CHMBvM@tvy)#X~u!x90q@6RHaog0H0Mh65VW+=eFM#|xwBxt70h|w&E!S>+QnTZ%c zLlJ4CRcx(XF*Ye;pP9pbcBOf%N7`6^n#P z3!#dffu=AC2gy>3g2|qQs6^G_*|UX6TjBcn{`D4p-W~hHXwX)Vzy0XoRBdlR-3`RW zGH=%EOE0m-`J;;@=lbsbQDY7%3SE(b1Twmtx*BFio6y_|pu^5mb5Q9J9jMqHo;|-f zY{z%6*YCW%7l9Pf%1(vVY`~SQHo2jSr%~JO@%HND+c)oS)+v~PL4zh}fN2_zM}PeI z#UFq7-XVH*C`_Z3(x|q@#H{sn`{Dhse)Vya5G=4^>gFggqxTlsa=$-3*#G`F*Djlg zd-s4!iw6qg6*_z~~cUWSOvG{c3uGEd zQ6{aZd8D0jh=fQlTR;&?RpmH7V&f)8d}mgG9jYh_k-_cq5~f1jHH1{6TZ1toFd zs0jUZhC*Uc0|5{K4hRtA=e_HWH+y$?Rp!$7Sx1@9cN-%InlWJvb3>+^5#C%7Xi}P6 zpo;J`p|CA?52#C0K6uxh$Kz8@zOH3^wi&?MJ7s1dGAc?^K@_mHOiLsi8O=?>l?AXlYj!W5RqPQZ;-0wrX5^J)WYS)z75efacb-sfdJaVT1S`R&=rn(^@I z%WZNq?P6Ipy3zZ<{n=Tia=E^qGiFnEjmgRiC7lJPC)cHENW#GNmyroGXg07KF!wKM%bXKm6*yeLhAI#!Px&mynH9t;4iAxi~#N z-E#fAzqwu#5V_k-Kx^|H@M*IdHg#8ODQOgqf+s}_cqFqVgFmeT#BdP=m?{)XB3&ZS z-%sX&Kus{=Vb&;uN@bUnW1)F^nmjE*cCh_$onl6@{p-_h5)Q%;C8}hgKsZF1R0A-< zEZmSD7FA{~j!!KXdAfVfJ?7`559JBv#P`Ui(#ioSi z)9*h#M)mEsENglxVJLNYd0v(3w^xT~On6Z4CXfM`kujuo4GCD64PEK%j-&{fb<(0( zFPP51I;rt||J$eYKX^OQnJUO4$e^Mq%~LTh-kWFV+FIPl?epR4ch~pNJpcdz37Xep zwU+Ar`LN{nr#~$>_|bRggT@jpT`0?#-o}W|$UHtRH&>s2`P=)ssSF0Vt#g;{&Q32+ z&M!Bcu~e3P_0}LcXszAzIDVfrd68}SXGkFjIXuW`h zi1ZZGFW#IgOQL%k5LIEago=_%DOKE(E;_mgdUzV)yFxRi#0>MBvMfUkAdMXXmqB$O)`Y`1)cBh+ZR3?f-V`QZ|C8q&gxz46c6{(kQ 
z@C2=!XKP9(gJ~*aG6uEcVJT16&Te5aQ3NJn4t?h%`M?6%u&#_C5aF6E|$D20Gg zmL*zbmROU9-mM^6!#(Nqfk}3Pk|ANTED5tPhy+3uO9$Lg1OtV+ym+@I*~YaX*B^fW ztmSdtjEdYiu>bbW)}3qr`1{A^!}er~!|I$EJ{dQcLm`fjpAV4;2h+S;T7eNAmMU$G zG%}T7=_1UKW%bhG36PdzfuFp&82tX~H@EexAHVgjXh=`^GQj0(FsZI&CR$IgvUImq?yIFWMbEqC z>Hg;P?|%1iKpl7EZt!|G9WURX?S`7xo|)Ckak1T5ti1~MrmV1D-YG(0Q_vR9m4!2ky3>HHku{8Ipnxo4azzfHF$|U6JFK|1Di?aLe!|)ER2#-mXPHTkz<#OGRW%m{`5popMG_Rmp}RXq;g0zSaYZi z?1O-)4TdgT7&F^*JU=}hzI^%N_Hpe3K;LhsiS=YNNNie`WsUSVKm8xxla?eEe!oPrrcN&p0d75ZUPjEHR&))5s#!NFIU|E8$^v*!8 zNOD7>SD2vE4FVPO^;HW5BmDgR`<-{d(;`_V(ZVg^*5x)A(GgC?*qjuTNKXNMetZtl zZH$b&`!;UpN89c9Q(_FSLW!}0O@^Y=Fob0!?D+r1ClVK{NBh~?qt7^&UA z`-_)*KK|;{$+utcIb?M~S+dNGx@cXV=Jl}5x%x1aVqouUQ`~MZ27`Obi)8KYKmN^^ z6)6fyfRLEnpN;#|i-A%}3o=WXfPi}@l%5H6{y9+uAW4BB(qIw|$)xSy?da)XiiFa- z6JRnuLM?LiL84hCYpA3oasF~McOlt$`r?`Ri_)ByOm~G5 z2^pZ$3Jkqws`i2evKhOs538e;5?XJcIQjE3OlRA{q)4H097|Qt=n{Q!6k7D9FP-Mu z!wcvY?8fymz;1FX1-UGlNr$B|DYH~JS|*(_$vqiLp1nLB7*es!cKiLu=kUQSJF@sV zzI=b`X&JY_yI&|jxs2x{eCw97vU{-~E%W)yLq-55DosS`MNWh%VWQ-zf{>^Rc96-W zNYOJXx;8If@BRAsziiX=^1Ih3#cD>Sk_JZ(t#-J2X$+g((s(?uE%Ip8@A8valg_O}@setxp+w=2bBf>J}QJIX|iaG_LTdF7dm!btJ z56==ziZBpSkO0r#o+2&i;YnEs2@6XLsxQ$rj=O;QYGl%2tu*L(_e1MQX1C4T?_Qcn z$y<#gS%agbAsqojH?uJH0T2+SgXr1XlIYK}$!e3HZjNKcBZt%dwkU}ugE{O*n{B2R z+6WsQzBmtUaX6J$8{I3iM}NqmrKi9s$|6J>X_jGxOYAT}@N_w8-YNCd*Dp{S2xBbI zKYVv7I;99A;@?Cq-^fTz3b76BttE-*ZcNQBG+ zX%NjA@?@48nUcv;@^Ap&17&yqW}7#^{T(mQh8LHwZP%+K%Uo;kgSA2=#ifLGt`zkQmmzWvibd3Rpy`2DZe@BZMzt95VI zqAOP{4YRU=#Y|8)^h0|*#)4;lySJPBQVeZg=l1ZgzkbSukj66BNIswl-oCY71Fi2tD(3``lBlm|_qDP3Ur?%fGt2~U#+C=m==T4Y-@&C3=tCP|qn zi4E<`)q%l=ZqtjeUr$B|Fv(zo2$B*RE`~Rk_n|`!WLju#@pSvVIj^h6Dr+`BJZh}Z z3%8e>!A*u-TOWK|*mGfz9EKR&Eys*@=vJ&GN(S7mD{Q%M6etS<0Y?dT)-ZT?O9AQK z(`_ZQ)LsnT#n`>P*kmdymHqg;FLQLOLo;_9wj8!zO3||1KHMKyJ0FfSq72p@IbMAA z)dnM;?ymaU=t|JS1$i1h0k(of(g9j0ST=P&2BCxlNmCl&t(Nnbm;J-9K5X7^+WCu@ zyJ4u!N)F9Qz?w&u(q)TvwCHQL_4(=k?!ynaPl8ca+%{kRPw#6#Mp(xw_4#?#cy@YrzNxY-0VW9w5|YswxkRqLCI3P$Fij-VL{OG6veS^Z{bpM; z$fQG_-DFHhB*A44n#Peu@@_!F1$onQ^Kn%oLfV{s^L|USsFFOpn=G@WNQgp)xd5SC zbnDUAqjI+A`{nfAuB985%X+ue0r$Dto(+|iHPe=8n_)y(k0r&?I2LvTdt1Gj+?Bn^ zlc7F89X$plWD;bsq%?FFij)A8js#M!qDo5XQT*-M=si7{4ae*IIgxch(Cc_&Yh_{e zXty74p19d^?cgvRSt_`F^VMZd&9~RbY%)Sdc9%R1Ak$JzFo)(erEzyQ-aK|#$RG>K z1SZVt_T}Z_hu=Kx-yQhk#r|?Eh5M3FR(a5+)X-6KC7Sw-Y){AM`|Io9-W~`tNiP@Q z{qb9Choxa{yP-e)^Ox06{^Y+O{ME~$@agIwznuN#>?9GnqRi}?d$-8dPWHL7$I!WI zJS<0)hntu67k~T1n#>M_6cj)~Hl3VQaDIMVdpSEje^DkUI%%D05W4lqb#2+#$QDZe z8BxSQ5mPYBgn$Q`5v>(k|{ z(GXxi9=rOgSb|Bq?@z6)9k5C+*B@`5r?XlzqEypi-Rwda-(KvXxc#v7@PG))A(;tD zCLo$AOlDyLoPFz8w`-#XG8(MxC8jCya>Foq|P@vX#o1 z)tB{ox&89%?;ir`uB45x{`9+D+|TQrYnP7C4-2<%|Cb+?2VS0)SdPE^`-j)xzTH|) zp$Y9cQ|@M&8k}wE4dlq{`*HXD;k^9qKYdv{1h@$j1yJDTVmoBC=i?!;IeGbNJ0>AH z5(xF)TWc*_^vs|_;GdHs34s9FJxPiH7zW$Cy->|CrJ-~fn1Cg_LQ8t}0OcryES2;O zvxV#R)iV`}fbTE9e(MeZDoI)f%n}l09m&pyzP3XgmY&I)j%w(4^ZENt*ouLN!|_mt za#*-O878Ae9ej!|4xz>wErcxxm83;Wd4U&-qC}x5kBBQ?`|a<3ICP5wmx{mmiA$nV%mXL2>y*;ZCGZmcsqP}R2E=59&gaQ1^bPxg|13)Gz5fBJ*`_+l3 zE7MijjX}avePK)1Do4)9VJs%385O3;`R?-}EHhI+oqhdv89QZR@T8c6kP6R+7IQnc zrLURR2{Uu6K-+)(tDsM zl2VvVK$`f!cen*8h=dW!EI^ni6E?oTGzpngS527|X5AyB3vbovE3|(`xwi5 zz;w1B>2*+MaYH!z3Rkp%m%%*)ZRHSQ9JKU0RJmJP=<#vQrW%lm4hTj;X*4Bb5S0Z% zcv!N?JOLT)^v!PMQd)eu?S3-Vd7cw6uUpmOcrany_T}Nrmt(Qh(i)3pJ{@_!FQ?zU zn>eobSGNnv?jnNl1VsR_MpAU8%zJe0zWVm~@oE*->7peBE+#X!wXNEI{enS53)A~= z&#F~~(V1C-ut>AsvgWdCJ+{ZI&%gQI)7;7Gq``~TM6JG=rqh%0WIt}L0ulg;Oqc|Ke@;sSAVV5VRDvLp4hhp&7jlLJ zlFPz{^yU#fwz?gf_uh}g#vEkIRWhjPcc1T}R3uPNUVV2_l!gG9issS#@$q3^)-H59 zOEJhmk`8x{X}v!Uuf_&6AKLSQ?#G$u=et^2YSU^~<%txXvjh$t>dM}lpafw(ioxrU 
zZo#L==LBU5ljsbQ?6o-xyCKX3LLfXk3p`b$N`Ct4!b@bt?bTdZZOrEo;I=7aS(XkT zE1z%gK1bNr(+r-^kE@SmeDOzLox1S!PluAb?5`NKyca#dUR3b1ReB^N*PA|?_j4>A&1IfA?p9beW%@>sfic(&6^t!#JL9&o}L7Kfh}Mh_d1+b?}0e z)MX(sAxH!yrc02Cf92&^G&bZ$|)2&hva&+L~M==P;&mEdz%#h@x3Kjm2Qax}JD8CKVZa zP_6NV&C9dV3w@mhx=+YdAGpe^Cc=V&>^X<}=-wz`GNWZ+zMCaSQUr=hBq>;Cx+iFf zgu^;Sm}muH%4oYcmy1=;;vKuC=6{^_^B6`h$td#WU zW&U`4`Fdw)zy5n&oUo5uYxdaRT>5V5uH`Ap&0qh+RmY3}?N2ZA?&EMdcPhzo~s)pO1ZQnH0=ItIQ$+6qc!4 z%1vfj)7@!~cGSt#Vd?99px5PCcCSuh#k)rrYcWqWs$@3|&Lq+a&2bP%7sJ@grU+f- z>EQaiB zFnEwa&e4|d`>XST>o5LkJga5yCtB97*I$*2EZHAcYybS$-_P;lfBlo!%i~9!pX%<` z6Ftg)KWv8M-~Rk&4G^66MIA^0$)E@Uk_iAP$iy@WU=kF`ze-DbSjvGgm4FGf1Z8Q{ zi`OIt?Is!EvT?xhdv=5CD8 z@n%L6$&yUaK&lsHnpvictI?5ZFl9-jWet*?UcR14ab2$NySBU0?ha`t#=z~AUSS0( z_7v$mlRNshl|(hUqWFCHc+-1HPArYnep=alX=M!ov6SXvOAEASR_jm)!4L!#h6E#e zuE%G={N>9thxbUfi=TY6HP!(g&Sp6!sf0DPWtgMqI_E3|(o(IF9*v~G}F0F=gem+&M~ zrAb7IC}G`5w%!_%)qO1>9bV`OOCle?e+WicDlk-bC`3Y{K)E6mfKV(^O5Hls1el7d zij}ljwJ*p4YrpTTIB0vhXTqwX78TPFzIrAsQ3OjRWi{D*XLa|mGF$azNQdjAcTh?* z>(2B*7J+~OW6IN7wQfWh)65`}xqb6?%B-2ko4ZxRw9#?x1%+m17zc|wh`q`|>jhCo zQ&g15vAcCcpFi9`k*J#sS=N)uXe|`6dSAGRwS`9OowdzG9D3p(U5t;*9X11}akm+V z`uMZ|a=j)X*=C0jQl!8n1OoC-0DxdX%0xf}MDec#fHHxUV2F|?z$6)R_v*Y_w~&UA zr!13Mk`N=;H9fJ(wz?;$v4BiEvmd{|i&R+fARU978LCf>X!jIL_>ly z$kQ9m!-cg#o|cq#5+x;o#Taf`U%%TJGOXWUEmWPV6m_Hx@X_Q(L(&Kjrl~Ask*7f> zOZSA@htJ2Lrk7{6So%H(m{gE~K5JcD%f2Fd1ao&9Xwd{{ z#QONGQd=7?&zMpeiaBhoPGtAq&0KJ;a5IufKmr zXnc9WX}d(JBr9pm2#Xa^4~tBMRgG!gWgTf)m*@M(b!qH}wfV@hd-1AJ2#+#c{_)vq zF^{y=fodi?j^TT1` zc3Pie_wMDU);K(6pRa!S{;GfV)wy}M?S8aRIZkwAnN;S#|EDi&QUb=4iq;_}1qu)a zijqmNBp@TWoljHyq-fKEN zNw_ZGoYCNoYY(?-ZLlgZhgD-3M9Y9tj_wQx1$yb0F$CysNL>H$um*?o%Q{SVIZcIG zHk`d%!H_*yPnikLD5mw^t<#1mhvoL_87%X|v)5rPo73}xObRTg@86$}v4*uglvzkL zS)}a15SqE*gE6X`XEJkaFjq3uVe63HtV^}^8JMHB`}=(S;mvtyIb@X^dmiSOZ?~20 z%Y1x(ynpy|eEZ{9`$rQhYY2$lEGvDWQxckC z$YR-gKm|Z}r<<~G>R_tDZgIR>GGSu(B*-9gX!K@Idaj;XAlBkv@voM{%Ijb(z zNb6Z?aEb8nrJ5k~W?5iC13N8^WR0j6Wf_*@3_O0gI>_weafm5~BX8ZU zhL_bNEj(!uvql>hbEcA5)U>wFHE+M%e0ukyp+^*@XWg~yyVEyY_S^gQ&~m-MdiQTH z$B5o)#fRAMSjTbar9A)Vzx%udAX&XQ)nV-+GEIa?rAPn}A%RSa1el<}ze*ASk|t2{ zG*OTzOu+c&-6+OV!xSDVFbc$E3T^>e)Gnh+m_$*fbj=^Hx};1b%_jiKB%l!Cg$xVN zB9%abUX4kyggLO!rp1F@_A!K3{!&me4O1tDkj4J&Wb$Us*w{6D&q5XRA)ToLnRf@0%CV=1;9xJ^f`)K;hbUY9IeJ4! 
zTR;Bt>h)JGlD4j)Ue5RP$D5OvTc00$qvHPi(?7l_IIb8BS2*A8t!xX)`@j3y#}*QV z;bMa}cK`wzq=bYcO#sp?m`0@$N(aQhNQXgExEWJ0h!h;*%p6|7+ptH8WfdrPRV^MK z43bHT&69E^m`b?neuLx3>qRMRLb`1&CAd;zGFeFm)9Ef`@ASb8AVpJE(I6E|w{;l& z@u*|(etLRpX{i>I3063O5TF=Jg6M0_7^pQfx)(sroZC`BDi5~@B?#}<4DzHLFg4b6 z0BI6s9->)@g#;1~ZQi~aT;d0a4!P&tGn% zl)?9PsQb}`tnyNedv^#RY*~7Zo}FSNwSZ>59JMX$m*@19(J)G;beNM?F7~E0{r?2P zY13R+b|vPu);{On%zOs`1PD$wtE6ZO97t4_~&wx;>>XygJrvym+##t;LLS{}2E4{gyC|I36&j1;PZ9 zBmh^qNE!xVB?-c?%s*IQM$urDqYM!Q4JOe({dA3i^ut-R1`nunf*IE6B?bZ0WB?_5 zwzi&5wSWI+r$=eRYFB+{*#S?IB%75WqZ%7SVaiPmL6KPCum~EKO_M2DE}12mC;_JV_X+okED{X` zjj(7L0E_+k=O>}Erkxck6{cB4z>2xrFqq2_sEfl2^%`%!AD*Q$Y97`V12S!*)F{A) zWH+>&!_gzAY>|iti9uBFsiPk@UGC@d(d|Ld*;}Y18YIU|!0UB?f9dLsBD3J_Qo@nB zGg|eP%^D9sJr1!B=deK2q+?P>1z@ynD3Wx;9+QwjLayc6FHU(VL>UdE8*Ow{xMT_l zlNEa_g=xTAYN$pp*>IgyuV4M^pUzJ{{Uo-2b@TjmYYra!QS@9My=9J(Rk`jm=K>MK zRxTrUX}X3jUSzS}?ZqbJ>XT%O$=$a;MB!}>|9b56`}9)JJY4Yi>T$JggipRTJd zdp`V^e|S9-whE*TMHf`C2&1A1uxJY`3#JJp%HSUiLCF#U&@4BYZj(jS>BXlv8ia=d zW!s~enwn}>c2q;5XgW&80s=DkSFbl@v5+v1CsZkgl*KRt)CiifBDbu@P!Uw2A%bia z97^?mKjZ$+SI?gH5md#Ae*gd=07*naRL-$=Gx}UZ4d=+Y-#*p5^Pw$9%b0+-In|y7^1_N0xL*OwvumtaZ7eYl4WWhX1FLb2708B z9YZ~tXp^B4yCwl6Mr8KG{^sld@u&0C-+kh3xw*Q!7A|KmFRpI-4h~oD9Mq<8HNaqu zsDy}^?3aK4=l9QVucNBDfY`?&r4efV`1I3fOAkVjY&5lU^VvHw+-0~xYwt!jvjU^3 zsv`E#Y$I~YKG&Rc8};GU`OW)JujgzyEK6UaS8({l4?ne+S2Y6LH*bIU$xSrRxt`vB zc=^0{Kz{W7KmONOqu>Gn&_J}-Y|?-UgEoW`kTO63EU_T|9YC`Lnk<5WlxdbqMt||i z6S`!_4jPUk%9B*GarOepNdYp<>=VJ zQZs`R+++CU?Xq3A_~_X!W_ZpVpyP0=7L}E?9Y0z=Y|$Fz0$`0iV!|)#DhakAC^_sgq7PjWU!%OB7L2h$*5Jdib~h z=grk8&#xH-%PJQM(XCxS{q)nmw(PPtw+Z%{4I4*>HG__tw2*=kQx;vYgDLFGF1Il~ zeLv@U*8TT<|N6x*A{uQTj6@QyeEa(5rAjrwe)ZcgZrb4?sl(mdPj62!r<3>p{Et8E zRkQ)q02sYNDAE9tGN6(T6=awtTTt8^0WL?H?qK%xQ^ z2@tSBqa~n%N|WdJuO6An3Bc?(OdFshL6T8XC6W=C6CgxJ1rczwf}p%0?RXs9p3BpZ zPY&$B9GFdgX&_NGNBijN{b19a>{&veCYp?p_mGOnxxKq%s)PXTHUUKSJ-~8DAyP$E zn}LE*GFZV1;QEv2ElOraty{@V`W=2^69J3E*LzWd~ICt!LjIq~VZ+~1L|8(_NPdZ1< zG7#z^!uQ|4e|FPQ_0@O3{Kf6T7|Fxkn@^ssR8;f$@K68s?FF!lMH2wo77~QXlxYA4 z4VZ)p7@%qK?@CdSV}b-CfiRJcb`i^y&z{moN^qIwmK8H5l|chCAizpgBf(uj?eE?^ zlvCxj)lZ`7sp=>bVw8)bJIpi~pu1(E(Y;6_BUD>%1GQ^?`2fv8GQYG1nD#D#h{cgn z931`W)#WaP#DIYnOiHK*B@xvsOZ$)K1Zm18fH0~V4$x$w7L1mRq7k0mBuj+=u3kJl z2-%pTTn*R{Lm*N#Im+0j8jt7QdG|Q>G3Vo)IVqsf*6n=0>_CIq)>8-%?p*KQJ$e2SsGZy8!$1D7pNDP3AV^Y*CKq59L9+~*g|I=>P{{_1e_M^T z30nk6z@P~OFx!(~9##N_*c$gx!UR3VA_-zPA{&y0u-yig9(S+qfGSQS*$>x)1p`Ep zrU)Yn1V|d7D5IgYfSBy*t|P|%{RPR{PA{)QNy-E4f)q%wrrn-hdtmx9u_@*WnmrkX zVxO%{*~{aHOQuQ&S+Z=)fGX&~1ffFEQ*`xYDKr~_^5NynrLc}b2@bO8VkI~($n-W@ zcw$tna5;YV>tCM)L}ttcYSYYIAjU*PV`rh? 
z{qukM7XA9O-+X@6+oI!DU|A3`8;mu4^8Mu3wIxy)W*8_ScI*R!=`Hl(WhZeMcxQrnpOnAY)nx%T<(pB@h% z-G~&-}$@pO&VYb-4U^dWQYs{ID7M>f47= zC;)L>BN{8XBX?Jlun7PJV5Lk=86_4#{vBB)>@Y2BDo_x!NZ9S_^JC0q^eP|{Nl*nB z2_;hmR4|4L8O0=;Z^jI> zAXk|H0E^L1tyxG0GTO~Ae);mycaD+yi1XB}#)YVe<#g4f6*0bf=${_0PV0e(iy4&% z2(=6iu2sf9<{SgpPmd!2htHk^!3^iJL)Au~v@7k>RIaQD6%Z5FmV1u#yv)mY`1tyV zKh|G=yp9-Qx@R@Z>3Z|c)seXWxB9D3uR7-N;Q7s5T*b)f^ZTzq%tV3i$HNlYz^v)Y zssaEL0H6YB$TkVuVDS&64L2k}!PNkhMNg4aYiq1#WHaGN%1Qt#b4EqVCW?TKGMR2u zuAM-^=>o7`^PLgWTIFJ}+u~o9M zW_$86Kg_5axfgq@WN1i{7P5%w(dN5zx*`$PI9ZNjSS7-SXfPWv*h&Tf*%B)fIy}9N zJ*O|e)IP_|$>~|;JqxM!)a88l{oQ68Aj_#DNH{9`sgu(12Vje|r1L=TADt+(%suDetO65emb`EGnPfY0 zUZ~c2xOsA0+kP33AKu^XNtDcT2`E8DvL9EFWYOV@wm!eTT`3`1pmb>SxTvuMXaQ0W zZ_Xe6=IODIILH0Fw-31e{HjfRo-f;-ulf4TrCqL;?bH9`*@1KcKfPc3;+QP=3F`q$ zsHxN;xSfNA5#^e5Td?lb#=YMCX?cFQ{PDZre0rNs01|5e19RT}`TDAlAO3j#yKB$$ zwxeA=zR&C9!qsHozW(yvUIozYs;{z?+zZI8q-g-K06;}r2*|dH$-g5+n_`t%(v-ng z25fuGLBliwnlvELVU#8TuwW823Sfo_f?9-d0rsR3w;d>(fIDFKLNkMa7*IfgXk@nt zWye5meatlcp{}o2qgTjM1Z^k62_kdrw>MWl7k7*#dghjzof9N&Q_(x;`v*=@uAq?; zIHDTD34tJBx*7l!kX>na6~W8k@NCF}%VEP|RP*`$yNkhPiLu9FY3q8px#oU;IPd$< z_d63r(PaSu0kg$1dYDY8R3g{g7pLpi+Fa_y-URmQsNigQ0F3uPUY`H{$&rY8|J5HK zwLJUu>2^O4cOTvl_=Nk**7(`s?dSjdi*T#t`Ro1p5!DB?O|#(U)99n4&;c4OJ#X6T{?FsZY5UXnzx;G1W|^hEZwh4d_kTH^V*BpvtSeGo86WQ$^v zf>bTjPyuiTO_)eS1*jsMf(DWW+qqmF%rZz;E17JiMnqIo%$3yaaxe^nj)ZfxIVOdN z?b-EGVNx+0j7FQS(#dURzrMZ3eu%jJAI-_0b725i|f zVFLZI23^fcQl(P=_}R;wQ!KT*xfvN8OJzU`j`e)^YJ2)uA76EP{`voVb9#C3xs9La z?RgvR>WtkSjkjz4_V?Edq#S?w_WEO)a4tKd&DDVb2Y?K2ReQ%gT54#+OpF+_M(tm} ze|p-#{Q0*}j+~jCkQ<3i*XzF=`uXAQ&&R8)rnQIKRlU1h9ap&(+Wyslyxj|?$=*>g z(gmC7O+^!cNCd=^!!j~wIl;fB9EC`#0=AMB?y0nma3TXJ34jWAH;qUoRC1M|>VvWi zRI!uz7>c5|wQGXy%T z=H~wFx@qJXD2I!gEYm8H=2$=p`~9BjJ{yxp$8=-G1S2M3*#v1qN&|MeKqM+{3ap57 z&#oqSBWsNF>$m3w7%fLTt?SxOH;cx@`FxoVceB7G3uQTMra;EAcZV$)rlT58AOG@; z>mZ{d!MQlW3X^Q&+#mjOdG@t<9xgyE(<6W0i^%{(sJl@1qvX965{H!Uwv}b0&*I3F{5_&aaf{i z`}v3M_E*0<^cZ*l{eNEn_SuumzyD?Q+xxp4J-L3^j~_+#YNzYdZI8YC@tdDN`lMz_ zgxg%o;Q`FcGKp?3Tx=IfRO1$!qiVpW~B3+w)yv_ODdJB z6;+Wi0SsLM=gz4`c~{?ktxG`XlKRgh6g*B-YYJzWbw-;W^>bU7*k#za)O zD3aV*CAv>$G|VVgE7`FN1!#hF(yD|KmOz@FUfdeQKrD%+Y;nx}vF;y#dbdrX$eL|E zt;g0**O(9cdE3qpV?qVPOo#vgfI7BLmXr{)r;5_`=f8b<=msXM;KUa08U3*A?eT}V zmz!Vx{;Dne5C84UtKU68y!-ln9AAvPLw|95c|0ucv-Vzx&rVXF-~IXcS#Rfv92+_F~EDcoO`lJ*4^vK%x`>#Iu?fUB%ZP}e;)V@7DZs$3xu2z8Hty=!{7jPj2I~o$n{orH$ZFMS%%sm=Ifwwx}dm zPfj-3;OHK3X@*fQvJi!UOa`%Z0u7B+HAAvQ^rA9-vF|^;db3R$j+s0ij%{6!N9Fzg zc-hANo&XFYP0}iWu$Ds?VUR@wFe%*p>X)CaDsGsRH7SWb*sJy*UR|F2>i37G*N1=l za{0|uKHRsP<@xfk@>&l+?XGT}bvQjgbnyL`+h@;~JvUc1?jStwP~biR)7VD`w&#%6F@yp|054DxjR%=*l5&9nRvS(LDCB(GBL5w+eUfPAp^Q(XP z;p+D{?R@@lKJPoKg2fIpAL6jo{oD1$Q+BDf2*mhdy~2ml0V03cZ;SYcGK2&Z5H$|y&XC_}CgTyDh_9fcH25DGSJ z0D@sUqrjWzM_Yp3I9n2A3A9X9z#bl3joKL95dv>$E*^{INOQthC+J<`A z_leMYdK$fEE5ikPy66#b7D8fMB)UizkN}VxY(={XJy>0As)ckXYb_w`2!a%XO>7a6 z&57K!<^1l?-`yt+z?G-ta_s$j>3)3JE|>jssd5y6G64WI3HD_nP1!A@3=k}}t51IU zY;9Pm!ZK*A{N5B4JRRgl`f%C<=bSb5hJ>GrOzj)U3 z?tY`J799($$R&kjlt+g~IHqRKK8DAXN|))2&#(UHn~$IPaqM#%6c_=E;<#^zHQ&Cw zd2w2#g-i9hZa=Tbjuss6_V2%WHw*(d89);n6|NqNqKYU0FoA-QVY_xX!M~N5R6@*U z0tgy5p%M*S&L9|NnhD9u3eW`sWSInEn;K$?#V96dR=_A)zJ9h+aHPa+8Kc=zC2C@3 zjm(^7G^9vFQOa`5X;pvn^!eladzS^74$8pJsX6w?%Z?t)@^oE5mnf6dC?;#tRYK6= zFtglCNU&8iLR4dj%}p{9ExTEcKG{(bEf;mFg`G`F8|A7TxjSF|`TP4sm4NbcwI15C z-n2ZQ&zJpj-aG}Ak~RRb08qM82uz0M48R7=cKgLIK0bt@3UDI&jFye_{QA57^vPep zTo#P`?|;hm=JO|qahNoq9V{7HoSt*??x(xoJw5e$eBaArAM0%BDaK9}*k-PSQ&pK7 zMP*}h+cUR^{qpwh*IR2cZBV3C0Z`0*=*wZeK0o{D(7gm(2g9%1X%W@W_xbJD?}rO0 zu&-;k8qr{HO+o?XNSLN8SO#gNa~6t!OGbrFR|8a{tt1RVNPslJ@;YglZBia#F;%i) 
z!%-m1J;~XsdctJ248tmxe091NKtn22*mO(pbY66+j4}u!kZuB%Vp@e=1>L;7&Ubex z&=d@UNKYci<#P72^uDYuAc1aaSgvB4rI{Lp3Jn#Ut;yX%MR;*nc8P==Ew|Z$FpkRB z$zI_O6{>G75u*(h_H@oY}h0Vn$Px-R{2b=V%}!(|)WC5wi^Nq~lmwiu@c z0D;IT5VUCv?b+wQcyVe|#q5RH50C>s|K+>O)hGYy^ChBqHytYv6@;zPcfs>4?GKf6 zZ1w*9$#1U?IkO&Q&E78y8ci~Zl5NMnY`L|fvR7o)#pChu?&0qJ!{vO3u1-RbN)VY* z!Q3u~BllPPi|0#IsI`Wedh4r25!}!7o3B1x3bfr$*N2uAS^_*vauS)cqbwV+Xu{<= zXMz72RvDr#q98mL@gX z(?qJKYiyY?K}3=!NC2WO($!oVNVMbAXa4yvCIH!PO2Kmib8Z)_Exl6=iL${#COB-Q z_A*($Q$%#JA-FV0)M3bB1q9j(u@upkxkQshvWC#I=#1HhTh6L>=3ekF_}%NTKJ1=R zDpxz64y~`J6_5MfewpV>+Ax$X$c_X6sP#iDSSDpC!$y(j^6ZOW++Kmz<}yfh1lrE+ z&);rWFaPEjy*s?@DzvGX1ote|$o(=%QpNAvvUM-(o9~%*^feimp*KLU<=VgBV z)%!gGgYD*eAz2j28o4;ra+yVw21*ue8rw{pe~@WM1qy^6Rz=AM1Q`Y>I$>#2MoMl4 zN75?VW;Fw%0h$#?3DqpiS>Z@oH&2=Y+p=qS&7D;s1)u^ZvS+{q7-P`{O^sqYuAbe- z+Xqx}qRFOPqOxl4`&NwW-UDtKHWS)lOv848W@EHtld!g06r|{GLL^BC(G)nr3xW!! zAV4)pXUj@VO)HbC$Xs@yBJKNc{`5m)!lj!1bn1OwuGSp;-NP8$B_{#XPQgM&nNX}t z05m~aEKRcr68)nuzIe6}Y6A_076IhLtFPzvlfU`wBi3;vV8YI-edd-rXAKm12^jP7 zeET=g*K&+~H&l_auf#GP6Un&?`$6mpZ;|tH-oO9w_~Cr^bj_gd_QSHM2v(FR05pZV z+pbss`1a=6lckqq?Fgpt&d1}G(DQEl_CMZjE&yz|w~ew;1~{vs1^^pi3IG5AFyS%M z<{xA;%CLkAtH5H3W;L5itEEk$9Dvc1u4;fxqEale5HLX`ts(^*6(B%Nw7Pi`z~s)6 zBaN02oQ)YEsa+IM5K@uRk^neSn8el7XYJ>^$)aIZFKTjfn{&@CkH@Ri0rO#VhD#~K zs&u+ilA7Ap(;lqW5oAe_oC~lp(0jLVm`g#3R0(Fe;DRnpqYA1<=D`Arqm7vrb^q;u zyxEOuz*9GeSeNDc;_M83s)8G7hT@+RH@F<@rF4&RTz&4hNVeZO@hbRBrvo4JDBr4r)h}E*poj|R! z4UgKk{rs`6&4QKYE`y})Vp*mPfG|{*TiX0TjcO&>hD@*s5t9frG72zZ zGKEwWB3WiGwgB0-N-&#Y6#&R4Xag#-2n%(%X*oykfGNlN+#@)L;Lqv!`M(w!|)9P)h<;52mG~r+{7>Yn& z#sTCy8|c|fgfRn!EEmD8p^8+@-cyPahG~wikIFo}|I43VUuMB2Bu>Y*_v6*U`}zEM z$;)H8V1S7t3Cn<~b!iYyz-YtG^C zdwF&D{!~CXQAWB7In#5_-NpX$vU}W>8X0V;s2r#B7;aqpvI4K)a>@ZxU0j2t(8>an zRGeDTr-6cj$&1~(i+ti>qZ{dBVbo7~%x-d>mElx?9|rRaZBKh6T%9Mi5b0&maSpZdDwus$5`3 zuo2OSmZ2mp6azV;U}oBLdJ{)Kz5nC)_fm?Dn61b|p1pM4z*}1wzqWXdCAF!{Kl| z4=m4c{Xf1~eE+9kK6%mHifTcItIwI|j{}BWYLn<*_!5~@CY^Gs>)@-Gs9WYEn6)sduA#6Hg6DKKc;gF}`W|Ucy zY*+>&B|~OdsBoLrSO6=)Du9#;7LHEiG256gd2~3r@iFJ3ecEde_ z;TXtPnVGs+_I)Sx7EL|-SSncACjnHmi&ORpP7zWFVhcv8q_i9~JjtPI#ev9jZ52;( z-fivt{=0AQ6P_v>an*ORyWF?a>3DpY`7{ewwnUWxM8N>G?1C08kU)W^Ex2TdmYXj> zf3b%O)n|9*@lU@y_x+pCuU0pp&DO?(ZqF_;?csd*=5RhG

`(1AuA9q>k z8KhlQ8BUmkiPm4ezB>MNYgRN>u!Kkh>DtbttxNP&>@P!zXr+LNXgy+$%w@LhvRH`B zFtw=)6tjTWU_es$8T>(amsSF{jT?Q{|tG4 zdVDV5Wn%HHBI=jqh1w(s9(rP}(UpApR5J%9PSMX72&<#;+i9*@T@g%G2Mf}%Ug{@cI&ax!d~ zrU4TM4FLUOzseI70p!*ZguxVYjw1ib7D1*8Ak7kuKoKejCmgbh#c-(rs6h9jqEJ2Q zBFPLGDgabLC^sM(hH1jRT8+jkiY)`G6s?SsQ3cNFR&4@sfC(5vxfILQo15|OTq=qp zz-+YvRT!tUrKNefe08m^rMl7jRG;XxXB1^B0k^@7%;46Mt^9_~TxOZ=X_+<4>B)?$`#*pC%Z4h0sCK!u%euPJX?r|9JOfqwIq)I3hU;r-AnB@RyNf}_HE6j;(%jJs~`fy07nk#^UoEAWDDel*` z_sfgkkc(#|d!`_$m~FbD1|pS?Y}JMC7HGxAU^Zr>!>|J-*;d7jHp-1#@C>*h6z!3k zHBn9}kKcU#K8Mqk9T#g`mi>OWZ4ZyfQ=V%A1dOQ?twNNk=qqJMz+jOn7yxW#Sk&_B ztIw}HE2^>P{I@?otygdU;pI+1KK%J(d=m5NT<0x4p~1^9e4B7v+ohVP76iDU4i8Vq zr+I=H?49T>nklJ35-wLIhpDqhfy~=q_BYB?YZAbuXp;cx)B9V#dHZ^Ixodr6bVQHw z;lt_eP5|GnSR25bN{KmZ`EcDW;6QYp+?2tozI<|!-rYrz&E+X0J1ZnTn) zD$z8oRw%S{w`EvX1u6`aOb`U1WG#gfG~t#4*d~&otRjnGR=8A@i?)j_i%AwKKxm+X zPz4dlsw%eqvrppT=|~}x0y`S0#ziL46+x_If$YvHp^a)IP@|0~RJ00QOm_rg+M%BE_)Xvxp-NO`yZcPKkv5}m+k)l_~(x&7k~l*fg;i9h8Bwm z44VPkvq^9iZDef|{Ix|8MKcAGn2jbNi!hSGGX3_4OO6i5^W zn^rk0CR4~#R=~;v6-fs)n*g#w6cCVdnE}WsY~}3j<;&gSjtp~+wnEVy%>aebN>PM^ zhCx=zF*M6$pqpw6MlG{NIV>4{LTF*QJ zP6duX{Nb;sv|+H+FZ;eNSG#e#+YYB~PSa-5C_@BOFr>9Nz+pi}x(EZ1StXVz?p}WR z`E^L0S%;L;s8r+D$OiJ18vX{{SS79tAmML~OHwn!Y7P?xZZ9Q9s5eXB`KvF71 zRnSVfm%~uos?UB_!l^P*RfyUuGRw2*(9_R<`0HU7Y!-02E<0adN1YFkPlv4%0FYs$ z2*{=hYUx1&y$C2l0|tvM2r3z|zWVa*br(S$zWvwxe(~~u_+q!D^7zAdxy*HK7Z)uS zAFr3SFi&}UI?o0oW68N8j`wp$V@MPhGzWr3T1D8FvXF^kW|}8c&Yurg>#F0{TTrUT zVDJ0rL5@Fuf3u(Q^7Zu^QR?myIR5gudw$TXYrgxh|9p404I3l~fg}Xc>ib~LR6wOH zDlFJ4cy1p2HN+z5Y*3a!G!$nuWWp+0sFtP$Sg;I(Fv2E*0wQHqOS>YgA<>6P8z=*$ z!%(@gA{m50hXk2oF^md}CBj@LTG5RV0xTTYH8ZLO3=tVWME zyC9$!Vm2~Go1N)e3y>jWE>jL$OrhlrX$F>A1uC{>YaZlOjEn@!>Cs0ODxHA1{q*g3 zhgmQR;C@|~w!7?gIzBu+WhP)H0aPKZ0+dAbU9d>OVHHgSuvG-1GTgYo`PHv3_SMeX zzWv?Dc6IX)UtFw!>zCUMC|ZvPT;JF0JxbewIZx*bJk@^T@l@xBwg%l|Hb5;RJQ_eF zSXNRl&q~zf1V@!n_4DE4y5oG}vI>oBYi;S#Rjh~K9)9&Q?+#ZlUhF%uQca%k@B78W z^XHfN`TzcZ-jA{k*Z@EP0BC~C_0l*hTqV_1laz#`a^_b2&*=setTx3`kp@^IQ-q_y z5@JRMhl89VD{3xC!9bx;U^ zrV8^s;*;y(TnmaubSFnuP8n`Vpw@+mkcovAa6?wF-9STGPzACXDOI$smu<3FF5A8g zO`uJ8PN>3GDxrprjtvlMa+Hk9nn2A`we-htzqucFfl2IkeO>zH>f_;V9JT?|G)uN^ z(Ew zb_IDSU^YUfiV8qe%~2|)po}8eXSLZZBq`85FUme!HrNisba%=sj}@8*%0?DPpPB^^ zrg3hSV%>fBhlg`&$TpVCW$FED!FGFke3}y_+l1)^S}_I9Q0zk%0096EKrW^%8Wu{` zxO?^Gr_UOA{L{a_i<{@4eRX}IsqyjMIb1cUT3_z8Tg@8RSIb2h5?gVS$3Oq+_1`_) zr?n$T!XVIYb}fcknG_HT8b|>(=S==5 z*fK%Fw91rC69kK4+QGox%dqhy#gXsKvIG#PHm04lOU7H#w)3M#AQARGvV zf~!iwfCVIhf(cK-0_fN_p4-*Sow?2xtUg^#TB;x$i$NxXAPq(VwyIYJqLk=W%`B{^ zg|lHwQ1xLjhR|ZF)TRS5nu!7+w=KI)h>SDM;&8P}PETfKmDTG0Pv3qR6P3Z@x?7g$ zS3ORL!{hN>1)vC^;8FmE3X{uDEzwY6w;850ie|wAts;<}>&wsIK3_(D_s>7{i<>Xs zzM*E-!(TUUh}lxRON~9wak<0C`=1~8|KacR{ogi@dL2JJ_E(o+(6SmF0NZRs1fZJD zHq#?nE>~rCV7}wUvt3iy@$vdu6A5rhCGqfwcfb1LY6GhuM9_v1SwUtks@XXa1jddPld0;d zNYWrC>B*{?4@Kb{^&PJnO;^d!L&ixzrc zSV6)flth81$s~(phE0g;^RK?R()i}z{-S4>pM3UaXXJ7I?NJ+-h(stxi`_o%Kiof7 z`{d25yuW*VJncWb`ug3)+socf%Y-CEv7{*##EOlq31_0J49;*IKdf&qF1q{7yW{%8 z3qhH1`SxG^cdxEif|(CL9$tTXftg{BhH8z+`ybzbe7HYv2^U}h0000Wbah1*O`;H( zLbyN}$gIJCA`C-?&RL+qsj^U-vXUwYDief}%_LJ0D?maOAVH`ghiNoBM$eWa0HQ%e zG=)M6HCLYjrtAO^RkBLjD0^m150vEzhJkVF!R6C994Z~KAk5wCPrB*q`9{BOpw)*AWAWsgSia!WXi3aQo>=?;54B*Lt|*hG)_iNZWZm} zZ(skirD;b|m%G-M<%*~C{oydCODGuyrbQ@0(9~{K$)ahB36*7&CJQE61Oe?{eg64= z{_ro~P_(iU( zDYvKsd^xmzIQKcf`R-w>O4?un1YiJ&>x+n4G-cB=Gy_E#Gc);5g`!Cnfux}-P6r4Q zfJIX&S&kW|NC2&(3DFNjHVn18s#Dw_+O%vuFTwNyJo;7U&9Xlxxl- zT$n+6sc2;Q{5UEuUoD4^({!tVp!V|BrDiu&v=FKk9I%QdMI57zj%`8ML^Z?VA|hNF zVMkSiEz%`0%6&L!B>-BZP4rRaiezacvT|l3y-(b#Izr6j?$MU|E1+2%G^UN?Koj`dR$^o4?KV^4XU!ck6aIoyQX1 
zM$Nh9@bcNq_4Y4!>20%Bx{L-~e^TGPyZY59JqTM6g@_rJ$pSIgvp6|5Q3NS0=!^ey z|Mo>|oQ}fbC%#%5!?Cs0>D%w#zI}GlU<$k-^FZ$c>{&aXc&k36_ zXqYgdtE;9;2t^YFnX*MWXfZ5yw z?w`DRRuz^E1gl!EmC{(JQZbiFLfIn?Lm8-W%w{wfMR6)SlS%??b_{|A04$osY??^B z29ep%lQo@YIWvl4eFRKHaki}1CwpiqD|U>4NKM7qqo7TKW<-MHBm!$e!P2r(IVU?s~&!T|K*#@ z-sr@p)bOZjIT{+5Fn5<%wY&fL?){(t@~~yW0AQL1$ctwYs3Pn{z~PczB6C*qe}+Ya z5CD;^AjlFN4m%)(0m4xPpb}BxA`!MoLD9h;k{ZEIrvNcYlMVJHk}i_7=YWf@LOg3^ zAtqRW?NNx3$*F`$yQ9t@52Ihcxjg)I8n(?PdbHz(Xmno|-0a!v{Kbn$-yDOx1ZiX>S?q(DazmJmf4rX{1AAqNA! zTClZPk`kz*fdEKmFRH>2D>8y(8^bE6GMOwgQM;?wZF*W6t{Mfz+DOMyv>a?Tx4Zk% zEaEU8iUwn1H@&ST zBpWe;ggjcc@`xg)ai)1@f=I7vYP3RmmQiH|fBw_AcR7I+;Qek{mi@9DPmjmP^X38o z8l?neYLcnIvJ(M10K!VZazPb9L4=8v5)QAIeE)7P%ktu;F}JbR=>3Z0Zq}G%S^+vv zGcz48uejC4XBU6{>%-MopI?drBYVP|I@2ac7n4Zqw>Su z%}q1_uJPR;U%y>@v^t#*r^jP%S(%#2!8y%;beLe6ZUAkn2of-mA+o{&S3ro4=wPs;NziNs5TZ3YlL`=Q$!aAk z8wFOdV&2|wU@L?uMn=?m=Gs~~lE7@r@tC{Db%zAB1&c(Kj*s_G>+aRn>8D>FkC|ZG zU0v^QzPfG%VfGQW#WwmmD5iwjm9jz>;HKL;{d$ z3Z?<8j7ZG@OqK+hqIYlJyma@>&Bxudi~9L@Kb*+`y`2jpj@52<^sJ;NLoC^PLP23Rtl7{a)PU}YDhRY7$gEy-aWxW%jM3Dx!wpf*RRriNSGA|pWOPRX zeK4vtZ%1Dp0WzLEI_5ZC?zvnPiY~z_qSw5?KgRm{V*dGWcTSoS`xjTAe16kp0O3G( zBr2lNi)fi)5Dc(1myGON>&Grv1G2He>;;=q2v@b)%&D5~TuK|A>1Ovq&NhHVwHd{U z+K@g8>6WMOzrGu&sz9M%E^)D}tDhd99?$23l>&^i5g=Rw9DNN(AxHqQrKADSCcq+1 zn;-y}&xZ|)i)+rp%z`Vzn3(hcLjuOELGM3ZpHB7ill=29&V-YY2!!|(}0ATf{i*OV~IHq6%C@?9LMO$Qv_B2l0xu|8&%tF~VS#ZUI zQw9S>Ix}_oneL8!`N{Ez+uZ%?tLx}?R8WZ3T{|wTduI%qD4lI=hLf|-_2Z9kK3^z+ z6Sb86bkOB0Bu{_+_gy}*D{gi>3IL4okf+9+psiWSG9s}<9!9#M=FC`des}cs1?x

a?GN9QiWIF(ALM;J?U2L#W*;S3BW%V@YQZwZQoT@fjOQ~nfCFx}p zk%8K})0nQTp$RyJ@{)b*O2>D9{QiVYm&*O(Li@H~Js%E-! zm5LUP6znKg6u|-r0wvppc{fhLZUyuNtLfaM zRC6sO646E*Yqrdus%oq-QD`-VtfC7Q6;?#d+?5Ofr8d?PiK(j8hRUoyhm^8d%Ph`5 ziD>7){qAp1H8o+>a?zJ{-**^?$HU<~fkN1_%ZO6RK)SbG1V9s1IqaCSRWt$6V1`Ya zrkyz-p@=nx8?&}K%Vb20nCB7ex?Jph`1ufDeO2$?>-F->&!=3z_|?lbqII>tw56>J z(rQf8)kdq-88wYroN+oHzPb4FS%b_;6Dj-lc(bgN_3pP%<{o^pinCEUxt+E|QIKuV zp<$0g_LmoHaXUX%POXt0<>kWj7EuD=Qk|G5KF8I|@d#p;8J!DGEUKgc4!d z6CApHwu=!pJ&V&}E6-S0j|q%ki;!ybP|p{#TnSUbs7x%UsuGI4smgHDCZiuRs*)8YZyo-#Q=+Dveu2m zWki(U{r=C#ndMYj7Z+_^*1m3!hsVQ~1sW#GED)9n*x2=zV6_wh6D$KX0LwBgYlaav z(%Ae|(bi|zM8Mm_-El-`+k0k>EiU%g7kfPX?PLGD&-wU%TtI zt@5m#(?!W}TQ5TMyv@PZVkrxCdjB{tuML-HW|C$kS|!v3&yD{ZOw)+1nV?yMlEn`MEHRB|0b6l+ZbLIK;IQqqPU+knbnX38te(L$l`0;li*cf*2SFf)=`RqzEOQuxSD`r?^ z3Yy(eGP0HXfLV;qk(FY$jFy7J>lRkoi&cbMX0J9-vs7i;L6?>?AX~Ndva0}YF>`^& z<8QxtoK-HCQafi-0*# z$EoG&lh+p=%RG$Ld#bC?|KZbp+ikmM zC=ks^vZZAg+jP1zF}a!WJpJ<5&tIQnTfAw5)+l2ZB8DrGWR#BKZG$7iRoCcA0v7FX36A5X|FFzb#-#R1{0D+SB$A`lUe*W(vFL5{>ahxz!HHti`eW}0!1&NQSX!99}z2u0_A4y6S}(Lj|l-9>k% ziU^oYPC&-kpU1l1mNvnxGfa^=uVcM+vhU}od1|LxxQ)FXTo2!U`%GFn{p!~*#VA!C z091uErF4F{(yn3lc#fj8Uc3blw{1O55E2N@D%f^?yx{cu`0&Rcb{dPXxA%wj_TB3m zCU}BGF+l?MW7-V{sb_NV{xAtFN%xSYH$^{#H0^W}0Kl8~91nSc{0Gyp6#o%cVR@7sry zG*s-_qDE*awL`5Mp(tWgyEdg(Yi}`Xw{~LHs#$yQQED|t@v%y%Dyk8xR_!O>AD-7M zf5DyPy3YGNkMn&Xy_wTsuyj6AZ7(rxbPSl1md?myp(>;&yv5kdjTjdBvdtemOT)<# zYhghn>5YHlk@@wThZQbS%F~lZloD&Sm-{WqbX===yTrf-<7cB8FSDvxV8uy%<28EI zk@l^C6Mb^pH}}2zc5YOq*m)L9QARamXBSJV0lhtMw3kh?mvo9Z41%zqWCv04BNOPe z!6O`(Yj0K6WPWAjfdMJe>H{it8ZlD`D}lj$i3Em)uLe02f$U*&ri$U0KF5tj>diDs zd&KeLdQB1L!nc1tShwC(i|AMBI0jYjC%>?>bh|tnovRHv_E-;E_-4n=i*-mqO<@N~ zM(+A!6vNvz=Fo$<%IY@j9uIm1lhd&6SMg$UVDy&?aZO{hG45$s&mze@(S?#zS&F)6 z8vI&JS(#L2za7Peev=r8s^-S4E4w@%Ec-6QLV7;ic605m`^Mpu-^bqOdszHgQ11zu zDwIn@P0xuW`fO&!!6hvMz?XZ6XO;}890AC7<92_Cm8n_He^+C^n|eF=-E5^Lr{Z%j z7nSkRJk16)+srxE^C8B%N7jgX_u}mQ%=-TgZP z@u+(Sq)XTJ@;3x8f2Z@tg8NL~4o(Q}W@)kv_@9pCy$9xsaZhj& zqmLK`(@~=5KW}z;lvsG+6a#{`9|r|hZ$tG^zxCv-1Ee$47Gi0pUY0MLP`vK>HvNc0 zVIoPhD?ISWYR8|;wwnVm?a}hu)nKAtrF%ve^{v5~j?>r|=4+1>6#vv(9ZpQ-9Bi;BX?uvUn2eYC`wExbU@ zV7<06woCxs7Izx{^(RDMnRlSnD6aX=t-9{5-JWKjK8`iA#%+ zKx^!ENY4q2rZ|YKA2zLFE+aU}2N?jH$|+%f!7F@N3Mm1ZfHCymt+2Fq~(&cS#fc4 z{ko@1{6mDbYvur2D>%xF8Fly1IPhWvV=!$y>)M(#fwGE6Tjg+9V*J z4KmH5P5J2qNCWV-jfF?3+tX`hYr$C19-8-O!OHc2^!70gtLAQsH&Xp8$Sth^^zAo$rJ)NVSZiy~ti&~y-dP+I88tOYA^|l&sK&_4D z|Mp6(f0stre9vn0qR)zc$UL5Cb^oo+yw(FHpa)z@l^ekmonk*!cA|c)(N2~(=9+wJFeWIoMUWVU*osGlW&W_i89y@h&JxE>J2*%GCw;E!@Lgf^# zozx}#^))qtv`)tJEdgJlUo#lvnk9hf<`PRS+CtQ5dtOsOmHTt^e5rz{teJ7Z4tgng z3+zyS3^y>Jx9_^qy?HgTF|yyci^UFxcAd!goe>(SZ+~4XqrE@~ta+fA(^CqH}r#(#6i%GCD>}1J>YdMNf7ZTqF=BkaQOn za!TejO`F}zyR@QUXBRV&YN1Ud>_eF@t3%Es`=dj*+qngOSI|_)(5tEiUqx?z>F|7C z-(lapRgU31bCYY=QxvLS=dG{S9hcE+P7oGbTk_fCzmCGJjkla%Nf8F;jdfsAwp_)0 zS{H-9kGtDl_xpmIEx&t|X5>np>hzbG^BtNgAz5dMs`Swk^@t>&6A7~{P2(4(=Gkr! zT>vbi^kx}eOGdfsuoUm?(JaRLIT@y3bPyZi`pZ8fr^%^YjAObJ6Q0T5j}_$=_L+J8 ziL_Q$Tr`VKn6xock3%R?C0}H;7Hm)>Ys!AbYWsEEYanBVkIObr z3yCC$!=T(O?{7fJ${sj7t4|Xqwr1C^iy+)+xEz-=*wXeap%aM*Je3S&6UmE z$m6dLWPTH{$A{K+l`pH+JY#1G2Il&u*~~)E=-)~7KOuFb>qkx5rs$?ObWFGFJaBeq zC>d+y0i@iihovL>{oB6lK)x`lRk*6rn>j&r1-|NrU+(D-^0GY~F_?arSX)pq8LU+? zqn<(+H;yP;I`YGlfVm>;n{9F&0DN#EU?PZ&-)7IGK0oZB>t6Wvj{Hk@>(v576x3p& z_iZ$|-{c)6qF30zFYxrJePW}^KX$n+RM9mw&>zd!L^{Ue%_%mrVYP35JZe;I9rn$0 zPuI8%qQkfCA6|nGyl8wnNjc`?;V$#O`%lnY>Rgs(qgHcQr0mo(ZMR31v@zKEXHv!s zZU7lKNeRTx>u&*n+m2=wKU>C|djn7=b<+4raP*3>g_+OQwdXyuteUmUlbERnMmaf1 zIR<;}BUDGdBGQ|ryB5j`Z&I^X~Okb6x?hJ%O;pL_;q)fX=W0AcGB9k^6frcnxvX{BT1wMn4>kPx? 
z--3lwjJZRF>1RvKW+gJ!kH04&$_+@J*k`fog%iZMn?5??%EuAef-Hn-#!CMWtDBp zf{zOzfh7Ag6Vg$)gFN(V4_ydiVe#`0w@gPY3X0`-aw&u-o&O%DP*cr1KO5Y@RSna> za|qy6)Os6>-*|NY7T-{{5Rt-mcH2l0GxXN^_wsFNRx;j^cQ-7wUEP;IUScyN&48Rc zjdrwzk9+9mDTnH)kFNd^r8qzTG{)zfj$C|N0gFVzzPo`pN?b)dRqU)qOHOt{ z(A0(U^49^O;lZKdMGvcjtMImFJGK-3g`rI~%k_ zp9b#Ng1HP)wl?SH=K9LuR^5X)VOVSz`F#-EMx%)1N#Es??%GN%RX(8f`sna5$S_pT z+kqB+xUmti_yT10#egQ(ufkPPaWjc|(H?}I0UHmpO$~xZ>VA)u1I$|>kXzZ!5Cj00 z02qQnufbq58X zz3vX4m8h*Ch9&%2CqKt|hKC9oodfe6BcoLb`G@E2!O!pFQ}3k4_gAm#%o(KF&D-dW zzFsBl9amkS#T$2L%)hMMGSedYB&c4H2ZyMOR?Q1euo*LaEUrv4xUD}RXQ}pPOz-|? z{GCUU@T|9aEy0vdHhQ2JJSx#VF+W2m#f0sb+pfFwv_lIqBa)-hkc8gb-^-VW+EwGr%eOfq0Vz+{h&xY! z>N{XZpPaS}>(Jxh&Hr_TE}5s66<8Ihd`KQG$C^tkT=%WvdS$|VO@8}O!2g?44Cinm zm6mvA{o~+JW@;fLGstxQ$z$8GkZ-|1fBKGVC`wS=kxD|+Xk7oTy8I*U_=eZa>0z4s zC;DgvFhdC%d4sVq__O{g@(2vROD1X-$xWVNl(u{_Zg){mbV%`{2SviX3pSR_#hntT)r@;YWSxKXz$E5>cGBCXkQZE_o&D7!)-((vrvNLw}~s@LUM8x z0+iucwh6x9BKRmMhI9T6Z<9j~fA%D!raDX&2nGPT@ZOx@cd?&G8H=q?8Nmrochc13 zP@F8qnpNdngwwvDXO#ic1s`&>lLxDOYsmR~FWXn=!p`_*mjxa?>L(|C(6A1hXZ?QCrG^rI*M5^rLYyF0# z!zb2#^M0}|e?l66Y@GkwEJxPvhaEObYvBHYRU!&+bDtk#z~zj*(Ig}}oE{#=fnVSs zp<*GQdGI@P=OG+I59cH8RE2k$K2&`9PLfEDtB4eoU{Phg1xNWxmT1S8CjO!r6qL95 zS;)+LlDEVE<(KdbymY?9d$2a}{jy(tucx<)R%LQ80m6d%p-DGS?Jt52ICo)ybCIW^ zxHh!@+hPHp+t#F%KhRd8cTxX30N~AjXd9U}CezB%+{)_|P5p>t$`Aw?fRY205|9>} z#~X>5Q@r2V@!R14u*c~$xJt8qD@k3Ky?sRvkFAYEe?>R^r|^SVOv@Lw-3uAKO>wjA z#_V0|%ku+lrXzlJe%bf=?-{$K?Ys|Ds~2FJ#B!S<=~JO}O%deWYNi@}ybbRFS)5B` z5{R!Ins6gk{(ucc$vDGj&PvY<*PKy>xo2cw917N0E8HBhp%&Wu`YK1Urc@1tuB#ss zMwDNsO-=}xe-8M6=UVp(I%z3BD^&^nLL0ZlnJ)_k14qg3c*)~T(goZloJ<#w*k1fP znOMJ+&@OxPPMHa7-YCJ;8mKulk|xM+az{7)i8*&8&r7cW<~)#)1_eeP)?(pc4$5c} zU1{8X^p-)UA$j*OXvboAA(f5+MW%I(_geyI0qo-{T37O|{+%48+yBhSvKvd%c;fIM ziF%otd}lfw!A(|jpNxgiEK=RnfL?UOnb5EhbA{zk>p49`??sq`x$Phq{5ie}9iX-%)+=bZ230TEMiu&}D6Y z6X>x$GL_u45`H|RwDU@uUXxlBs*@a|P;e_cb(m*m;$AY6zfhWlOCBONqVOunyEEiB z7>C1TR&`G&e&!Z$o7MzC_3yt;DNkn<%$t3HEVIa=OPOJGEcv=V#%eX=Y^&zT{_7E= z{ZVLs+m-C80@Ifq$_)Baou8X$A**U-T!@sI;c9=8y|Yb25$|NJdzHR(fie090bGNS zDx_FCg{t06N;Cq&i_n_RK-)50th8#_3q`|UVs{s~5>QE`KqspteByHME z)Y-u&w(!u@uGm|hzFvB&T#?D4scxr9uL|LINlA@NVTq*ELUPMf74juO|E+16fm34~ zSXoqQd2}(z{J$_qjg!|)D{ha&0|W8}u?yB~Ih($wNS~yjPJ+p;OmW6V;RO-QpWM4u zK;wnk1|3#sE&p_{pJ(vzf9#y*s-qu0OW_3?e2IyD`f_hpN5r#^rmIPC{-cR!bdkQq zK;F8G@LfLXX!ry$*UW}_4=~8H-TWt2lTx`_bxSnzuZu$Fy14hr;o?^7anC8+C)C(o zse$KA{Q*SkcfUuMxSd|AH$-ZU>&YsZ8V-CLUs*YeC3Fm?eQ|h1&e!~n9LP+fk3wVt z$hCBM830V#%kwW13bSPz*>*ONx(T51q#@VG)$U0bQnAIB=IjNl;oWxgYgg3IpDSop z)#Tmd%KvyJ*C~B>oa3AQ zu7#+|Es|&mZ5ky*N{NjG$c8*y{gW12i=H=I1r@1(`C!^WLhVkrCavu&UVtGZR}UG# zZrPE~r;qNz*GK=Y$JX_Eco2ya_9tN>hesK4jkD*D!z-uqyRXbAoPB%xDkpce22$?` zo#z#f=~EsZL6HgTEO`p4z!zUZ7Qg@eRS?PjXd+-`(vQ>7!pyli8DLBFVSJrwL!F7D z2`E(Fs6@Qs5Q7l=R&f&1J$heUtze;Miefn5dvcuAe*RtKJo!xj_mF=+`j(T+j3i8+ zFsk<=)+_GGO-_B2SqbBJJ!`FHrQd=~1+`({41h??1DGx{1%irZ4Am^;F|aQXHv}U@ zC>15$F?t}{8n$<_$IZeNA_d7J?_zoa!idX--nWUqc^q$f*?;B-zQf&eR@?Qn@|;u2 z+uF#e&&JHR#6bon2*n9>JnwwjD_>yfm9p=u31#QYM1nND8-;VAo4L=q(U zVC(QHKUXrDi&McpkG*AOff2*!RPo%(h3fZ!MM_)zQE6nfF1PX=^gO6%DrBB6UJ3N& zBL|DktC{5~pbv~f?jY|XngI`79Hl7{|0J~0MYe254n+@4RJa;dObznr?q3Y$Z=~6e zCmAJpr?p5tHIQ+V7tG+VB%IA!9beWH($yeHv0H9!U3amLo~v#6Jci1UgRM(ZS}-K) zBR_vj%h!GhhAaT!7K|aSXJ1|^D{D0FzY^$7lCoHO3V8$}z*|n3;9T8(og#}{mtnHz z$UiB;U7x?PaFRu}tQ$T3eCQb6<|rrs+`ni^nnryR6eY+&k0pqB9G>lT>qg8eakFTF zK^G^x`a?ECW>#)c&T37#wL&&Ai5BoF3r^6z zSoZ+Fh_BZJ>}c3WL8xPZuu`@w^F8Z%F}?2gY^HWyZHQgLYQpLKUI81if*_Z=8MOvLaKVR!^G8 ze++58i|AV3j2Ea~_RVE1nY}vJ;DWvfg^Z=t3sPu1r0Fq|19Je_tc}jM8J`fW;u-u( zg&om$rSqkGG(8;2Z6l?D#*l{a&hg`lLmsEK7m>%C_$Z5jt_ceC@8h2<1ydF*omRDA 
zate;#qzq?|7Rj==d?TB4AGb%U{L-Ddp|t8xcx-r`jyNFcD!d3DAeq~$=CosOq%#V_ zJzj{S2iU-%$h**M41E&)FpLZ!5g%Qc(@2y~rjcmsJn!B8x? z_5)Kcxry7wo(r-6e&^2GMOQm9r}L7&{q!q&&3eDL!yow;YH(Z?VGMGJ4%}<(9couw^TNi1=~o$z z^;HRvzuEH4i3M|c?`?jkc2>9=Z&ak&3BvtEn7+RMEWLbo1vAfCcLAo%j+Kn%Ye4GE zN51>)nEW;~OOO*AK zu_L1ypS?O2(Owx#C2te1&4|CR~4Xz7KBoD`4Q z8NS<}cS;2LipsRquv!tU+vVO4Do24-tzA#V#aX{Q>2-AMJa(sZf8}(v#Hom{s`I!T z>B6g@@;Ll*y zoVklg@7eFm^`Puy8v^fxlX9XFK=V%?ipDb9aISz{m*)qR)vbL zFzpDOD#7cZiA0PuM#%*AjVFySoxaECE`MOE|7wsGa~%Di52F0U{@vBOcwdnJhVZS( zPn3~ktmslAK4^Gxyl?fGEP+yrXNa#!C(6ba9%cFMuPl0@jnecMdi0}gQofntNB*ox zu2Gl_g%J7`v+}$eM2)+n)cqpvir)Ka3|Lpz#<+%2KqnyE@n!hG5QP=oqvW%*9H!!$ zdO_sd4*C3r<&lxOjrEt{wY1*KVyQ4T zp2yY5tO@5n|8+kjGTw>utGU46yCORK;oUbQ_xBVsFRyPuo_}%x{}NYDbFn^T9v5)# zvwB}?X?<_Fo6Xa@_wm&cV@F5ZH%;@CskQJv&R*Goq%axl6Miy)y0~i%ogKNad{2<~ zY|ev55xwW5abqIMoC)qi{aiD3u+vpaz6flE`Yu$ z3t|-Tr!QLLk0?Tt3Y<PUbg~(Mx9r61o4~7!g4V9^%ZUiRQ~@9b6HN6VjEnkI6kQ*Bucy0PpwUdA z5kQ&&jdP`k~s#~4RmC|LR(qRzP>G$91!{*D)_y;rl} zo77W|A)am=wHdDa-U+YT?1tIwll9a>8AQtzpb{}Oisfq)VeRayEGcqV_;Lgp zHARRstebsMS2qoKc+2F-_497<2}?BifgUB(-qAzWHtq7c&7}c~$k3akO*ruJDqBbr zd*ar!534J4Oj9%k+t|McmFVeh$I)>%TR}rd-(km*21}0E1=U>I`wk;rx!82_;LT&% z@5d)jWAsjBNj(pu3#&7T5VmIGS;`*OyHLuGu8r>3IRgUpIx4SMk4SLwLBP{Y&bL1xr#hr|KB*a}l>Ha&6J)xc;Ew6*C7!U@i7c>^+cPu$~;D-`#%P0*J5Rl=OeCbm9majO`rY{7YOrm1yOT|a^~X8v zHdg!WXb8n7u5!9n8OcI{yErq@30avj&%Tu9LkF48u5#Vc=|thlB$jho)Q$N zjdH7rC)g~$=y`4*9MlWo2?%~DvV5^Wxip{K*;@Qkq8$tGySzev&92OV#XFjPoB6mf z>wZf=AOBw7PhyV5`{HtNY5|o(ONK%<&AjJE!FdGK(#E4v8YO9Dx{zW(bEdrtFI2_p zArJ^qEfSvKPsq;dX<|(smH>*5+PGwt9)^pv02T!`P`0nmN_fT!v%wJ$AxN^KqAdH9 zP?3>bdtfRAY&blKYaP@pk$tn$y3Kir@myda|87QVvySGlIf@Ox5!=81)0LUu{7P>} z{cZ|R(zwg+o5BYvq074`zPPS&)0gOG@+q@t29oS{L7GJ}_In+w1@a&WS2c?1&TTWp z#9`%lp1aw)|Ckm1r!xz)`?+Cc)Ag*Oh&vaO>3;B{)^bAsDNRuvEy%4LEhcTN<^1@< z*6Kl`nGWKDY<||{Uw56kWD>ecX3_I#MDM+h`s^Zi8~M3YUGLLIxr+>yr8JH=mW&T1?OZo5=6OJH|CzXO)U! zVOhWkfJVv|wKxSsB_m>#8T>|qOIw{=NesdgX!`QAq%sSg0SE*DFzR8pNNiKMPjm!e zGhk6Fs7>K`+NC1ko`fc8S)K-MJ_tJCs7{e?&srwH%dIvUy4zbFYpz1Cjm|fI*OC28 zvyA(MFgUWYQ0H2OCW>a&NM>15F~r1K)79${e-Ee|BS_&V5a{5~dRwG*S^w4n%9fuclKmyZUmQ^&KB+Tz5r#ro+Ym7$A1}MnUAbDTXjkG<{O4Bu?TRK#U#UC?*&%<}tB3}B7XKw!2nKEvP zspk*`%g==N`kY^51AcETTFs+umO}}W{htqB^0PkpR3WSkLxY~0JI>pvO!x5q4dQ*j#}iaaY-#ai&@QZh`Gqr zh&KSrCnX*=ZUah>1jjXk5gLqO0P4YKuq|ubTy_@5_pLC5_!R*F87|elcW<#^NQNi@ zhqNHZXc}-y_G4cXifOe4;2oS2f#KKC#L9 zt08Z#Pc9^o`KkD5Ms!dr#Ljc1FO1l`J39``=(H%G+u}zWXvO8fCr?RXnMRl42*Lx6 zgKmw~ZYZ#;c<410Wk;?>ybva#hA(9trmbiO+pXLJ11S zOF(w2n1_=gA5(EArWF|ky^A{i0CCboZt?zmc8`1zEK z&8&+`%@!&NmZ~UCXfm5Zzm(<-^QqG?e)0;G@pA;NLd^T)L_b9{@-0f+c&PN}RY}r3 z2@Z2HvNAXQhOQr-#~`dB44@kdu-9bJ3vcDOc(@YAASVz6i0p`_jBj zIE{^}O5d>`+Wgiy?H~J1SH!*I$xpAK!1L|_XHtgjXg)m@9|QnX2Y@3W^Z)=|Vcfb? 
[GIT binary patch data omitted -- not human-readable]
diff --git a/perception/navigator_vision/exFAST_SparseStereo/src/example/example.cpp b/perception/navigator_vision/exFAST_SparseStereo/src/example/example.cpp
new file mode 100644
index 00000000..4c6426eb
--- /dev/null
+++ b/perception/navigator_vision/exFAST_SparseStereo/src/example/example.cpp
@@ -0,0 +1,144 @@
+/*
+ * Author: Konstantin Schauwecker
+ * Year: 2012
+ */
+
+// This is a minimalistic example on how to use the extended
+// FAST feature detector and the sparse stereo matcher.
+
+// NOTE: angle-bracketed text (#include arguments and template parameters) was
+// stripped from this patch during archiving. Throughout the patch it has been
+// reconstructed from context; spans that could not be recovered are marked.
+#include <opencv2/opencv.hpp>
+#include <boost/date_time/posix_time/posix_time.hpp>
+#include <iostream>
+#include <sparsestereo/exception.h>
+#include <sparsestereo/extendedfast.h>
+#include <sparsestereo/stereorectification.h>
+#include <sparsestereo/sparsestereo-inl.h>
+#include <sparsestereo/census-inl.h>
+#include <sparsestereo/imageconversion.h>
+#include <sparsestereo/censuswindow.h>
+#include <sparsestereo/sparsematch.h>
+
+using namespace std;
+using namespace cv;
+using namespace sparsestereo;
+using namespace boost;
+using namespace boost::posix_time;
+
+int main(int argc, char** argv) {
+	try {
+		// Stereo matching parameters
+		double uniqueness = 0.7;
+		int maxDisp = 70;
+		int leftRightStep = 2;
+
+		// Feature detection parameters
+		double adaptivity = 1.0;
+		int minThreshold = 10;
+
+		// Parse arguments
+		if(argc != 3 && argc != 4) {
+			cout << "Usage: " << argv[0] << " LEFT-IMG RIGHT-IMG [CALIBRATION-FILE]" << endl;
+			return 1;
+		}
+		char* leftFile = argv[1];
+		char* rightFile = argv[2];
+		char* calibFile = argc == 4 ? argv[3] : NULL;
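+
+		// Rough meaning of the matching parameters above: 'uniqueness' discards
+		// correspondences whose best matching cost is not clearly better than
+		// that of the runner-up, 'maxDisp' bounds the disparity search range,
+		// and 'leftRightStep' is the step width used when verifying candidate
+		// matches in the right image (see sparsestereo.h / sparsestereo-inl.h).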
+
+		// Read input images
+		cv::Mat_<unsigned char> leftImg, rightImg;
+		leftImg = imread(leftFile, CV_LOAD_IMAGE_GRAYSCALE);
+		rightImg = imread(rightFile, CV_LOAD_IMAGE_GRAYSCALE);
+
+		if(leftImg.data == NULL || rightImg.data == NULL)
+			throw sparsestereo::Exception("Unable to open input images!");
+
+		// Load rectification data
+		StereoRectification* rectification = NULL;
+		if(calibFile != NULL)
+			rectification = new StereoRectification(CalibrationResult(calibFile));
+
+		// The stereo matcher. SSE optimized implementation is only available for a 5x5 window
+		SparseStereo<CensusWindow<5>, short> stereo(maxDisp, 1, uniqueness,
+			rectification, false, false, leftRightStep);
+
+		// Feature detectors for left and right image
+		FeatureDetector* leftFeatureDetector = new ExtendedFAST(true, minThreshold, adaptivity, false, 2);
+		FeatureDetector* rightFeatureDetector = new ExtendedFAST(false, minThreshold, adaptivity, false, 2);
+
+		ptime lastTime = microsec_clock::local_time();
+		vector<SparseMatch> correspondences;
+
+		// Objects for storing final and intermediate results
+		cv::Mat_<char> charLeft(leftImg.rows, leftImg.cols),
+			charRight(rightImg.rows, rightImg.cols);
+		Mat_<unsigned int> censusLeft(leftImg.rows, leftImg.cols),
+			censusRight(rightImg.rows, rightImg.cols);
+		vector<KeyPoint> keypointsLeft, keypointsRight;
+
+		// For performance evaluation we do the stereo matching 100 times
+		for(int i=0; i< 100; i++) {
+			// Feature detection. This part can be parallelized with OMP
+			#pragma omp parallel sections default(shared) num_threads(2)
+			{
+				#pragma omp section
+				{
+					ImageConversion::unsignedToSigned(leftImg, &charLeft);
+					Census::transform5x5(charLeft, &censusLeft);
+					keypointsLeft.clear();
+					leftFeatureDetector->detect(leftImg, keypointsLeft);
+				}
+				#pragma omp section
+				{
+					ImageConversion::unsignedToSigned(rightImg, &charRight);
+					Census::transform5x5(charRight, &censusRight);
+					keypointsRight.clear();
+					rightFeatureDetector->detect(rightImg, keypointsRight);
+				}
+			}
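+
+			// Each section census-transforms one image and detects exFAST
+			// keypoints; the matcher below compares 5x5 census codes by their
+			// Hamming distance, so matching costs come from censusLeft and
+			// censusRight while the keypoint lists restrict the search.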
+
+			// Stereo matching. Not parallelized (overhead too large)
+			correspondences.clear();
+			stereo.match(censusLeft, censusRight, keypointsLeft, keypointsRight, &correspondences);
+		}
+
+		// Print statistics
+		time_duration elapsed = (microsec_clock::local_time() - lastTime);
+		cout << "Time for 100x stereo matching: " << elapsed.total_microseconds()/1.0e6 << "s" << endl
+			<< "Features detected in left image: " << keypointsLeft.size() << endl
+			<< "Features detected in right image: " << keypointsRight.size() << endl
+			<< "Percentage of matched features: " << (100.0 * correspondences.size() / keypointsLeft.size()) << "%" << endl;
+
+		// Highlight matches as colored boxes
+		Mat_<Vec3b> screen(leftImg.rows, leftImg.cols);
+		cvtColor(leftImg, screen, CV_GRAY2BGR);
+
+		for(int i=0; i<(int)correspondences.size(); i++) {
+			double scaledDisp = (double)correspondences[i].disparity() / maxDisp;
+			Vec3b color;
+			if(scaledDisp > 0.5)
+				color = Vec3b(0, (1 - scaledDisp)*512, 255);
+			else color = Vec3b(0, 255, scaledDisp*512);
+
+			rectangle(screen, correspondences[i].imgLeft->pt - Point2f(2,2),
+				correspondences[i].imgLeft->pt + Point2f(2, 2),
+				(Scalar) color, CV_FILLED);
+		}
+
+		// Display image and wait
+		namedWindow("Stereo");
+		imshow("Stereo", screen);
+		waitKey();
+
+		// Clean up
+		delete leftFeatureDetector;
+		delete rightFeatureDetector;
+		if(rectification != NULL)
+			delete rectification;
+
+		return 0;
+	}
+	catch (const std::exception& e) {
+		cerr << "Fatal exception: " << e.what();
+		return 1;
+	}
+}
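Aside: a SparseMatch carries only an image position and a disparity. Recovering metric depth additionally needs the 4x4 reprojection matrix Q produced by stereo calibration (stored by CalibrationResult below). The helper here is an illustrative sketch, not part of this patch; it assumes a rectified pair and OpenCV's convention that (x, y, disparity, 1) maps through Q to a homogeneous 3D point:

	#include <opencv2/opencv.hpp>

	// Illustrative only: reproject one (x, y, disparity) triple to a 3D point.
	cv::Point3f reprojectMatch(double x, double y, double disparity, const cv::Mat_<double>& Q) {
		cv::Mat_<double> v(4, 1);
		v(0, 0) = x; v(1, 0) = y; v(2, 0) = disparity; v(3, 0) = 1.0;
		cv::Mat_<double> X = Q * v; // homogeneous 3D point
		return cv::Point3f((float)(X(0, 0) / X(3, 0)), (float)(X(1, 0) / X(3, 0)),
			(float)(X(2, 0) / X(3, 0)));
	}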
calibration results"); + + fs << "M1" << cameraMatrix[0] << "D1" << distCoeffs[0] + << "M2" << cameraMatrix[1] << "D2" << distCoeffs[1] + << "R1" << R[0] << "R2" << R[1] << "P1" << P[0] << "P2" << P[1] << "Q" << Q + << "T" << T; + + Mat_ sz(2, 1); + sz(0, 0) = imageSize.width; + sz(1, 0) = imageSize.height; + fs << "size" << sz; + + fs.release(); + } +} diff --git a/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/calibrationresult.h b/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/calibrationresult.h new file mode 100644 index 00000000..872aa787 --- /dev/null +++ b/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/calibrationresult.h @@ -0,0 +1,28 @@ +/* + * Author: Konstantin Schauwecker + * Year: 2012 + */ + +#ifndef SPARSESTEREO_CALIBRATIONRESULT_H +#define SPARSESTEREO_CALIBRATIONRESULT_H + +#include + +namespace sparsestereo { + // Stores the results of a stereo camera calibration + struct CalibrationResult { + cv::Mat_ cameraMatrix[2], distCoeffs[2]; + cv::Mat_ R[2], P[2], Q, T; + cv::Size imageSize; + + CalibrationResult() { + cameraMatrix[0] = cv::Mat_::eye(3, 3); + cameraMatrix[1] = cv::Mat_::eye(3, 3); + } + CalibrationResult(const char* file); + + void writeToFile(const char* file) const; + }; +} + +#endif diff --git a/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/census-inl.h b/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/census-inl.h new file mode 100644 index 00000000..d22e3839 --- /dev/null +++ b/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/census-inl.h @@ -0,0 +1,59 @@ +/* + * Author: Konstantin Schauwecker + * Year: 2012 + */ + +#ifndef SPARSESTEREO_CENSUS_INL_H +#define SPARSESTEREO_CENSUS_INL_H + +#include "exception.h" +#include "census.h" + +namespace sparsestereo { + template + void Census::transform5x5(const cv::Mat_& input, cv::Mat_* output) { + int maxX=input.cols-2, maxY=input.rows-2; + + for(int y=2; y input(y-2, x-2)) << 24) | + ((centoid > input(y-2, x-1)) << 23) | + ((centoid > input(y-2, x)) << 22) | + ((centoid > input(y-2, x+1)) << 21) | + ((centoid > input(y-2, x+2)) << 20) | + // Row 2 + ((centoid > input(y-1, x-2)) << 19) | + ((centoid > input(y-1, x-1)) << 18) | + ((centoid > input(y-1, x)) << 17) | + ((centoid > input(y-1, x+1)) << 16) | + ((centoid > input(y-1, x+2)) <<15) | + // Row 3 + ((centoid > input(y, x-2)) << 14) | + ((centoid > input(y, x-1)) << 13) | + ((centoid > input(y, x)) << 12) | + ((centoid > input(y, x+1)) << 11) | + ((centoid > input(y, x+2)) << 10) | + // Row 4 + ((centoid > input(y+1, x-2)) << 9) | + ((centoid > input(y+1, x-1)) << 8) | + ((centoid > input(y+1, x)) << 7) | + ((centoid > input(y+1, x+1)) << 6) | + ((centoid > input(y+1, x+2)) << 5) | + // Row 5 + ((centoid > input(y+2, x-2)) << 4) | + ((centoid > input(y+2, x-1)) << 3) | + ((centoid > input(y+2, x)) << 2) | + ((centoid > input(y+2, x+1)) << 1) | + (centoid > input(y+2, x+2)); + } + } + + template <> + void Census::transform5x5(const cv::Mat_& input, cv::Mat_* output); +} + +#endif diff --git a/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/census.cpp b/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/census.cpp new file mode 100644 index 00000000..0bbab689 --- /dev/null +++ b/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/census.cpp @@ -0,0 +1,109 @@ +/* + * Author: Konstantin Schauwecker + * Year: 2012 + */ + +#include "census-inl.h" + +namespace sparsestereo { + using namespace std; + using namespace cv; 
diff --git a/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/census.cpp b/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/census.cpp
new file mode 100644
index 00000000..0bbab689
--- /dev/null
+++ b/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/census.cpp
@@ -0,0 +1,109 @@
+/*
+ * Author: Konstantin Schauwecker
+ * Year: 2012
+ */
+
+#include "census-inl.h"
+
+namespace sparsestereo {
+	using namespace std;
+	using namespace cv;
+
+	template <>
+	void Census::transform5x5(const Mat_<char>& input, Mat_<unsigned int>* output) {
+		transform5x5SSE(input, output);
+	}
+
+	void Census::transform5x5SSE(const Mat_<char>& input, Mat_<unsigned int>* output) {
+		// Predeclare required constants
+		const v16qi const01 = SIMD::scalar16(0x01);
+
+		// We skip one row at the beginning and end to avoid range checking
+		for(int y=2; y
[... the remainder of transform5x5SSE, the end of census.cpp, and the beginning of census.h (diff header and include guard) were lost when this patch was archived ...]
+#include <opencv2/opencv.hpp>
+#include "simd.h"
+
+namespace sparsestereo {
+	// Computes various variants of the census transform
+	class Census {
+	public:
+		// Census transform using 5x5 window
+		template <class T>
+		static void transform5x5(const cv::Mat_<T>& input, cv::Mat_<unsigned int>* output);
+
+	private:
+		// SSE optimized implementations
+		static void transform5x5SSE(const cv::Mat_<char>& input, cv::Mat_<unsigned int>* output);
+
+		// Efficiently stores 4 byte blocks by interleaving four 1-byte vectors
+		static __always_inline void storeSSEVec(const v16qi& byte4, const v16qi& byte3, const v16qi& byte2,
+			const v16qi& byte1, char* dst);
+	};
+}
+
+#endif
diff --git a/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/censuswindow.h b/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/censuswindow.h
new file mode 100644
index 00000000..9ce555c0
--- /dev/null
+++ b/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/censuswindow.h
@@ -0,0 +1,237 @@
+/*
+ * Author: Konstantin Schauwecker
+ * Year: 2012
+ */
+
+#ifndef SPARSESTEREO_CENSUSWINDOW_H
+#define SPARSESTEREO_CENSUSWINDOW_H
+
+#include <opencv2/opencv.hpp>
+#include <vector>
+#include "hammingdistance.h"
+#include "simd.h"
+
+//#define SPARSESTEREO_NO_POPCNT //Defined by CMake
+
+#if defined(SPARSESTEREO_NO_POPCNT)
+#define SPARSESTEREO_SSE_OPTIMIZE
+#endif
+
+namespace sparsestereo {
+	// Matches over a set of census windows
+	template <int SIZE>
+	class CensusWindow {
+	public:
+		CensusWindow() {
+#ifdef SPARSESTEREO_SSE_OPTIMIZE
+			lookupTablePtr = hammingDist.getLookupTable();
+			lookupTableVec = SIMD::scalar4((int)hammingDist.getLookupTable());
+			zero = SIMD::scalar16(0);
+#endif
+		}
+
+		void setReferenceImage(const cv::Mat_<unsigned int>& image) {
+			refImage = image;
+		}
+
+		void setComparisonImage(const cv::Mat_<unsigned int>& image) {
+			compImage = image;
+		}
+
+		const cv::Mat_<unsigned int>& getReferenceImage() const {return refImage;}
+		const cv::Mat_<unsigned int>& getComparisonImage() const {return compImage;}
+		const int getWindowSize() const {return SIZE;}
+
+		// Sets the position of the reference window
+		void setReferencePoint(const cv::Point2i& point);
+
+		// Performs a window matching using census transformed images
+		__always_inline short match(cv::Point2i point) const;
+
+	private:
+		v4si lookupTableVec;
+		v4si refWindow[6];
+		v16qi zero;
+
+		const unsigned char* lookupTablePtr;
+		HammingDistance hammingDist;
+		cv::Mat_<unsigned int> refImage;
+		cv::Mat_<unsigned int> compImage;
+		cv::Point2i refPoint;
+
+		// Stores a window in two SSE vectors
+		void loadWindow(const cv::Mat_<unsigned int>& image, const cv::Point2i& point, v4si* dstWindow) const;
+	} __attribute__ ((aligned (16)));
+
+	template <int SIZE>
+	void CensusWindow<SIZE>::loadWindow(const cv::Mat_<unsigned int>& image, const cv::Point2i& point, v4si* dstWindow) const {}
+
+#ifdef SPARSESTEREO_SSE_OPTIMIZE
+	template <>
+	void CensusWindow<5>::loadWindow(const cv::Mat_<unsigned int>& image, const cv::Point2i& point, v4si* dstWindow) const {
+		dstWindow[0] = (v4si)__builtin_ia32_loaddqu((char*)&image(point.y-2, point.x-2));
+		dstWindow[1] = (v4si)__builtin_ia32_loaddqu((char*)&image(point.y-1, point.x-2));
+		dstWindow[2] = (v4si)__builtin_ia32_loaddqu((char*)&image(point.y, point.x-2));
+		dstWindow[3] = (v4si)__builtin_ia32_loaddqu((char*)&image(point.y+1, point.x-2));
+		dstWindow[4] =
(v4si)__builtin_ia32_loaddqu((char*)&image(point.y+2, point.x-2));
+
+		// Unfortunately, the rest cannot be loaded aligned
+		v4si buffer = {image(point.y-2, point.x+2), image(point.y-1, point.x+2), image(point.y, point.x+2), image(point.y+1, point.x+2)};
+		dstWindow[5] = buffer;
+	}
+#endif
+
+	template <int SIZE>
+	void CensusWindow<SIZE>::setReferencePoint(const cv::Point2i& point) {
+		refPoint = point;
+	}
+
+#ifdef SPARSESTEREO_SSE_OPTIMIZE
+	template <>
+	void CensusWindow<5>::setReferencePoint(const cv::Point2i& point) {
+		loadWindow(refImage, point, refWindow);
+	}
+#endif
+
+	template <int SIZE>
+	__always_inline short CensusWindow<SIZE>::match(cv::Point2i point) const {
+		int costs = 0;
+
+#ifndef SPARSESTEREO_NO_POPCNT
+		for(int y=-SIZE/2; y<=SIZE/2; y++) {
+			unsigned long long* ptr1 = (unsigned long long*)&refImage(refPoint.y + y, refPoint.x -SIZE/2);
+			unsigned long long* ptr2 = (unsigned long long*)&compImage(point.y + y, point.x -SIZE/2);
+
+			for(int x=0; x<=SIZE/2;x++)
+				costs += __builtin_popcountll(ptr1[x] ^ ptr2[x]);
+		}
+#else
+		for(int y=-SIZE/2; y<=SIZE/2; y++)
+			for(int x=-SIZE/2; x<=SIZE/2; x++)
+				costs += hammingDist.calculate(refImage(refPoint.y + y, refPoint.x + x),
+					compImage(point.y + y, point.x + x));
+#endif
+
+		return costs;
+	}
+
+#ifdef SPARSESTEREO_SSE_OPTIMIZE
+#ifdef __LP64__
+	// SSE2 optimized implementation for 64-bit systems.
+	template <>
+	__always_inline short CensusWindow<5>::match(cv::Point2i point) const {
+		v8hi xorRes;
+		unsigned int sum;
+
+		xorRes = (v8hi)__builtin_ia32_pxor128((v2di)__builtin_ia32_loaddqu((char*)&compImage(point.y-2, point.x-2)), (v2di)refWindow[0]);
+		sum = lookupTableVec[(unsigned short)SIMD::element8(xorRes, 0)] + lookupTableVec[(unsigned short)SIMD::element8(xorRes, 1)] +
+			lookupTableVec[(unsigned short)SIMD::element8(xorRes, 2)] + lookupTableVec[(unsigned short)SIMD::element8(xorRes, 3)] +
+			lookupTableVec[(unsigned short)SIMD::element8(xorRes, 4)] + lookupTableVec[(unsigned short)SIMD::element8(xorRes, 5)] +
+			lookupTableVec[(unsigned short)SIMD::element8(xorRes, 6)] + lookupTableVec[(unsigned short)SIMD::element8(xorRes, 7)];
+
+		xorRes = (v8hi)__builtin_ia32_pxor128((v2di)__builtin_ia32_loaddqu((char*)&compImage(point.y-1, point.x-2)), (v2di)refWindow[1]);
+		sum += lookupTableVec[(unsigned short)SIMD::element8(xorRes, 0)] + lookupTableVec[(unsigned short)SIMD::element8(xorRes, 1)] +
+			lookupTableVec[(unsigned short)SIMD::element8(xorRes, 2)] + lookupTableVec[(unsigned short)SIMD::element8(xorRes, 3)] +
+			lookupTableVec[(unsigned short)SIMD::element8(xorRes, 4)] + lookupTableVec[(unsigned short)SIMD::element8(xorRes, 5)] +
+			lookupTableVec[(unsigned short)SIMD::element8(xorRes, 6)] + lookupTableVec[(unsigned short)SIMD::element8(xorRes, 7)];
+
+		xorRes = (v8hi)__builtin_ia32_pxor128((v2di)__builtin_ia32_loaddqu((char*)&compImage(point.y, point.x-2)), (v2di)refWindow[2]);
+		sum += lookupTableVec[(unsigned short)SIMD::element8(xorRes, 0)] + lookupTableVec[(unsigned short)SIMD::element8(xorRes, 1)] +
+			lookupTableVec[(unsigned short)SIMD::element8(xorRes, 2)] + lookupTableVec[(unsigned short)SIMD::element8(xorRes, 3)] +
+			lookupTableVec[(unsigned short)SIMD::element8(xorRes, 4)] + lookupTableVec[(unsigned short)SIMD::element8(xorRes, 5)] +
+			lookupTableVec[(unsigned short)SIMD::element8(xorRes, 6)] + lookupTableVec[(unsigned short)SIMD::element8(xorRes, 7)];
+
+		xorRes = (v8hi)__builtin_ia32_pxor128((v2di)__builtin_ia32_loaddqu((char*)&compImage(point.y+1, point.x-2)), (v2di)refWindow[3]);
+		sum += lookupTableVec[(unsigned
short)SIMD::element8(xorRes, 0)] + lookupTableVec[(unsigned short)SIMD::element8(xorRes, 1)] + + lookupTableVec[(unsigned short)SIMD::element8(xorRes, 2)] + lookupTableVec[(unsigned short)SIMD::element8(xorRes, 3)] + + lookupTableVec[(unsigned short)SIMD::element8(xorRes, 4)] + lookupTableVec[(unsigned short)SIMD::element8(xorRes, 5)] + + lookupTableVec[(unsigned short)SIMD::element8(xorRes, 6)] + lookupTableVec[(unsigned short)SIMD::element8(xorRes, 7)]; + + xorRes = (v8hi)__builtin_ia32_pxor128((v2di)__builtin_ia32_loaddqu((char*)&compImage(point.y+2, point.x-2)), (v2di)refWindow[4]); + sum += lookupTableVec[(unsigned short)SIMD::element8(xorRes, 0)] + lookupTableVec[(unsigned short)SIMD::element8(xorRes, 1)] + + lookupTableVec[(unsigned short)SIMD::element8(xorRes, 2)] + lookupTableVec[(unsigned short)SIMD::element8(xorRes, 3)] + + lookupTableVec[(unsigned short)SIMD::element8(xorRes, 4)] + lookupTableVec[(unsigned short)SIMD::element8(xorRes, 5)] + + lookupTableVec[(unsigned short)SIMD::element8(xorRes, 6)] + lookupTableVec[(unsigned short)SIMD::element8(xorRes, 7)]; + + v4si buffer1 = {compImage(point.y-2, point.x+2), compImage(point.y-1, point.x+2), compImage(point.y, point.x+2), compImage(point.y+1, point.x+2)}; + xorRes = (v8hi)__builtin_ia32_pxor128((v2di)buffer1, (v2di)refWindow[5]); + sum += lookupTableVec[(unsigned short)SIMD::element8(xorRes, 0)] + lookupTableVec[(unsigned short)SIMD::element8(xorRes, 1)] + + lookupTableVec[(unsigned short)SIMD::element8(xorRes, 2)] + lookupTableVec[(unsigned short)SIMD::element8(xorRes, 3)] + + lookupTableVec[(unsigned short)SIMD::element8(xorRes, 4)] + lookupTableVec[(unsigned short)SIMD::element8(xorRes, 5)] + + lookupTableVec[(unsigned short)SIMD::element8(xorRes, 6)] + lookupTableVec[(unsigned short)SIMD::element8(xorRes, 7)]; + + unsigned short lastXor = compImage(point.y+2, point.x+2) ^ refImage(refPoint.y + 2, refPoint.x+2); + sum += lookupTablePtr[(unsigned short)lastXor] + lookupTablePtr[((unsigned short*)&lastXor)[1]]; + + return sum; + } + +#else + + // SSE2 optimized implementation for 32-bit systems. 
+ template <> + __always_inline short CensusWindow<5>::match(cv::Point2i point) const { + + v8hi xorRes; + v4si lookupPtr; + unsigned int sum; + + xorRes = (v8hi)__builtin_ia32_pxor128((v2di)__builtin_ia32_loaddqu((char*)&compImage(point.y-2, point.x-2)), (v2di)refWindow[0]); + lookupPtr = (v4si)__builtin_ia32_punpcklwd128(xorRes, (v8hi)zero) + lookupTableVec; + sum = *((unsigned char*)SIMD::element4(lookupPtr, 0)) + *((unsigned char*)SIMD::element4(lookupPtr, 1)) + + *((unsigned char*)SIMD::element4(lookupPtr, 2)) + *((unsigned char*)SIMD::element4(lookupPtr, 3)); + lookupPtr = (v4si)__builtin_ia32_punpckhwd128(xorRes, (v8hi)zero) + lookupTableVec; + sum += *((unsigned char*)SIMD::element4(lookupPtr, 0)) + *((unsigned char*)SIMD::element4(lookupPtr, 1)) + + *((unsigned char*)SIMD::element4(lookupPtr, 2)) + *((unsigned char*)SIMD::element4(lookupPtr, 3)); + + xorRes = (v8hi)__builtin_ia32_pxor128((v2di)__builtin_ia32_loaddqu((char*)&compImage(point.y-1, point.x-2)), (v2di)refWindow[1]); + lookupPtr = (v4si)__builtin_ia32_punpcklwd128(xorRes, (v8hi)zero) + lookupTableVec; + sum += *((unsigned char*)SIMD::element4(lookupPtr, 0)) + *((unsigned char*)SIMD::element4(lookupPtr, 1)) + + *((unsigned char*)SIMD::element4(lookupPtr, 2)) + *((unsigned char*)SIMD::element4(lookupPtr, 3)); + lookupPtr = (v4si)__builtin_ia32_punpckhwd128(xorRes, (v8hi)zero) + lookupTableVec; + sum += *((unsigned char*)SIMD::element4(lookupPtr, 0)) + *((unsigned char*)SIMD::element4(lookupPtr, 1)) + + *((unsigned char*)SIMD::element4(lookupPtr, 2)) + *((unsigned char*)SIMD::element4(lookupPtr, 3)); + + xorRes = (v8hi)__builtin_ia32_pxor128((v2di)__builtin_ia32_loaddqu((char*)&compImage(point.y, point.x-2)), (v2di)refWindow[2]); + lookupPtr = (v4si)__builtin_ia32_punpcklwd128(xorRes, (v8hi)zero) + lookupTableVec; + sum += *((unsigned char*)SIMD::element4(lookupPtr, 0)) + *((unsigned char*)SIMD::element4(lookupPtr, 1)) + + *((unsigned char*)SIMD::element4(lookupPtr, 2)) + *((unsigned char*)SIMD::element4(lookupPtr, 3)); + lookupPtr = (v4si)__builtin_ia32_punpckhwd128(xorRes, (v8hi)zero) + lookupTableVec; + sum += *((unsigned char*)SIMD::element4(lookupPtr, 0)) + *((unsigned char*)SIMD::element4(lookupPtr, 1)) + + *((unsigned char*)SIMD::element4(lookupPtr, 2)) + *((unsigned char*)SIMD::element4(lookupPtr, 3)); + + xorRes = (v8hi)__builtin_ia32_pxor128((v2di)__builtin_ia32_loaddqu((char*)&compImage(point.y+1, point.x-2)), (v2di)refWindow[3]); + lookupPtr = (v4si)__builtin_ia32_punpcklwd128(xorRes, (v8hi)zero) + lookupTableVec; + sum += *((unsigned char*)SIMD::element4(lookupPtr, 0)) + *((unsigned char*)SIMD::element4(lookupPtr, 1)) + + *((unsigned char*)SIMD::element4(lookupPtr, 2)) + *((unsigned char*)SIMD::element4(lookupPtr, 3)); + lookupPtr = (v4si)__builtin_ia32_punpckhwd128(xorRes, (v8hi)zero) + lookupTableVec; + sum += *((unsigned char*)SIMD::element4(lookupPtr, 0)) + *((unsigned char*)SIMD::element4(lookupPtr, 1)) + + *((unsigned char*)SIMD::element4(lookupPtr, 2)) + *((unsigned char*)SIMD::element4(lookupPtr, 3)); + + xorRes = (v8hi)__builtin_ia32_pxor128((v2di)__builtin_ia32_loaddqu((char*)&compImage(point.y+2, point.x-2)), (v2di)refWindow[4]); + lookupPtr = (v4si)__builtin_ia32_punpcklwd128(xorRes, (v8hi)zero) + lookupTableVec; + sum += *((unsigned char*)SIMD::element4(lookupPtr, 0)) + *((unsigned char*)SIMD::element4(lookupPtr, 1)) + + *((unsigned char*)SIMD::element4(lookupPtr, 2)) + *((unsigned char*)SIMD::element4(lookupPtr, 3)); + lookupPtr = (v4si)__builtin_ia32_punpckhwd128(xorRes, (v8hi)zero) + 
lookupTableVec;
+		sum += *((unsigned char*)SIMD::element4(lookupPtr, 0)) + *((unsigned char*)SIMD::element4(lookupPtr, 1)) +
+			*((unsigned char*)SIMD::element4(lookupPtr, 2)) + *((unsigned char*)SIMD::element4(lookupPtr, 3));
+
+		v4si buffer1 = {compImage(point.y-2, point.x+2), compImage(point.y-1, point.x+2), compImage(point.y, point.x+2), compImage(point.y+1, point.x+2)};
+		xorRes = (v8hi)__builtin_ia32_pxor128((v2di)buffer1, (v2di)refWindow[5]);
+		lookupPtr = (v4si)__builtin_ia32_punpcklwd128(xorRes, (v8hi)zero) + lookupTableVec;
+		sum += *((unsigned char*)SIMD::element4(lookupPtr, 0)) + *((unsigned char*)SIMD::element4(lookupPtr, 1)) +
+			*((unsigned char*)SIMD::element4(lookupPtr, 2)) + *((unsigned char*)SIMD::element4(lookupPtr, 3));
+		lookupPtr = (v4si)__builtin_ia32_punpckhwd128(xorRes, (v8hi)zero) + lookupTableVec;
+		sum += *((unsigned char*)SIMD::element4(lookupPtr, 0)) + *((unsigned char*)SIMD::element4(lookupPtr, 1)) +
+			*((unsigned char*)SIMD::element4(lookupPtr, 2)) + *((unsigned char*)SIMD::element4(lookupPtr, 3));
+
+		unsigned short lastXor = compImage(point.y+2, point.x+2) ^ refImage(refPoint.y + 2, refPoint.x+2);
+		sum += lookupTablePtr[(unsigned short)lastXor] + lookupTablePtr[((unsigned short*)&lastXor)[1]];
+
+		return sum;
+	}
+#endif
+#endif
+}
+
+#endif
diff --git a/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/exception.h b/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/exception.h
new file mode 100644
index 00000000..89330d9f
--- /dev/null
+++ b/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/exception.h
@@ -0,0 +1,29 @@
+/*
+ * Author: Konstantin Schauwecker
+ * Year: 2012
+ */
+
+#ifndef SPARSESTEREO_EXCEPTION_H
+#define SPARSESTEREO_EXCEPTION_H
+
+#include <exception>
+#include <string>
+
+namespace sparsestereo {
+	// Base class for all exceptions
+	class Exception: public std::exception {
+	public:
+		Exception(const char* msg)
+			: message(msg) {}
+		Exception(std::string msg)
+			: message(msg) {}
+		~Exception() throw() {}
+
+		virtual const char* what() const throw()
+		{return message.c_str();}
+
+	private:
+		std::string message;
+	};
+}
+#endif
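The cost function used by CensusWindow::match above is the Hamming distance between 32-bit census codes. A minimal scalar illustration (not part of the patch):

	#include <cstdint>

	// Hamming distance between two census codes: XOR marks the comparison
	// bits that disagree, popcount counts them. The POPCNT build of
	// CensusWindow::match does the same on two codes per 64-bit word.
	inline int censusDistance(uint32_t a, uint32_t b) {
		return __builtin_popcount(a ^ b);
	}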
diff --git a/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/extendedfast-inl.h b/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/extendedfast-inl.h
new file mode 100644
index 00000000..db816ee1
--- /dev/null
+++ b/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/extendedfast-inl.h
@@ -0,0 +1,47 @@
+/*
+ * Author: Konstantin Schauwecker
+ * Year: 2012
+ */
+
+#include "extendedfast.h"
+
+namespace sparsestereo {
+
+	__always_inline v16qi ExtendedFAST::loadCircleSSE(const cv::Mat_<unsigned char>& input, int x, int y) {
+		const char* center = (const char*)&(input(y, x));
+
+		v16qi circle = {*(center + offsets[0]), *(center + offsets[1]), *(center + offsets[2]), *(center + offsets[3]),
+			*(center + offsets[4]), *(center + offsets[5]), *(center + offsets[6]), *(center + offsets[7]),
+			*(center + offsets[8]), *(center + offsets[9]), *(center + offsets[10]), *(center + offsets[11]),
+			*(center + offsets[12]), *(center + offsets[13]), *(center + offsets[14]), *(center + offsets[15])
+		};
+
+		return circle;
+	}
+
+	// Tests for a feature and optionally stores the score
+	__always_inline bool ExtendedFAST::testFeatureAndScore(const cv::Mat_<unsigned char>& input, const cv::Point2i pt, bool storeScore) {
+		// Calculate the adaptive threshold by computing an RMS contrast
+		// like measure based on absolute values
+		v16qi uCircle = loadCircleSSE(input, (int)pt.x, (int)pt.y);
+		v16qi circle = uCircle - const128;
+		v2di partialSum = __builtin_ia32_psadbw128(uCircle, const0);
+		int sum = SIMD::element2(partialSum, 0) + SIMD::element2(partialSum, 1);
+		v16qi avg = SIMD::scalar16(sum/16);
+
+		v2di partialSAD = __builtin_ia32_psadbw128(uCircle, avg);
+		int sad = SIMD::element2(partialSAD, 0) + SIMD::element2(partialSAD, 1);
+		unsigned char adaptiveThreshold = cv::saturate_cast<unsigned char>(sad * adaptivity / 16);
+		v16qi adaptiveThresholdVec = SIMD::scalar16(adaptiveThreshold);
+
+		// Perform corner test
+		v16qi center = SIMD::scalar16(SPARSESTEREO_EXFAST_CENTRAL_VALUE(input, (int)pt.y, (int)pt.x) - 128);
+		if(detectSingleCornerSSE(circle, center, adaptiveThresholdVec, 9)) {
+			if(storeScore) // We have to calculate a corner score
+				scores.push_back(calcSingleScoreSSE(circle, center, adaptiveThresholdVec, 9) - adaptiveThreshold);
+			return true;
+		}
+		else return false;
+	}
+}
diff --git a/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/extendedfast.cpp b/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/extendedfast.cpp
new file mode 100644
index 00000000..3378a76a
--- /dev/null
+++ b/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/extendedfast.cpp
@@ -0,0 +1,232 @@
+/*
+ * Author: Konstantin Schauwecker
+ * Year: 2012
+ */
+
+#include <opencv2/opencv.hpp>
+#include <vector>
+#include "extendedfast.h"
+#include "fast9-inl.h"
+#include "exception.h"
+
+namespace sparsestereo {
+	using namespace cv;
+	using namespace std;
+
+	const v16qi ExtendedFAST::const0(SIMD::scalar16NonLookup(0));
+	const v16qi ExtendedFAST::const128(SIMD::scalar16NonLookup(128));
+	const v16qi ExtendedFAST::const255(SIMD::scalar16NonLookup(255));
+
+	// Longest arc length lookup table
+	unsigned char ExtendedFAST::lookupTable[1U<<16];
+	bool ExtendedFAST::lookupInitialized = false;
+
+	ExtendedFAST::ExtendedFAST(bool nonmaxSuppression, unsigned char minThreshold, float adaptivity, bool subpixelPrecision, int minBorder)
+		: nonmaxSuppression(nonmaxSuppression), minThreshold(minThreshold), adaptivity(adaptivity), subpixelPrecision(subpixelPrecision) {
+		const int reserve = 512;
+		cornersMin.reserve(reserve); cornersAdapt.reserve(reserve); scores.reserve(reserve);
+
+		if(!lookupInitialized) {
+			// The first instance initializes the lookup table.
+			// Should be threadsafe as we always write the same values and set the
+			// initialization flag last.
+			for(int i=0; i<=0xFFFF; i++)
+				lookupTable[i] = findLongestArc(i) >= 9 ? 0xFF : 0;
+			lookupInitialized = true;
+		}
+	}
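+
+	// lookupTable maps each 16-bit circle-comparison mask to 0xFF if the mask
+	// contains a contiguous arc of at least 9 set bits (with wrap-around) and
+	// to 0 otherwise, so the per-pixel corner test below reduces to a single
+	// table lookup per comparison direction.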
+
+	ExtendedFAST::~ExtendedFAST() {
+	}
+
+	inline unsigned char ExtendedFAST::findLongestArc(unsigned short stripe) {
+		int bestLength = 0;
+		int startLength = -1;
+		int currentLength = 0;
+
+		// Iterate over all 16 bit positions of the comparison mask
+		for(int i=1; i<0xFFFF; i = i<<1) {
+			if(stripe & i)
+				// Inside an arc segment
+				currentLength++;
+			else {
+				// Outside an arc segment
+				if(currentLength > bestLength)
+					bestLength = currentLength;
+				if(startLength == -1)
+					startLength = currentLength;
+				currentLength = 0;
+			}
+		}
+
+		// wrap-around case
+		if(startLength != -1)
+			currentLength += startLength;
+
+		// Handle last arc segment
+		if(currentLength > bestLength)
+			bestLength = currentLength;
+
+		return bestLength;
+	}
+
+	void ExtendedFAST::initOffsets(int step) {
+		offsets[0] = step*-3 -1;
+		offsets[1] = step*-3;
+		offsets[2] = step*-3 +1;
+		offsets[3] = step*-2 +2;
+		offsets[4] = step*-1 +3;
+		offsets[5] = +3;
+		offsets[6] = step +3;
+		offsets[7] = step*+2 +2;
+		offsets[8] = step*+3 +1;
+		offsets[9] = step*+3;
+		offsets[10] = step*+3 -1;
+		offsets[11] = step*+2 -2;
+		offsets[12] = step*+1 -3; //Aligned
+		offsets[13] = -3; //Aligned
+		offsets[14] = step*-1 -3; //Aligned
+		offsets[15] = step*-2 -2;
+	}
+
+	void ExtendedFAST::detectImpl(const Mat& image, vector<KeyPoint>& keypoints, const Mat& mask) {
+
+		if(mask.data != NULL)
+			throw Exception("Feature detection masks not supported!");
+		else if(image.type() != CV_8U)
+			throw Exception("Image data has to be of type unsigned char!");
+
+		// Create offsets for circle pixel
+		initOffsets(image.step);
+
+		FAST(image, cornersMin, minThreshold, false);
+		adaptiveThresholdAndScore(image);
+
+		if(nonmaxSuppression)
+		{
+			// Perform nonmax suppression
+			vector<int> nonmaxPoints;
+			fast9.nonMaxSuppression(cornersAdapt, scores, nonmaxPoints);
+
+			// Copy and optionally refine result
+			keypoints.reserve(nonmaxPoints.size());
+			for(unsigned int i=0; i
[... the remainder of detectImpl was lost when this patch was archived ...]
+
+	void ExtendedFAST::adaptiveThresholdAndScore(const cv::Mat_<unsigned char>& input) {
+		initOffsets((int)input.step);
+		for(unsigned int i=0; i
[... loop body lost in archiving; per its declaration in extendedfast.h it applies the adaptive threshold test to every candidate in cornersMin, collecting the survivors in cornersAdapt and their scores in 'scores' ...]
+	}
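+
+	// The adaptive threshold computed in testFeatureAndScore below is the mean
+	// absolute deviation of the 16 circle pixels, scaled by the adaptivity
+	// factor: psadbw against zero yields the pixel sum, the average is then
+	// broadcast to a vector, and a second psadbw yields the SAD against it.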
+
+	__always_inline v16qi ExtendedFAST::loadCircleSSE(const cv::Mat_<unsigned char>& input, int x, int y) {
+		const char* center = (const char*)&(input(y, x));
+
+		v16qi circle = {*(center + offsets[0]), *(center + offsets[1]), *(center + offsets[2]), *(center + offsets[3]),
+			*(center + offsets[4]), *(center + offsets[5]), *(center + offsets[6]), *(center + offsets[7]),
+			*(center + offsets[8]), *(center + offsets[9]), *(center + offsets[10]), *(center + offsets[11]),
+			*(center + offsets[12]), *(center + offsets[13]), *(center + offsets[14]), *(center + offsets[15])
+		};
+
+		return circle;
+	}
+
+	__always_inline bool ExtendedFAST::testFeatureAndScore(const cv::Mat_<unsigned char>& input, const cv::Point2i pt, bool storeScore) {
+		// Calculate the adaptive threshold by computing an RMS contrast
+		// like measure based on absolute values
+		v16qi uCircle = loadCircleSSE(input, (int)pt.x, (int)pt.y);
+		v16qi circle = uCircle - const128;
+		v2di partialSum = __builtin_ia32_psadbw128(uCircle, const0);
+		int sum = SIMD::element2(partialSum, 0) + SIMD::element2(partialSum, 1);
+		v16qi avg = SIMD::scalar16(sum/16);
+
+		v2di partialSAD = __builtin_ia32_psadbw128(uCircle, avg);
+		int sad = SIMD::element2(partialSAD, 0) + SIMD::element2(partialSAD, 1);
+		unsigned char adaptiveThreshold = cv::saturate_cast<unsigned char>(sad * adaptivity / 16);
+		v16qi adaptiveThresholdVec = SIMD::scalar16(adaptiveThreshold);
+
+		// Perform corner test
+		v16qi center = SIMD::scalar16(SPARSESTEREO_EXFAST_CENTRAL_VALUE(input, (int)pt.y, (int)pt.x) - 128);
+		if(detectSingleCornerSSE(circle, center, adaptiveThresholdVec, 9)) {
+			if(storeScore) // We have to calculate a corner score
+				scores.push_back(calcSingleScoreSSE(circle, center, adaptiveThresholdVec, 9) - adaptiveThreshold);
+			return true;
+		}
+		else return false;
+	}
+
+	__always_inline bool ExtendedFAST::detectSingleCornerSSE(const v16qi& circle, const v16qi& center, const v16qi& threshold,
+		unsigned char minLength) {
+
+		// Find longest brighter arc
+		v16qi centersPlus = __builtin_ia32_paddsb128(center, threshold);
+		int arcBrighter = __builtin_ia32_pmovmskb128(__builtin_ia32_pcmpgtb128(circle, centersPlus));
+
+		if(lookupTable[arcBrighter] >= minLength)
+			return true;
+		else {
+			// Find longest darker arc
+			v16qi centersMinus = __builtin_ia32_psubsb128(center, threshold);
+			int arcDarker = __builtin_ia32_pmovmskb128(__builtin_ia32_pcmpgtb128(centersMinus, circle));
+			if(lookupTable[arcDarker] >= minLength)
+				return true;
+		}
+
+		return false;
+	}
+
+	__always_inline unsigned char ExtendedFAST::calcSingleScoreSSE(const v16qi& circle, v16qi center, const v16qi& bstartVec,
+		unsigned char minLength) {
+
+		v16qi bmin = bstartVec;
+		v16qi bmax = const255;
+		v16qi b = __builtin_ia32_pavgb128 (bmax, bmin);
+		center += const128;
+
+		//Compute the score using binary search
+		for(;;)
+		{
+			// Find brighter arc
+			v16qi centerPlus = __builtin_ia32_paddusb128(center, b) - const128;
+			int arcBrighter = __builtin_ia32_pmovmskb128(__builtin_ia32_pcmpgtb128(circle, centerPlus));
+
+			if(lookupTable[arcBrighter] >= minLength)
+				bmin = b; // corner
+			else {
+				// Find darker arc
+				v16qi centerMinus = __builtin_ia32_psubusb128(center, b) - const128;
+				int arcDarker = __builtin_ia32_pmovmskb128(__builtin_ia32_pcmpgtb128(centerMinus, circle));
+				if(lookupTable[arcDarker] >= minLength)
+					bmin = b; // corner
+				else
+					bmax = b; // Not a corner
+			}
+
+			unsigned char singleBMin = SIMD::element16(bmin, 0), singleBMax = SIMD::element16(bmax, 0);
+			if(singleBMin == singleBMax || singleBMin == singleBMax - 1)
+				return (unsigned char)SIMD::element16(bmin, 0);
+
+			// Update threshold
+			b = __builtin_ia32_pavgb128 (bmax, bmin);
+		}
+	}
+}
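The score search in calcSingleScoreSSE is a plain binary search over the test threshold: the corner score is the largest threshold at which the pixel still passes the 9-contiguous-arc test. A scalar sketch of the same idea (illustrative, not part of the patch):

	// Binary search for the highest threshold 'b' in [bmin, bmax] at which
	// 'passes(b)' still holds; mirrors the vectorized loop above.
	template <typename Pred>
	int cornerScore(Pred passes, int bmin, int bmax) {
		while (bmin < bmax - 1) {
			int b = (bmin + bmax) / 2;
			if (passes(b))
				bmin = b; // still a corner: try a higher threshold
			else
				bmax = b; // no longer a corner: lower the threshold
		}
		return bmin;
	}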
diff --git a/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/extendedfast.h b/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/extendedfast.h
new file mode 100644
index 00000000..7136ab51
--- /dev/null
+++ b/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/extendedfast.h
@@ -0,0 +1,77 @@
+/*
+ * Author: Konstantin Schauwecker
+ * Year: 2012
+ */
+
+#ifndef SPARSESTEREO_EXTENDEDFAST_H
+#define SPARSESTEREO_EXTENDEDFAST_H
+
+#include <opencv2/opencv.hpp>
+#include <vector>
+#include <cmath>
+#include "simd.h"
+#include "fast9.h"
+
+namespace sparsestereo {
+
+//#define SPARSESTEREO_EXFAST_CENTRAL_VALUE(img, y, x) img(y, x)
+#define SPARSESTEREO_EXFAST_CENTRAL_VALUE(img, y, x) (((int)img(y, x) + (int)img(y-1, x) + (int)img(y+1, x) + (int)img(y, x-1) + (int)img(y, x+1))/5)
+
+	// Extended version of the fast feature detector
+	class ExtendedFAST: public cv::FeatureDetector {
+	public:
+		ExtendedFAST(bool nonmaxSuppression, unsigned char minThreshold, float adaptivity, bool subpixelPrecision, int minBorder = 0);
+		virtual ~ExtendedFAST();
+
+	protected:
+		// Implements the feature detector interface
+		virtual void detectImpl(const cv::Mat& image, std::vector<cv::KeyPoint>& keypoints, const cv::Mat& mask=cv::Mat()) const {
+			// OpenCV tries to force this method to be const, but we don't like that!
+			const_cast<ExtendedFAST*>(this)->detectImpl(image, keypoints, mask);
+		}
+		void detectImpl(const cv::Mat& image, std::vector<cv::KeyPoint>& keypoints, const cv::Mat& mask=cv::Mat());
+
+	private:
+		// Constants for SSE processing
+		static const v16qi const0, const128, const255;
+
+		bool nonmaxSuppression;
+		// Adaptive threshold parameters
+		unsigned char minThreshold;
+		unsigned char maxThreshold;
+		float adaptivity;
+		bool subpixelPrecision;
+		int border;
+		FAST9<unsigned char> fast9;
+
+		// Feature point data
+		std::vector<cv::Point2i> cornersAdapt;
+		std::vector<cv::KeyPoint> cornersMin;
+		std::vector<unsigned char> scores;
+
+		// Offsets for the circle pixel
+		int offsets[16];
+		// Lookup table for longest arc lengths
+		static unsigned char lookupTable[1U<<16];
+		static bool lookupInitialized;
+
+		// Finds the longest arc for a given comparison pattern
+		inline unsigned char findLongestArc(unsigned short stripe);
+		// Initializes offsets for corner detection
+		void initOffsets(int step);
+		// Loads 16 circle pixels into one SSE vector
+		__always_inline v16qi loadCircleSSE(const cv::Mat_<unsigned char>& input, int x, int y);
+		// Performs feature test and score calculation
+		__always_inline bool testFeatureAndScore(const cv::Mat_<unsigned char>& input, const cv::Point2i pt, bool storeScore);
+		// Performs a corner detection using the circle pixels from an SSE vector
+		__always_inline bool detectSingleCornerSSE(const v16qi& circle, const v16qi& center, const v16qi& threshold,
+			unsigned char minLength);
+		// Performs a score calculation using the circle pixels from an SSE vector
+		__always_inline unsigned char calcSingleScoreSSE(const v16qi& circle, v16qi center, const v16qi& bstartVec,
+			unsigned char minLength);
+		// Applies the adaptive threshold to cornersMin, and calculates the score
+		void adaptiveThresholdAndScore(const cv::Mat_<unsigned char>& input);
+	};
+}
+
+#endif
diff --git a/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/fast9-inl.h b/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/fast9-inl.h
new file mode 100644
index 00000000..ada45d04
--- /dev/null
+++ b/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/fast9-inl.h
@@ -0,0 +1,6016 @@
+// This code is based on the original FAST corner detector by Edward Rosten.
+// Below is the original copyright and references
+
+/*
+Copyright (c) 2006, 2008 Edward Rosten
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions
+are met:
+
+	*Redistributions of source code must retain the above copyright
+	notice, this list of conditions and the following disclaimer.
+
+	*Redistributions in binary form must reproduce the above copyright
+	notice, this list of conditions and the following disclaimer in the
+	documentation and/or other materials provided with the distribution.
+
+	*Neither the name of the University of Cambridge nor the names of
+	its contributors may be used to endorse or promote products derived
+	from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL THE COPYRIGHT OWNER OR +CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +*/ + +/* +The references are: + * Machine learning for high-speed corner detection, + E. Rosten and T. Drummond, ECCV 2006 + * Faster and better: A machine learning approach to corner detection + E. Rosten, R. Porter and T. Drummond, PAMI, 2009 +*/ + +#ifndef SPARSESTEREO_FAST9_INL_H +#define SPARSESTEREO_FAST9_INL_H + +#include "fast9.h" + +namespace sparsestereo { + template + const int FAST9::pixelOffsetsX[16] = {0, 1, 2, 3, 3, 3, 2, 1, 0, -1, -2, -3, -3, -3, -2, -1}; + + template + const int FAST9::pixelOffsetsY[16] = {3, 3, 2, 1, 0, -1, -2, -3, -3, -3, -2, -1, 0, 1, 2, 3}; + + template + FAST9::FAST9():step(-1) { + setStep(640); + } + + template + void FAST9::setStep(int s) { + if(s == step) + return; // noting changed + + step = s; + for(int i=0; i<16; i++) + pixel[i] = pixelOffsetsX[i] + step * pixelOffsetsY[i]; + } + + /* This defines non-strict maxima */ +#define SPARSESTEREO_NONMAX_COMPARE(X, Y) ((X)>=(Y)) + + /* This is a fast, integer only, sparse nonmaximal suppression. */ + /* probably only useful for FAST corner detection */ + template template + void FAST9::nonMaxSuppression(const std::vector& corners, const std::vector& scores, std::vector& ret_nonmax) const { + int i, j, num_corners = (int)corners.size(); + + // Point above points (roughly) to the pixel above the one of interest, if there is a feature there. + int point_above = 0; + int point_below = 0; + + ret_nonmax.clear(); + if(num_corners < 1) + return; + + /* Find where each row begins + (the corners are output in raster scan order). A beginning of -1 signifies + that there are no corners on that row. */ + int last_row = corners[num_corners-1].y; + std::vector row_start(last_row+1); + + for(i=0; i < last_row+1; i++) + row_start[i] = -1; + + int prev_row = -1; + for(i=0; i< num_corners; i++) + if(corners[i].y != prev_row) + { + row_start[corners[i].y] = i; + prev_row = corners[i].y; + } + + + ret_nonmax.reserve(num_corners); + for(i = 0; i < num_corners; i++) { + + T2 score = scores[i]; + cv::Point pos = corners[i]; + + // Check left + if(i > 0 && corners[i-1].x == pos.x-1 && corners[i-1].y == pos.y) { + if(SPARSESTEREO_NONMAX_COMPARE(scores[i-1], score)) + continue; + } + + // Check right + if(i < num_corners-1 && corners[i+1].x == pos.x+1 && corners[i+1].y == pos.y) { + if(SPARSESTEREO_NONMAX_COMPARE(scores[i+1], score)) + continue; + } + + bool suppressed = false; + // Check above (if there is a valid row above) + if(pos.y != 0 && row_start[pos.y - 1] != -1) { + // Make sure that current point_above is one row above. + if(corners[point_above].y < pos.y - 1) + point_above = row_start[pos.y-1]; + + // Make point_above point to the first of the pixels above the current point, if it exists. 
+ for(; corners[point_above].y < pos.y && corners[point_above].x < pos.x - 1; point_above++)
+ ;
+
+ for(j=point_above; corners[j].y < pos.y && corners[j].x <= pos.x + 1; j++) {
+ int x = corners[j].x;
+ if( (x == pos.x - 1 || x == pos.x || x == pos.x+1)) {
+ if(SPARSESTEREO_NONMAX_COMPARE(scores[j], score)) {
+ suppressed = true;
+ break;
+ }
+ }
+ }
+ if( suppressed )
+ continue;
+ }
+
+ // Check below (if there is anything below)
+ if(pos.y != last_row && row_start[pos.y + 1] != -1 && point_below < num_corners) { // Nothing below
+ if(corners[point_below].y < pos.y + 1)
+ point_below = row_start[pos.y+1];
+
+ // Make point_below point to one of the pixels below the current point, if it exists.
+ for(; point_below < num_corners && corners[point_below].y == pos.y+1
+ && corners[point_below].x < pos.x - 1; point_below++)
+ ;
+
+ for(j=point_below; j < num_corners && corners[j].y == pos.y+1 && corners[j].x <= pos.x + 1; j++) {
+ int x = corners[j].x;
+ if( (x == pos.x - 1 || x == pos.x || x == pos.x+1)) {
+ if(SPARSESTEREO_NONMAX_COMPARE(scores[j], score)) {
+ suppressed = true;
+ break;
+ }
+ }
+ }
+ if( suppressed )
+ continue;
+ }
+
+ ret_nonmax.push_back(i);
+ }
+ }
+
+ template <typename T>
+ int FAST9<T>::cornerScore(const T* p, T c, int bstart) const {
+ int bmin = bstart;
+ int bmax = 255;
+ int b = (bmax + bmin)/2;
+
+ /* Compute the score using binary search: the score is the largest
+ threshold b for which p is still accepted as a corner. */
+ for(;;)
+ {
+ int cb = c + b;
+ int c_b = c - b;
+
+ if( p[pixel[0]] > cb) {
+ if( p[pixel[1]] > cb) {
+ if( p[pixel[2]] > cb) {
+ if( p[pixel[3]] > cb) {
+ if( p[pixel[4]] > cb) {
+ if( p[pixel[5]] > cb) {
+ if( p[pixel[6]] > cb) {
+ if( p[pixel[7]] > cb) {
+ if( p[pixel[8]] > cb) {
+ goto is_a_corner;
+ } else
+ if( p[pixel[15]] > cb) {
+ goto is_a_corner;
+ } else
+ goto is_not_a_corner;
+ } else if( p[pixel[7]] < c_b) {
+ if( p[pixel[14]] > cb) {
+ if( p[pixel[15]] > cb) {
+ goto is_a_corner;
+ } else
+ goto is_not_a_corner;
+ } else if( p[pixel[14]] < c_b) {
+ if( p[pixel[8]] < c_b) {
+ if( p[pixel[9]] < c_b) {
+ if( p[pixel[10]] < c_b) {
+ if( p[pixel[11]] < c_b) {
+ if( p[pixel[12]] < c_b) {
+ if( p[pixel[13]] < c_b) {
+ if( p[pixel[15]] < c_b) {
+ goto is_a_corner;
+ } else
+ goto is_not_a_corner;
+ } else
+ goto is_not_a_corner;
+ } else
+ goto is_not_a_corner;
+ } else
+ goto is_not_a_corner;
+ } else
+ goto is_not_a_corner;
+ } else
+ goto is_not_a_corner;
+ } else
+ goto is_not_a_corner;
+ } else
+ goto is_not_a_corner;
+ } else
+ if( p[pixel[14]] > cb) {
+ if( p[pixel[15]] > cb) {
+ goto is_a_corner;
+ } else
+ goto is_not_a_corner;
+ } else
+ goto is_not_a_corner;
+ } else if( p[pixel[6]] < c_b) {
+ if( p[pixel[15]] > cb) {
+ if( p[pixel[13]] > cb) {
+ if( p[pixel[14]] > cb) {
+ goto is_a_corner;
+ } else
+ goto is_not_a_corner;
+ } else if( p[pixel[13]] < c_b) {
+ if( p[pixel[7]] < c_b) {
+ if( p[pixel[8]] < c_b) {
+ if( p[pixel[9]] < c_b) {
+ if( p[pixel[10]] < c_b) {
+ if( p[pixel[11]] < c_b) {
+ if( p[pixel[12]] < c_b) {
+ if( p[pixel[14]] < c_b) {
+ goto is_a_corner;
+ } else
+ goto is_not_a_corner;
+ } else
+ goto is_not_a_corner;
+ } else
+ goto is_not_a_corner;
+ } else
+ goto is_not_a_corner;
+ } else
+ goto is_not_a_corner;
+ } else
+ goto is_not_a_corner;
+ } else
+ goto is_not_a_corner;
+ } else
+ goto is_not_a_corner;
+ } else
+ if( p[pixel[7]] < c_b) {
+ if( p[pixel[8]] < c_b) {
+ if( p[pixel[9]] < c_b) {
+ if( p[pixel[10]] < c_b) {
+ if( p[pixel[11]] < c_b) {
+ if( p[pixel[12]] < c_b) {
+ if( p[pixel[13]] < c_b) {
+ if( p[pixel[14]] < c_b) {
+ goto is_a_corner;
+ } else
+ goto is_not_a_corner;
+ } else
+ goto 
is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[13]] > cb) { + if( p[pixel[14]] > cb) { + if( p[pixel[15]] > cb) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else if( p[pixel[13]] < c_b) { + if( p[pixel[7]] < c_b) { + if( p[pixel[8]] < c_b) { + if( p[pixel[9]] < c_b) { + if( p[pixel[10]] < c_b) { + if( p[pixel[11]] < c_b) { + if( p[pixel[12]] < c_b) { + if( p[pixel[14]] < c_b) { + if( p[pixel[15]] < c_b) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else if( p[pixel[5]] < c_b) { + if( p[pixel[14]] > cb) { + if( p[pixel[12]] > cb) { + if( p[pixel[13]] > cb) { + if( p[pixel[15]] > cb) { + goto is_a_corner; + } else + if( p[pixel[6]] > cb) { + if( p[pixel[7]] > cb) { + if( p[pixel[8]] > cb) { + if( p[pixel[9]] > cb) { + if( p[pixel[10]] > cb) { + if( p[pixel[11]] > cb) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else if( p[pixel[12]] < c_b) { + if( p[pixel[6]] < c_b) { + if( p[pixel[7]] < c_b) { + if( p[pixel[8]] < c_b) { + if( p[pixel[9]] < c_b) { + if( p[pixel[10]] < c_b) { + if( p[pixel[11]] < c_b) { + if( p[pixel[13]] < c_b) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else if( p[pixel[14]] < c_b) { + if( p[pixel[7]] < c_b) { + if( p[pixel[8]] < c_b) { + if( p[pixel[9]] < c_b) { + if( p[pixel[10]] < c_b) { + if( p[pixel[11]] < c_b) { + if( p[pixel[12]] < c_b) { + if( p[pixel[13]] < c_b) { + if( p[pixel[6]] < c_b) { + goto is_a_corner; + } else + if( p[pixel[15]] < c_b) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[6]] < c_b) { + if( p[pixel[7]] < c_b) { + if( p[pixel[8]] < c_b) { + if( p[pixel[9]] < c_b) { + if( p[pixel[10]] < c_b) { + if( p[pixel[11]] < c_b) { + if( p[pixel[12]] < c_b) { + if( p[pixel[13]] < c_b) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[12]] > cb) { + if( p[pixel[13]] > cb) { + if( p[pixel[14]] > cb) { + if( p[pixel[15]] > cb) { + goto is_a_corner; + } else + if( p[pixel[6]] > cb) { + if( p[pixel[7]] > cb) { + if( p[pixel[8]] > cb) { + if( p[pixel[9]] > cb) { + if( p[pixel[10]] > cb) { + if( p[pixel[11]] > cb) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; 
+ } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else if( p[pixel[12]] < c_b) { + if( p[pixel[7]] < c_b) { + if( p[pixel[8]] < c_b) { + if( p[pixel[9]] < c_b) { + if( p[pixel[10]] < c_b) { + if( p[pixel[11]] < c_b) { + if( p[pixel[13]] < c_b) { + if( p[pixel[14]] < c_b) { + if( p[pixel[6]] < c_b) { + goto is_a_corner; + } else + if( p[pixel[15]] < c_b) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else if( p[pixel[4]] < c_b) { + if( p[pixel[13]] > cb) { + if( p[pixel[11]] > cb) { + if( p[pixel[12]] > cb) { + if( p[pixel[14]] > cb) { + if( p[pixel[15]] > cb) { + goto is_a_corner; + } else + if( p[pixel[6]] > cb) { + if( p[pixel[7]] > cb) { + if( p[pixel[8]] > cb) { + if( p[pixel[9]] > cb) { + if( p[pixel[10]] > cb) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[5]] > cb) { + if( p[pixel[6]] > cb) { + if( p[pixel[7]] > cb) { + if( p[pixel[8]] > cb) { + if( p[pixel[9]] > cb) { + if( p[pixel[10]] > cb) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else if( p[pixel[11]] < c_b) { + if( p[pixel[5]] < c_b) { + if( p[pixel[6]] < c_b) { + if( p[pixel[7]] < c_b) { + if( p[pixel[8]] < c_b) { + if( p[pixel[9]] < c_b) { + if( p[pixel[10]] < c_b) { + if( p[pixel[12]] < c_b) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else if( p[pixel[13]] < c_b) { + if( p[pixel[7]] < c_b) { + if( p[pixel[8]] < c_b) { + if( p[pixel[9]] < c_b) { + if( p[pixel[10]] < c_b) { + if( p[pixel[11]] < c_b) { + if( p[pixel[12]] < c_b) { + if( p[pixel[6]] < c_b) { + if( p[pixel[5]] < c_b) { + goto is_a_corner; + } else + if( p[pixel[14]] < c_b) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[14]] < c_b) { + if( p[pixel[15]] < c_b) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[5]] < c_b) { + if( p[pixel[6]] < c_b) { + if( p[pixel[7]] < c_b) { + if( p[pixel[8]] < c_b) { + if( p[pixel[9]] < c_b) { + if( p[pixel[10]] < c_b) { + if( p[pixel[11]] < c_b) { + if( p[pixel[12]] < c_b) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[11]] > cb) { + if( p[pixel[12]] > cb) 
{ + if( p[pixel[13]] > cb) { + if( p[pixel[14]] > cb) { + if( p[pixel[15]] > cb) { + goto is_a_corner; + } else + if( p[pixel[6]] > cb) { + if( p[pixel[7]] > cb) { + if( p[pixel[8]] > cb) { + if( p[pixel[9]] > cb) { + if( p[pixel[10]] > cb) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[5]] > cb) { + if( p[pixel[6]] > cb) { + if( p[pixel[7]] > cb) { + if( p[pixel[8]] > cb) { + if( p[pixel[9]] > cb) { + if( p[pixel[10]] > cb) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else if( p[pixel[11]] < c_b) { + if( p[pixel[7]] < c_b) { + if( p[pixel[8]] < c_b) { + if( p[pixel[9]] < c_b) { + if( p[pixel[10]] < c_b) { + if( p[pixel[12]] < c_b) { + if( p[pixel[13]] < c_b) { + if( p[pixel[6]] < c_b) { + if( p[pixel[5]] < c_b) { + goto is_a_corner; + } else + if( p[pixel[14]] < c_b) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[14]] < c_b) { + if( p[pixel[15]] < c_b) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else if( p[pixel[3]] < c_b) { + if( p[pixel[10]] > cb) { + if( p[pixel[11]] > cb) { + if( p[pixel[12]] > cb) { + if( p[pixel[13]] > cb) { + if( p[pixel[14]] > cb) { + if( p[pixel[15]] > cb) { + goto is_a_corner; + } else + if( p[pixel[6]] > cb) { + if( p[pixel[7]] > cb) { + if( p[pixel[8]] > cb) { + if( p[pixel[9]] > cb) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[5]] > cb) { + if( p[pixel[6]] > cb) { + if( p[pixel[7]] > cb) { + if( p[pixel[8]] > cb) { + if( p[pixel[9]] > cb) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[4]] > cb) { + if( p[pixel[5]] > cb) { + if( p[pixel[6]] > cb) { + if( p[pixel[7]] > cb) { + if( p[pixel[8]] > cb) { + if( p[pixel[9]] > cb) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else if( p[pixel[10]] < c_b) { + if( p[pixel[7]] < c_b) { + if( p[pixel[8]] < c_b) { + if( p[pixel[9]] < c_b) { + if( p[pixel[11]] < c_b) { + if( p[pixel[6]] < c_b) { + if( p[pixel[5]] < c_b) { + if( p[pixel[4]] < c_b) { + goto is_a_corner; + } else + if( p[pixel[12]] < c_b) { + if( p[pixel[13]] < c_b) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[12]] < c_b) { + if( p[pixel[13]] < c_b) { + if( p[pixel[14]] < c_b) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[12]] < c_b) { + if( p[pixel[13]] < c_b) { + 
if( p[pixel[14]] < c_b) { + if( p[pixel[15]] < c_b) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[10]] > cb) { + if( p[pixel[11]] > cb) { + if( p[pixel[12]] > cb) { + if( p[pixel[13]] > cb) { + if( p[pixel[14]] > cb) { + if( p[pixel[15]] > cb) { + goto is_a_corner; + } else + if( p[pixel[6]] > cb) { + if( p[pixel[7]] > cb) { + if( p[pixel[8]] > cb) { + if( p[pixel[9]] > cb) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[5]] > cb) { + if( p[pixel[6]] > cb) { + if( p[pixel[7]] > cb) { + if( p[pixel[8]] > cb) { + if( p[pixel[9]] > cb) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[4]] > cb) { + if( p[pixel[5]] > cb) { + if( p[pixel[6]] > cb) { + if( p[pixel[7]] > cb) { + if( p[pixel[8]] > cb) { + if( p[pixel[9]] > cb) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else if( p[pixel[10]] < c_b) { + if( p[pixel[7]] < c_b) { + if( p[pixel[8]] < c_b) { + if( p[pixel[9]] < c_b) { + if( p[pixel[11]] < c_b) { + if( p[pixel[12]] < c_b) { + if( p[pixel[6]] < c_b) { + if( p[pixel[5]] < c_b) { + if( p[pixel[4]] < c_b) { + goto is_a_corner; + } else + if( p[pixel[13]] < c_b) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[13]] < c_b) { + if( p[pixel[14]] < c_b) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[13]] < c_b) { + if( p[pixel[14]] < c_b) { + if( p[pixel[15]] < c_b) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else if( p[pixel[2]] < c_b) { + if( p[pixel[9]] > cb) { + if( p[pixel[10]] > cb) { + if( p[pixel[11]] > cb) { + if( p[pixel[12]] > cb) { + if( p[pixel[13]] > cb) { + if( p[pixel[14]] > cb) { + if( p[pixel[15]] > cb) { + goto is_a_corner; + } else + if( p[pixel[6]] > cb) { + if( p[pixel[7]] > cb) { + if( p[pixel[8]] > cb) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[5]] > cb) { + if( p[pixel[6]] > cb) { + if( p[pixel[7]] > cb) { + if( p[pixel[8]] > cb) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[4]] > cb) { + if( p[pixel[5]] > cb) { + if( p[pixel[6]] > cb) { + if( p[pixel[7]] > cb) { + if( p[pixel[8]] > cb) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto 
is_not_a_corner; + } else + if( p[pixel[3]] > cb) { + if( p[pixel[4]] > cb) { + if( p[pixel[5]] > cb) { + if( p[pixel[6]] > cb) { + if( p[pixel[7]] > cb) { + if( p[pixel[8]] > cb) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else if( p[pixel[9]] < c_b) { + if( p[pixel[7]] < c_b) { + if( p[pixel[8]] < c_b) { + if( p[pixel[10]] < c_b) { + if( p[pixel[6]] < c_b) { + if( p[pixel[5]] < c_b) { + if( p[pixel[4]] < c_b) { + if( p[pixel[3]] < c_b) { + goto is_a_corner; + } else + if( p[pixel[11]] < c_b) { + if( p[pixel[12]] < c_b) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[11]] < c_b) { + if( p[pixel[12]] < c_b) { + if( p[pixel[13]] < c_b) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[11]] < c_b) { + if( p[pixel[12]] < c_b) { + if( p[pixel[13]] < c_b) { + if( p[pixel[14]] < c_b) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[11]] < c_b) { + if( p[pixel[12]] < c_b) { + if( p[pixel[13]] < c_b) { + if( p[pixel[14]] < c_b) { + if( p[pixel[15]] < c_b) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[9]] > cb) { + if( p[pixel[10]] > cb) { + if( p[pixel[11]] > cb) { + if( p[pixel[12]] > cb) { + if( p[pixel[13]] > cb) { + if( p[pixel[14]] > cb) { + if( p[pixel[15]] > cb) { + goto is_a_corner; + } else + if( p[pixel[6]] > cb) { + if( p[pixel[7]] > cb) { + if( p[pixel[8]] > cb) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[5]] > cb) { + if( p[pixel[6]] > cb) { + if( p[pixel[7]] > cb) { + if( p[pixel[8]] > cb) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[4]] > cb) { + if( p[pixel[5]] > cb) { + if( p[pixel[6]] > cb) { + if( p[pixel[7]] > cb) { + if( p[pixel[8]] > cb) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[3]] > cb) { + if( p[pixel[4]] > cb) { + if( p[pixel[5]] > cb) { + if( p[pixel[6]] > cb) { + if( p[pixel[7]] > cb) { + if( p[pixel[8]] > cb) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else if( p[pixel[9]] < c_b) { + if( p[pixel[7]] < c_b) { + if( p[pixel[8]] < c_b) { + if( p[pixel[10]] < c_b) { + if( p[pixel[11]] < c_b) { + if( p[pixel[6]] < c_b) { + if( p[pixel[5]] < c_b) { + if( p[pixel[4]] < c_b) { + if( p[pixel[3]] < c_b) { + goto is_a_corner; + } else 
+ if( p[pixel[12]] < c_b) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[12]] < c_b) { + if( p[pixel[13]] < c_b) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[12]] < c_b) { + if( p[pixel[13]] < c_b) { + if( p[pixel[14]] < c_b) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[12]] < c_b) { + if( p[pixel[13]] < c_b) { + if( p[pixel[14]] < c_b) { + if( p[pixel[15]] < c_b) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else if( p[pixel[1]] < c_b) { + if( p[pixel[8]] > cb) { + if( p[pixel[9]] > cb) { + if( p[pixel[10]] > cb) { + if( p[pixel[11]] > cb) { + if( p[pixel[12]] > cb) { + if( p[pixel[13]] > cb) { + if( p[pixel[14]] > cb) { + if( p[pixel[15]] > cb) { + goto is_a_corner; + } else + if( p[pixel[6]] > cb) { + if( p[pixel[7]] > cb) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[5]] > cb) { + if( p[pixel[6]] > cb) { + if( p[pixel[7]] > cb) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[4]] > cb) { + if( p[pixel[5]] > cb) { + if( p[pixel[6]] > cb) { + if( p[pixel[7]] > cb) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[3]] > cb) { + if( p[pixel[4]] > cb) { + if( p[pixel[5]] > cb) { + if( p[pixel[6]] > cb) { + if( p[pixel[7]] > cb) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[2]] > cb) { + if( p[pixel[3]] > cb) { + if( p[pixel[4]] > cb) { + if( p[pixel[5]] > cb) { + if( p[pixel[6]] > cb) { + if( p[pixel[7]] > cb) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else if( p[pixel[8]] < c_b) { + if( p[pixel[7]] < c_b) { + if( p[pixel[9]] < c_b) { + if( p[pixel[6]] < c_b) { + if( p[pixel[5]] < c_b) { + if( p[pixel[4]] < c_b) { + if( p[pixel[3]] < c_b) { + if( p[pixel[2]] < c_b) { + goto is_a_corner; + } else + if( p[pixel[10]] < c_b) { + if( p[pixel[11]] < c_b) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[10]] < c_b) { + if( p[pixel[11]] < c_b) { + if( p[pixel[12]] < c_b) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[10]] < c_b) { + if( p[pixel[11]] < c_b) { + if( p[pixel[12]] < c_b) { + if( p[pixel[13]] < c_b) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[10]] < c_b) { + if( p[pixel[11]] < c_b) { + if( p[pixel[12]] < c_b) { + if( p[pixel[13]] < c_b) { + if( 
p[pixel[14]] < c_b) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[10]] < c_b) { + if( p[pixel[11]] < c_b) { + if( p[pixel[12]] < c_b) { + if( p[pixel[13]] < c_b) { + if( p[pixel[14]] < c_b) { + if( p[pixel[15]] < c_b) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[8]] > cb) { + if( p[pixel[9]] > cb) { + if( p[pixel[10]] > cb) { + if( p[pixel[11]] > cb) { + if( p[pixel[12]] > cb) { + if( p[pixel[13]] > cb) { + if( p[pixel[14]] > cb) { + if( p[pixel[15]] > cb) { + goto is_a_corner; + } else + if( p[pixel[6]] > cb) { + if( p[pixel[7]] > cb) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[5]] > cb) { + if( p[pixel[6]] > cb) { + if( p[pixel[7]] > cb) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[4]] > cb) { + if( p[pixel[5]] > cb) { + if( p[pixel[6]] > cb) { + if( p[pixel[7]] > cb) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[3]] > cb) { + if( p[pixel[4]] > cb) { + if( p[pixel[5]] > cb) { + if( p[pixel[6]] > cb) { + if( p[pixel[7]] > cb) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[2]] > cb) { + if( p[pixel[3]] > cb) { + if( p[pixel[4]] > cb) { + if( p[pixel[5]] > cb) { + if( p[pixel[6]] > cb) { + if( p[pixel[7]] > cb) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else if( p[pixel[8]] < c_b) { + if( p[pixel[7]] < c_b) { + if( p[pixel[9]] < c_b) { + if( p[pixel[10]] < c_b) { + if( p[pixel[6]] < c_b) { + if( p[pixel[5]] < c_b) { + if( p[pixel[4]] < c_b) { + if( p[pixel[3]] < c_b) { + if( p[pixel[2]] < c_b) { + goto is_a_corner; + } else + if( p[pixel[11]] < c_b) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[11]] < c_b) { + if( p[pixel[12]] < c_b) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[11]] < c_b) { + if( p[pixel[12]] < c_b) { + if( p[pixel[13]] < c_b) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[11]] < c_b) { + if( p[pixel[12]] < c_b) { + if( p[pixel[13]] < c_b) { + if( p[pixel[14]] < c_b) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[11]] < c_b) { + if( p[pixel[12]] < c_b) { + if( p[pixel[13]] < c_b) { + if( p[pixel[14]] < c_b) { + if( p[pixel[15]] < c_b) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto 
is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else if( p[pixel[0]] < c_b) { + if( p[pixel[1]] > cb) { + if( p[pixel[8]] > cb) { + if( p[pixel[7]] > cb) { + if( p[pixel[9]] > cb) { + if( p[pixel[6]] > cb) { + if( p[pixel[5]] > cb) { + if( p[pixel[4]] > cb) { + if( p[pixel[3]] > cb) { + if( p[pixel[2]] > cb) { + goto is_a_corner; + } else + if( p[pixel[10]] > cb) { + if( p[pixel[11]] > cb) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[10]] > cb) { + if( p[pixel[11]] > cb) { + if( p[pixel[12]] > cb) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[10]] > cb) { + if( p[pixel[11]] > cb) { + if( p[pixel[12]] > cb) { + if( p[pixel[13]] > cb) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[10]] > cb) { + if( p[pixel[11]] > cb) { + if( p[pixel[12]] > cb) { + if( p[pixel[13]] > cb) { + if( p[pixel[14]] > cb) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[10]] > cb) { + if( p[pixel[11]] > cb) { + if( p[pixel[12]] > cb) { + if( p[pixel[13]] > cb) { + if( p[pixel[14]] > cb) { + if( p[pixel[15]] > cb) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else if( p[pixel[8]] < c_b) { + if( p[pixel[9]] < c_b) { + if( p[pixel[10]] < c_b) { + if( p[pixel[11]] < c_b) { + if( p[pixel[12]] < c_b) { + if( p[pixel[13]] < c_b) { + if( p[pixel[14]] < c_b) { + if( p[pixel[15]] < c_b) { + goto is_a_corner; + } else + if( p[pixel[6]] < c_b) { + if( p[pixel[7]] < c_b) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[5]] < c_b) { + if( p[pixel[6]] < c_b) { + if( p[pixel[7]] < c_b) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[4]] < c_b) { + if( p[pixel[5]] < c_b) { + if( p[pixel[6]] < c_b) { + if( p[pixel[7]] < c_b) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[3]] < c_b) { + if( p[pixel[4]] < c_b) { + if( p[pixel[5]] < c_b) { + if( p[pixel[6]] < c_b) { + if( p[pixel[7]] < c_b) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[2]] < c_b) { + if( p[pixel[3]] < c_b) { + if( p[pixel[4]] < c_b) { + if( p[pixel[5]] < c_b) { + if( p[pixel[6]] < c_b) { + if( p[pixel[7]] < c_b) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + 
goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else if( p[pixel[1]] < c_b) { + if( p[pixel[2]] > cb) { + if( p[pixel[9]] > cb) { + if( p[pixel[7]] > cb) { + if( p[pixel[8]] > cb) { + if( p[pixel[10]] > cb) { + if( p[pixel[6]] > cb) { + if( p[pixel[5]] > cb) { + if( p[pixel[4]] > cb) { + if( p[pixel[3]] > cb) { + goto is_a_corner; + } else + if( p[pixel[11]] > cb) { + if( p[pixel[12]] > cb) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[11]] > cb) { + if( p[pixel[12]] > cb) { + if( p[pixel[13]] > cb) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[11]] > cb) { + if( p[pixel[12]] > cb) { + if( p[pixel[13]] > cb) { + if( p[pixel[14]] > cb) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[11]] > cb) { + if( p[pixel[12]] > cb) { + if( p[pixel[13]] > cb) { + if( p[pixel[14]] > cb) { + if( p[pixel[15]] > cb) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else if( p[pixel[9]] < c_b) { + if( p[pixel[10]] < c_b) { + if( p[pixel[11]] < c_b) { + if( p[pixel[12]] < c_b) { + if( p[pixel[13]] < c_b) { + if( p[pixel[14]] < c_b) { + if( p[pixel[15]] < c_b) { + goto is_a_corner; + } else + if( p[pixel[6]] < c_b) { + if( p[pixel[7]] < c_b) { + if( p[pixel[8]] < c_b) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[5]] < c_b) { + if( p[pixel[6]] < c_b) { + if( p[pixel[7]] < c_b) { + if( p[pixel[8]] < c_b) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[4]] < c_b) { + if( p[pixel[5]] < c_b) { + if( p[pixel[6]] < c_b) { + if( p[pixel[7]] < c_b) { + if( p[pixel[8]] < c_b) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[3]] < c_b) { + if( p[pixel[4]] < c_b) { + if( p[pixel[5]] < c_b) { + if( p[pixel[6]] < c_b) { + if( p[pixel[7]] < c_b) { + if( p[pixel[8]] < c_b) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else if( p[pixel[2]] < c_b) { + if( p[pixel[3]] > cb) { + if( p[pixel[10]] > cb) { + if( p[pixel[7]] > cb) { + if( p[pixel[8]] > cb) { + if( p[pixel[9]] > cb) { + if( p[pixel[11]] > cb) { + if( p[pixel[6]] > cb) { + if( p[pixel[5]] > cb) { + if( p[pixel[4]] > cb) { + goto is_a_corner; + } else + if( p[pixel[12]] > cb) { + if( p[pixel[13]] > cb) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[12]] > cb) { + if( p[pixel[13]] > cb) { + if( p[pixel[14]] > cb) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto 
is_not_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[12]] > cb) { + if( p[pixel[13]] > cb) { + if( p[pixel[14]] > cb) { + if( p[pixel[15]] > cb) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else if( p[pixel[10]] < c_b) { + if( p[pixel[11]] < c_b) { + if( p[pixel[12]] < c_b) { + if( p[pixel[13]] < c_b) { + if( p[pixel[14]] < c_b) { + if( p[pixel[15]] < c_b) { + goto is_a_corner; + } else + if( p[pixel[6]] < c_b) { + if( p[pixel[7]] < c_b) { + if( p[pixel[8]] < c_b) { + if( p[pixel[9]] < c_b) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[5]] < c_b) { + if( p[pixel[6]] < c_b) { + if( p[pixel[7]] < c_b) { + if( p[pixel[8]] < c_b) { + if( p[pixel[9]] < c_b) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[4]] < c_b) { + if( p[pixel[5]] < c_b) { + if( p[pixel[6]] < c_b) { + if( p[pixel[7]] < c_b) { + if( p[pixel[8]] < c_b) { + if( p[pixel[9]] < c_b) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else if( p[pixel[3]] < c_b) { + if( p[pixel[4]] > cb) { + if( p[pixel[13]] > cb) { + if( p[pixel[7]] > cb) { + if( p[pixel[8]] > cb) { + if( p[pixel[9]] > cb) { + if( p[pixel[10]] > cb) { + if( p[pixel[11]] > cb) { + if( p[pixel[12]] > cb) { + if( p[pixel[6]] > cb) { + if( p[pixel[5]] > cb) { + goto is_a_corner; + } else + if( p[pixel[14]] > cb) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[14]] > cb) { + if( p[pixel[15]] > cb) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else if( p[pixel[13]] < c_b) { + if( p[pixel[11]] > cb) { + if( p[pixel[5]] > cb) { + if( p[pixel[6]] > cb) { + if( p[pixel[7]] > cb) { + if( p[pixel[8]] > cb) { + if( p[pixel[9]] > cb) { + if( p[pixel[10]] > cb) { + if( p[pixel[12]] > cb) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else if( p[pixel[11]] < c_b) { + if( p[pixel[12]] < c_b) { + if( p[pixel[14]] < c_b) { + if( p[pixel[15]] < c_b) { + goto is_a_corner; + } else + if( p[pixel[6]] < c_b) { + if( p[pixel[7]] < c_b) { + if( p[pixel[8]] < c_b) { + if( p[pixel[9]] < c_b) { + if( p[pixel[10]] < c_b) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[5]] < c_b) { + if( p[pixel[6]] < c_b) { + if( p[pixel[7]] < c_b) { + if( 
p[pixel[8]] < c_b) { + if( p[pixel[9]] < c_b) { + if( p[pixel[10]] < c_b) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[5]] > cb) { + if( p[pixel[6]] > cb) { + if( p[pixel[7]] > cb) { + if( p[pixel[8]] > cb) { + if( p[pixel[9]] > cb) { + if( p[pixel[10]] > cb) { + if( p[pixel[11]] > cb) { + if( p[pixel[12]] > cb) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else if( p[pixel[4]] < c_b) { + if( p[pixel[5]] > cb) { + if( p[pixel[14]] > cb) { + if( p[pixel[7]] > cb) { + if( p[pixel[8]] > cb) { + if( p[pixel[9]] > cb) { + if( p[pixel[10]] > cb) { + if( p[pixel[11]] > cb) { + if( p[pixel[12]] > cb) { + if( p[pixel[13]] > cb) { + if( p[pixel[6]] > cb) { + goto is_a_corner; + } else + if( p[pixel[15]] > cb) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else if( p[pixel[14]] < c_b) { + if( p[pixel[12]] > cb) { + if( p[pixel[6]] > cb) { + if( p[pixel[7]] > cb) { + if( p[pixel[8]] > cb) { + if( p[pixel[9]] > cb) { + if( p[pixel[10]] > cb) { + if( p[pixel[11]] > cb) { + if( p[pixel[13]] > cb) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else if( p[pixel[12]] < c_b) { + if( p[pixel[13]] < c_b) { + if( p[pixel[15]] < c_b) { + goto is_a_corner; + } else + if( p[pixel[6]] < c_b) { + if( p[pixel[7]] < c_b) { + if( p[pixel[8]] < c_b) { + if( p[pixel[9]] < c_b) { + if( p[pixel[10]] < c_b) { + if( p[pixel[11]] < c_b) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[6]] > cb) { + if( p[pixel[7]] > cb) { + if( p[pixel[8]] > cb) { + if( p[pixel[9]] > cb) { + if( p[pixel[10]] > cb) { + if( p[pixel[11]] > cb) { + if( p[pixel[12]] > cb) { + if( p[pixel[13]] > cb) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else if( p[pixel[5]] < c_b) { + if( p[pixel[6]] > cb) { + if( p[pixel[15]] < c_b) { + if( p[pixel[13]] > cb) { + if( p[pixel[7]] > cb) { + if( p[pixel[8]] > cb) { + if( p[pixel[9]] > cb) { + if( p[pixel[10]] > cb) { + if( p[pixel[11]] > cb) { + if( p[pixel[12]] > cb) { + if( p[pixel[14]] > cb) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto 
is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else if( p[pixel[13]] < c_b) { + if( p[pixel[14]] < c_b) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[7]] > cb) { + if( p[pixel[8]] > cb) { + if( p[pixel[9]] > cb) { + if( p[pixel[10]] > cb) { + if( p[pixel[11]] > cb) { + if( p[pixel[12]] > cb) { + if( p[pixel[13]] > cb) { + if( p[pixel[14]] > cb) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else if( p[pixel[6]] < c_b) { + if( p[pixel[7]] > cb) { + if( p[pixel[14]] > cb) { + if( p[pixel[8]] > cb) { + if( p[pixel[9]] > cb) { + if( p[pixel[10]] > cb) { + if( p[pixel[11]] > cb) { + if( p[pixel[12]] > cb) { + if( p[pixel[13]] > cb) { + if( p[pixel[15]] > cb) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else if( p[pixel[14]] < c_b) { + if( p[pixel[15]] < c_b) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else if( p[pixel[7]] < c_b) { + if( p[pixel[8]] < c_b) { + goto is_a_corner; + } else + if( p[pixel[15]] < c_b) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[14]] < c_b) { + if( p[pixel[15]] < c_b) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[13]] > cb) { + if( p[pixel[7]] > cb) { + if( p[pixel[8]] > cb) { + if( p[pixel[9]] > cb) { + if( p[pixel[10]] > cb) { + if( p[pixel[11]] > cb) { + if( p[pixel[12]] > cb) { + if( p[pixel[14]] > cb) { + if( p[pixel[15]] > cb) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else if( p[pixel[13]] < c_b) { + if( p[pixel[14]] < c_b) { + if( p[pixel[15]] < c_b) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[12]] > cb) { + if( p[pixel[7]] > cb) { + if( p[pixel[8]] > cb) { + if( p[pixel[9]] > cb) { + if( p[pixel[10]] > cb) { + if( p[pixel[11]] > cb) { + if( p[pixel[13]] > cb) { + if( p[pixel[14]] > cb) { + if( p[pixel[6]] > cb) { + goto is_a_corner; + } else + if( p[pixel[15]] > cb) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else if( p[pixel[12]] < c_b) { + if( p[pixel[13]] < c_b) { + if( p[pixel[14]] < c_b) { + if( p[pixel[15]] < c_b) { + goto is_a_corner; + } else + if( p[pixel[6]] < c_b) { + if( p[pixel[7]] < c_b) { + if( p[pixel[8]] < c_b) { + if( p[pixel[9]] < c_b) { + if( p[pixel[10]] < c_b) { + if( p[pixel[11]] < c_b) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto 
is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[11]] > cb) { + if( p[pixel[7]] > cb) { + if( p[pixel[8]] > cb) { + if( p[pixel[9]] > cb) { + if( p[pixel[10]] > cb) { + if( p[pixel[12]] > cb) { + if( p[pixel[13]] > cb) { + if( p[pixel[6]] > cb) { + if( p[pixel[5]] > cb) { + goto is_a_corner; + } else + if( p[pixel[14]] > cb) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[14]] > cb) { + if( p[pixel[15]] > cb) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else if( p[pixel[11]] < c_b) { + if( p[pixel[12]] < c_b) { + if( p[pixel[13]] < c_b) { + if( p[pixel[14]] < c_b) { + if( p[pixel[15]] < c_b) { + goto is_a_corner; + } else + if( p[pixel[6]] < c_b) { + if( p[pixel[7]] < c_b) { + if( p[pixel[8]] < c_b) { + if( p[pixel[9]] < c_b) { + if( p[pixel[10]] < c_b) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[5]] < c_b) { + if( p[pixel[6]] < c_b) { + if( p[pixel[7]] < c_b) { + if( p[pixel[8]] < c_b) { + if( p[pixel[9]] < c_b) { + if( p[pixel[10]] < c_b) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[10]] > cb) { + if( p[pixel[7]] > cb) { + if( p[pixel[8]] > cb) { + if( p[pixel[9]] > cb) { + if( p[pixel[11]] > cb) { + if( p[pixel[12]] > cb) { + if( p[pixel[6]] > cb) { + if( p[pixel[5]] > cb) { + if( p[pixel[4]] > cb) { + goto is_a_corner; + } else + if( p[pixel[13]] > cb) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[13]] > cb) { + if( p[pixel[14]] > cb) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[13]] > cb) { + if( p[pixel[14]] > cb) { + if( p[pixel[15]] > cb) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else if( p[pixel[10]] < c_b) { + if( p[pixel[11]] < c_b) { + if( p[pixel[12]] < c_b) { + if( p[pixel[13]] < c_b) { + if( p[pixel[14]] < c_b) { + if( p[pixel[15]] < c_b) { + goto is_a_corner; + } else + if( p[pixel[6]] < c_b) { + if( p[pixel[7]] < c_b) { + if( p[pixel[8]] < c_b) { + if( p[pixel[9]] < c_b) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[5]] < c_b) { + if( p[pixel[6]] < c_b) { + if( p[pixel[7]] < c_b) { + if( p[pixel[8]] < c_b) { + if( p[pixel[9]] < c_b) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[4]] < c_b) { + if( 
p[pixel[5]] < c_b) { + if( p[pixel[6]] < c_b) { + if( p[pixel[7]] < c_b) { + if( p[pixel[8]] < c_b) { + if( p[pixel[9]] < c_b) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[9]] > cb) { + if( p[pixel[7]] > cb) { + if( p[pixel[8]] > cb) { + if( p[pixel[10]] > cb) { + if( p[pixel[11]] > cb) { + if( p[pixel[6]] > cb) { + if( p[pixel[5]] > cb) { + if( p[pixel[4]] > cb) { + if( p[pixel[3]] > cb) { + goto is_a_corner; + } else + if( p[pixel[12]] > cb) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[12]] > cb) { + if( p[pixel[13]] > cb) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[12]] > cb) { + if( p[pixel[13]] > cb) { + if( p[pixel[14]] > cb) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[12]] > cb) { + if( p[pixel[13]] > cb) { + if( p[pixel[14]] > cb) { + if( p[pixel[15]] > cb) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else if( p[pixel[9]] < c_b) { + if( p[pixel[10]] < c_b) { + if( p[pixel[11]] < c_b) { + if( p[pixel[12]] < c_b) { + if( p[pixel[13]] < c_b) { + if( p[pixel[14]] < c_b) { + if( p[pixel[15]] < c_b) { + goto is_a_corner; + } else + if( p[pixel[6]] < c_b) { + if( p[pixel[7]] < c_b) { + if( p[pixel[8]] < c_b) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[5]] < c_b) { + if( p[pixel[6]] < c_b) { + if( p[pixel[7]] < c_b) { + if( p[pixel[8]] < c_b) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[4]] < c_b) { + if( p[pixel[5]] < c_b) { + if( p[pixel[6]] < c_b) { + if( p[pixel[7]] < c_b) { + if( p[pixel[8]] < c_b) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[3]] < c_b) { + if( p[pixel[4]] < c_b) { + if( p[pixel[5]] < c_b) { + if( p[pixel[6]] < c_b) { + if( p[pixel[7]] < c_b) { + if( p[pixel[8]] < c_b) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[8]] > cb) { + if( p[pixel[7]] > cb) { + if( p[pixel[9]] > cb) { + if( p[pixel[10]] > cb) { + if( p[pixel[6]] > cb) { + if( p[pixel[5]] > cb) { + if( p[pixel[4]] > cb) { + if( p[pixel[3]] > cb) { + if( p[pixel[2]] > cb) { + goto is_a_corner; + } else + if( p[pixel[11]] > cb) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[11]] > cb) { + if( p[pixel[12]] > cb) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else 
+ goto is_not_a_corner; + } else + if( p[pixel[11]] > cb) { + if( p[pixel[12]] > cb) { + if( p[pixel[13]] > cb) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[11]] > cb) { + if( p[pixel[12]] > cb) { + if( p[pixel[13]] > cb) { + if( p[pixel[14]] > cb) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[11]] > cb) { + if( p[pixel[12]] > cb) { + if( p[pixel[13]] > cb) { + if( p[pixel[14]] > cb) { + if( p[pixel[15]] > cb) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else if( p[pixel[8]] < c_b) { + if( p[pixel[9]] < c_b) { + if( p[pixel[10]] < c_b) { + if( p[pixel[11]] < c_b) { + if( p[pixel[12]] < c_b) { + if( p[pixel[13]] < c_b) { + if( p[pixel[14]] < c_b) { + if( p[pixel[15]] < c_b) { + goto is_a_corner; + } else + if( p[pixel[6]] < c_b) { + if( p[pixel[7]] < c_b) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[5]] < c_b) { + if( p[pixel[6]] < c_b) { + if( p[pixel[7]] < c_b) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[4]] < c_b) { + if( p[pixel[5]] < c_b) { + if( p[pixel[6]] < c_b) { + if( p[pixel[7]] < c_b) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[3]] < c_b) { + if( p[pixel[4]] < c_b) { + if( p[pixel[5]] < c_b) { + if( p[pixel[6]] < c_b) { + if( p[pixel[7]] < c_b) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[2]] < c_b) { + if( p[pixel[3]] < c_b) { + if( p[pixel[4]] < c_b) { + if( p[pixel[5]] < c_b) { + if( p[pixel[6]] < c_b) { + if( p[pixel[7]] < c_b) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[7]] > cb) { + if( p[pixel[8]] > cb) { + if( p[pixel[9]] > cb) { + if( p[pixel[6]] > cb) { + if( p[pixel[5]] > cb) { + if( p[pixel[4]] > cb) { + if( p[pixel[3]] > cb) { + if( p[pixel[2]] > cb) { + if( p[pixel[1]] > cb) { + goto is_a_corner; + } else + if( p[pixel[10]] > cb) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[10]] > cb) { + if( p[pixel[11]] > cb) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[10]] > cb) { + if( p[pixel[11]] > cb) { + if( p[pixel[12]] > cb) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + if( p[pixel[10]] > cb) { + if( p[pixel[11]] > cb) { + if( p[pixel[12]] > cb) { + if( p[pixel[13]] > cb) { + goto is_a_corner; + } else + goto is_not_a_corner; + } else + goto is_not_a_corner; + } else + goto 
is_not_a_corner;
+ } else
+ goto is_not_a_corner;
+ } else
+ if( p[pixel[10]] > cb) {
+ if( p[pixel[11]] > cb) {
+ if( p[pixel[12]] > cb) {
+ if( p[pixel[13]] > cb) {
+ if( p[pixel[14]] > cb) {
+ goto is_a_corner;
+ } else
+ goto is_not_a_corner;
+ } else
+ goto is_not_a_corner;
+ } else
+ goto is_not_a_corner;
+ } else
+ goto is_not_a_corner;
+ } else
+ goto is_not_a_corner;
+ } else
+ if( p[pixel[10]] > cb) {
+ if( p[pixel[11]] > cb) {
+ if( p[pixel[12]] > cb) {
+ if( p[pixel[13]] > cb) {
+ if( p[pixel[14]] > cb) {
+ if( p[pixel[15]] > cb) {
+ goto is_a_corner;
+ } else
+ goto is_not_a_corner;
+ } else
+ goto is_not_a_corner;
+ } else
+ goto is_not_a_corner;
+ } else
+ goto is_not_a_corner;
+ } else
+ goto is_not_a_corner;
+ } else
+ goto is_not_a_corner;
+ } else
+ goto is_not_a_corner;
+ } else
+ goto is_not_a_corner;
+ } else if( p[pixel[7]] < c_b) {
+ if( p[pixel[8]] < c_b) {
+ if( p[pixel[9]] < c_b) {
+ if( p[pixel[6]] < c_b) {
+ if( p[pixel[5]] < c_b) {
+ if( p[pixel[4]] < c_b) {
+ if( p[pixel[3]] < c_b) {
+ if( p[pixel[2]] < c_b) {
+ if( p[pixel[1]] < c_b) {
+ goto is_a_corner;
+ } else
+ if( p[pixel[10]] < c_b) {
+ goto is_a_corner;
+ } else
+ goto is_not_a_corner;
+ } else
+ if( p[pixel[10]] < c_b) {
+ if( p[pixel[11]] < c_b) {
+ goto is_a_corner;
+ } else
+ goto is_not_a_corner;
+ } else
+ goto is_not_a_corner;
+ } else
+ if( p[pixel[10]] < c_b) {
+ if( p[pixel[11]] < c_b) {
+ if( p[pixel[12]] < c_b) {
+ goto is_a_corner;
+ } else
+ goto is_not_a_corner;
+ } else
+ goto is_not_a_corner;
+ } else
+ goto is_not_a_corner;
+ } else
+ if( p[pixel[10]] < c_b) {
+ if( p[pixel[11]] < c_b) {
+ if( p[pixel[12]] < c_b) {
+ if( p[pixel[13]] < c_b) {
+ goto is_a_corner;
+ } else
+ goto is_not_a_corner;
+ } else
+ goto is_not_a_corner;
+ } else
+ goto is_not_a_corner;
+ } else
+ goto is_not_a_corner;
+ } else
+ if( p[pixel[10]] < c_b) {
+ if( p[pixel[11]] < c_b) {
+ if( p[pixel[12]] < c_b) {
+ if( p[pixel[13]] < c_b) {
+ if( p[pixel[14]] < c_b) {
+ goto is_a_corner;
+ } else
+ goto is_not_a_corner;
+ } else
+ goto is_not_a_corner;
+ } else
+ goto is_not_a_corner;
+ } else
+ goto is_not_a_corner;
+ } else
+ goto is_not_a_corner;
+ } else
+ if( p[pixel[10]] < c_b) {
+ if( p[pixel[11]] < c_b) {
+ if( p[pixel[12]] < c_b) {
+ if( p[pixel[13]] < c_b) {
+ if( p[pixel[14]] < c_b) {
+ if( p[pixel[15]] < c_b) {
+ goto is_a_corner;
+ } else
+ goto is_not_a_corner;
+ } else
+ goto is_not_a_corner;
+ } else
+ goto is_not_a_corner;
+ } else
+ goto is_not_a_corner;
+ } else
+ goto is_not_a_corner;
+ } else
+ goto is_not_a_corner;
+ } else
+ goto is_not_a_corner;
+ } else
+ goto is_not_a_corner;
+
+ is_a_corner:
+ bmin=b;
+ goto end_if;
+
+ is_not_a_corner:
+ bmax=b;
+ goto end_if;
+
+ end_if:
+
+ // Binary search has converged: bmin is the largest threshold at which
+ // the candidate is still detected as a corner.
+ if(bmin == bmax - 1 || bmin == bmax)
+ return bmin;
+ b = (bmin + bmax) / 2;
+ }
+ }
+
+ template <typename T>
+ __always_inline int FAST9<T>::cornerTest(const T* p, T c, unsigned char b) const {
+ int cb = c + b;
+ int c_b = c - b;
+
+ if(p[pixel[0]] > cb) {
+ if(p[pixel[1]] > cb) {
+ if(p[pixel[2]] > cb) {
+ if(p[pixel[3]] > cb) {
+ if(p[pixel[4]] > cb) {
+ if(p[pixel[5]] > cb) {
+ if(p[pixel[6]] > cb) {
+ if(p[pixel[7]] > cb) {
+ if(p[pixel[8]] > cb) {
+ return 0x108; //pixel 0 - 8
+ } else
+ if(p[pixel[15]] > cb) {
+ return 0x1f7; //pixel 15 - 7
+ } else
+ return 0;
+ } else if(p[pixel[7]] < c_b) {
+ if(p[pixel[14]] > cb) {
+ if(p[pixel[15]] > cb) {
+ return 0x1e6; //pixel 14 - 6
+ } else
+ return 0;
+ } else if(p[pixel[14]] < c_b) {
+ if(p[pixel[8]] < c_b) {
+ if(p[pixel[9]] < c_b) {
+ 
if(p[pixel[10]] < c_b) { + if(p[pixel[11]] < c_b) { + if(p[pixel[12]] < c_b) { + if(p[pixel[13]] < c_b) { + if(p[pixel[15]] < c_b) { + return 0x07f; //pixel 7 - 15 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + if(p[pixel[14]] > cb) { + if(p[pixel[15]] > cb) { + return 0x1e6; //pixel 14 - 6 + } else + return 0; + } else + return 0; + } else if(p[pixel[6]] < c_b) { + if(p[pixel[15]] > cb) { + if(p[pixel[13]] > cb) { + if(p[pixel[14]] > cb) { + return 0x1d5; //pixel 13 - 5 + } else + return 0; + } else if(p[pixel[13]] < c_b) { + if(p[pixel[7]] < c_b) { + if(p[pixel[8]] < c_b) { + if(p[pixel[9]] < c_b) { + if(p[pixel[10]] < c_b) { + if(p[pixel[11]] < c_b) { + if(p[pixel[12]] < c_b) { + if(p[pixel[14]] < c_b) { + return 0x06e; //pixel 6 - 14 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + if(p[pixel[7]] < c_b) { + if(p[pixel[8]] < c_b) { + if(p[pixel[9]] < c_b) { + if(p[pixel[10]] < c_b) { + if(p[pixel[11]] < c_b) { + if(p[pixel[12]] < c_b) { + if(p[pixel[13]] < c_b) { + if(p[pixel[14]] < c_b) { + return 0x06e; //pixel 6 - 14 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + if(p[pixel[13]] > cb) { + if(p[pixel[14]] > cb) { + if(p[pixel[15]] > cb) { + return 0x1d5; //pixel 13 - 5 + } else + return 0; + } else + return 0; + } else if(p[pixel[13]] < c_b) { + if(p[pixel[7]] < c_b) { + if(p[pixel[8]] < c_b) { + if(p[pixel[9]] < c_b) { + if(p[pixel[10]] < c_b) { + if(p[pixel[11]] < c_b) { + if(p[pixel[12]] < c_b) { + if(p[pixel[14]] < c_b) { + if(p[pixel[15]] < c_b) { + return 0x07f; //pixel 7 - 15 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else if(p[pixel[5]] < c_b) { + if(p[pixel[14]] > cb) { + if(p[pixel[12]] > cb) { + if(p[pixel[13]] > cb) { + if(p[pixel[15]] > cb) { + return 0x1c4; //pixel 12 - 4 + } else + if(p[pixel[6]] > cb) { + if(p[pixel[7]] > cb) { + if(p[pixel[8]] > cb) { + if(p[pixel[9]] > cb) { + if(p[pixel[10]] > cb) { + if(p[pixel[11]] > cb) { + return 0x16e; //pixel 6 - 14 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else if(p[pixel[12]] < c_b) { + if(p[pixel[6]] < c_b) { + if(p[pixel[7]] < c_b) { + if(p[pixel[8]] < c_b) { + if(p[pixel[9]] < c_b) { + if(p[pixel[10]] < c_b) { + if(p[pixel[11]] < c_b) { + if(p[pixel[13]] < c_b) { + return 0x05d; //pixel 5 - 13 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else if(p[pixel[14]] < c_b) { + if(p[pixel[7]] < c_b) { + if(p[pixel[8]] < c_b) { + if(p[pixel[9]] < c_b) { + if(p[pixel[10]] < c_b) { + if(p[pixel[11]] < c_b) { + if(p[pixel[12]] < c_b) { + if(p[pixel[13]] < c_b) { + if(p[pixel[6]] < c_b) { + return 0x05e; //pixel 5 - 14 + } else + if(p[pixel[15]] < c_b) { + return 0x07f; //pixel 7 - 15 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + 
if(p[pixel[6]] < c_b) { + if(p[pixel[7]] < c_b) { + if(p[pixel[8]] < c_b) { + if(p[pixel[9]] < c_b) { + if(p[pixel[10]] < c_b) { + if(p[pixel[11]] < c_b) { + if(p[pixel[12]] < c_b) { + if(p[pixel[13]] < c_b) { + return 0x05d; //pixel 5 - 13 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + if(p[pixel[12]] > cb) { + if(p[pixel[13]] > cb) { + if(p[pixel[14]] > cb) { + if(p[pixel[15]] > cb) { + return 0x1c4; //pixel 12 - 4 + } else + if(p[pixel[6]] > cb) { + if(p[pixel[7]] > cb) { + if(p[pixel[8]] > cb) { + if(p[pixel[9]] > cb) { + if(p[pixel[10]] > cb) { + if(p[pixel[11]] > cb) { + return 0x16e; //pixel 6 - 14 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else if(p[pixel[12]] < c_b) { + if(p[pixel[7]] < c_b) { + if(p[pixel[8]] < c_b) { + if(p[pixel[9]] < c_b) { + if(p[pixel[10]] < c_b) { + if(p[pixel[11]] < c_b) { + if(p[pixel[13]] < c_b) { + if(p[pixel[14]] < c_b) { + if(p[pixel[6]] < c_b) { + return 0x06e; //pixel 6 - 14 + } else + if(p[pixel[15]] < c_b) { + return 0x07f; //pixel 7 - 15 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else if(p[pixel[4]] < c_b) { + if(p[pixel[13]] > cb) { + if(p[pixel[11]] > cb) { + if(p[pixel[12]] > cb) { + if(p[pixel[14]] > cb) { + if(p[pixel[15]] > cb) { + return 0x1b3; //pixel 11 - 3 + } else + if(p[pixel[6]] > cb) { + if(p[pixel[7]] > cb) { + if(p[pixel[8]] > cb) { + if(p[pixel[9]] > cb) { + if(p[pixel[10]] > cb) { + return 0x16e; //pixel 6 - 14 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + if(p[pixel[5]] > cb) { + if(p[pixel[6]] > cb) { + if(p[pixel[7]] > cb) { + if(p[pixel[8]] > cb) { + if(p[pixel[9]] > cb) { + if(p[pixel[10]] > cb) { + return 0x15d; //pixel 5 - 13 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else if(p[pixel[11]] < c_b) { + if(p[pixel[5]] < c_b) { + if(p[pixel[6]] < c_b) { + if(p[pixel[7]] < c_b) { + if(p[pixel[8]] < c_b) { + if(p[pixel[9]] < c_b) { + if(p[pixel[10]] < c_b) { + if(p[pixel[12]] < c_b) { + return 0x04c; //pixel 4 - 12 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else if(p[pixel[13]] < c_b) { + if(p[pixel[7]] < c_b) { + if(p[pixel[8]] < c_b) { + if(p[pixel[9]] < c_b) { + if(p[pixel[10]] < c_b) { + if(p[pixel[11]] < c_b) { + if(p[pixel[12]] < c_b) { + if(p[pixel[6]] < c_b) { + if(p[pixel[5]] < c_b) { + return 0x04d; //pixel 4 - 13 + } else + if(p[pixel[14]] < c_b) { + return 0x06e; //pixel 6 - 14 + } else + return 0; + } else + if(p[pixel[14]] < c_b) { + if(p[pixel[15]] < c_b) { + return 0x07f; //pixel 7 - 15 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + if(p[pixel[5]] < c_b) { + if(p[pixel[6]] < c_b) { + if(p[pixel[7]] < c_b) { + if(p[pixel[8]] < c_b) { + if(p[pixel[9]] < c_b) { + if(p[pixel[10]] < c_b) { + if(p[pixel[11]] < c_b) { + if(p[pixel[12]] < c_b) { + return 0x04c; //pixel 4 - 12 + } else + 
return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + if(p[pixel[11]] > cb) { + if(p[pixel[12]] > cb) { + if(p[pixel[13]] > cb) { + if(p[pixel[14]] > cb) { + if(p[pixel[15]] > cb) { + return 0x1b3; //pixel 11 - 3 + } else + if(p[pixel[6]] > cb) { + if(p[pixel[7]] > cb) { + if(p[pixel[8]] > cb) { + if(p[pixel[9]] > cb) { + if(p[pixel[10]] > cb) { + return 0x16e; //pixel 6 - 14 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + if(p[pixel[5]] > cb) { + if(p[pixel[6]] > cb) { + if(p[pixel[7]] > cb) { + if(p[pixel[8]] > cb) { + if(p[pixel[9]] > cb) { + if(p[pixel[10]] > cb) { + return 0x15d; //pixel 5 - 13 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else if(p[pixel[11]] < c_b) { + if(p[pixel[7]] < c_b) { + if(p[pixel[8]] < c_b) { + if(p[pixel[9]] < c_b) { + if(p[pixel[10]] < c_b) { + if(p[pixel[12]] < c_b) { + if(p[pixel[13]] < c_b) { + if(p[pixel[6]] < c_b) { + if(p[pixel[5]] < c_b) { + return 0x05d; //pixel 5 - 13 + } else + if(p[pixel[14]] < c_b) { + return 0x06e; //pixel 6 - 14 + } else + return 0; + } else + if(p[pixel[14]] < c_b) { + if(p[pixel[15]] < c_b) { + return 0x07f; //pixel 7 - 15 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else if(p[pixel[3]] < c_b) { + if(p[pixel[10]] > cb) { + if(p[pixel[11]] > cb) { + if(p[pixel[12]] > cb) { + if(p[pixel[13]] > cb) { + if(p[pixel[14]] > cb) { + if(p[pixel[15]] > cb) { + return 0x1a2; //pixel 10 - 2 + } else + if(p[pixel[6]] > cb) { + if(p[pixel[7]] > cb) { + if(p[pixel[8]] > cb) { + if(p[pixel[9]] > cb) { + return 0x16e; //pixel 6 - 14 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + if(p[pixel[5]] > cb) { + if(p[pixel[6]] > cb) { + if(p[pixel[7]] > cb) { + if(p[pixel[8]] > cb) { + if(p[pixel[9]] > cb) { + return 0x15d; //pixel 5 - 13 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + if(p[pixel[4]] > cb) { + if(p[pixel[5]] > cb) { + if(p[pixel[6]] > cb) { + if(p[pixel[7]] > cb) { + if(p[pixel[8]] > cb) { + if(p[pixel[9]] > cb) { + return 0x14c; //pixel 4 - 12 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else if(p[pixel[10]] < c_b) { + if(p[pixel[7]] < c_b) { + if(p[pixel[8]] < c_b) { + if(p[pixel[9]] < c_b) { + if(p[pixel[11]] < c_b) { + if(p[pixel[6]] < c_b) { + if(p[pixel[5]] < c_b) { + if(p[pixel[4]] < c_b) { + return 0x03b; //pixel 3 - 11 + } else + if(p[pixel[12]] < c_b) { + if(p[pixel[13]] < c_b) { + return 0x05d; //pixel 5 - 13 + } else + return 0; + } else + return 0; + } else + if(p[pixel[12]] < c_b) { + if(p[pixel[13]] < c_b) { + if(p[pixel[14]] < c_b) { + return 0x06e; //pixel 6 - 14 + } else + return 0; + } else + return 0; + } else + return 0; + } else + if(p[pixel[12]] < c_b) { + if(p[pixel[13]] < c_b) { + if(p[pixel[14]] < c_b) { + if(p[pixel[15]] < c_b) { + return 0x07f; //pixel 7 - 15 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + 
return 0; + } else + return 0; + } else + if(p[pixel[10]] > cb) { + if(p[pixel[11]] > cb) { + if(p[pixel[12]] > cb) { + if(p[pixel[13]] > cb) { + if(p[pixel[14]] > cb) { + if(p[pixel[15]] > cb) { + return 0x1a2; //pixel 10 - 2 + } else + if(p[pixel[6]] > cb) { + if(p[pixel[7]] > cb) { + if(p[pixel[8]] > cb) { + if(p[pixel[9]] > cb) { + return 0x16e; //pixel 6 - 14 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + if(p[pixel[5]] > cb) { + if(p[pixel[6]] > cb) { + if(p[pixel[7]] > cb) { + if(p[pixel[8]] > cb) { + if(p[pixel[9]] > cb) { + return 0x15d; //pixel 5 - 13 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + if(p[pixel[4]] > cb) { + if(p[pixel[5]] > cb) { + if(p[pixel[6]] > cb) { + if(p[pixel[7]] > cb) { + if(p[pixel[8]] > cb) { + if(p[pixel[9]] > cb) { + return 0x14c; //pixel 4 - 12 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else if(p[pixel[10]] < c_b) { + if(p[pixel[7]] < c_b) { + if(p[pixel[8]] < c_b) { + if(p[pixel[9]] < c_b) { + if(p[pixel[11]] < c_b) { + if(p[pixel[12]] < c_b) { + if(p[pixel[6]] < c_b) { + if(p[pixel[5]] < c_b) { + if(p[pixel[4]] < c_b) { + return 0x04c; //pixel 4 - 12 + } else + if(p[pixel[13]] < c_b) { + return 0x05d; //pixel 5 - 13 + } else + return 0; + } else + if(p[pixel[13]] < c_b) { + if(p[pixel[14]] < c_b) { + return 0x06e; //pixel 6 - 14 + } else + return 0; + } else + return 0; + } else + if(p[pixel[13]] < c_b) { + if(p[pixel[14]] < c_b) { + if(p[pixel[15]] < c_b) { + return 0x07f; //pixel 7 - 15 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else if(p[pixel[2]] < c_b) { + if(p[pixel[9]] > cb) { + if(p[pixel[10]] > cb) { + if(p[pixel[11]] > cb) { + if(p[pixel[12]] > cb) { + if(p[pixel[13]] > cb) { + if(p[pixel[14]] > cb) { + if(p[pixel[15]] > cb) { + return 0x191; //pixel 9 - 1 + } else + if(p[pixel[6]] > cb) { + if(p[pixel[7]] > cb) { + if(p[pixel[8]] > cb) { + return 0x16e; //pixel 6 - 14 + } else + return 0; + } else + return 0; + } else + return 0; + } else + if(p[pixel[5]] > cb) { + if(p[pixel[6]] > cb) { + if(p[pixel[7]] > cb) { + if(p[pixel[8]] > cb) { + return 0x15d; //pixel 5 - 13 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + if(p[pixel[4]] > cb) { + if(p[pixel[5]] > cb) { + if(p[pixel[6]] > cb) { + if(p[pixel[7]] > cb) { + if(p[pixel[8]] > cb) { + return 0x14c; //pixel 4 - 12 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + if(p[pixel[3]] > cb) { + if(p[pixel[4]] > cb) { + if(p[pixel[5]] > cb) { + if(p[pixel[6]] > cb) { + if(p[pixel[7]] > cb) { + if(p[pixel[8]] > cb) { + return 0x13b; //pixel 3 - 11 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else if(p[pixel[9]] < c_b) { + if(p[pixel[7]] < c_b) { + if(p[pixel[8]] < c_b) { + if(p[pixel[10]] < c_b) { + if(p[pixel[6]] < c_b) { + if(p[pixel[5]] < c_b) { + if(p[pixel[4]] < c_b) { + if(p[pixel[3]] < c_b) { + return 0x02a; //pixel 2 - 10 + } else + if(p[pixel[11]] < c_b) { + if(p[pixel[12]] < c_b) { + return 0x04c; //pixel 4 - 12 + } else + return 0; + } else + return 0; + } else 
+ if(p[pixel[11]] < c_b) { + if(p[pixel[12]] < c_b) { + if(p[pixel[13]] < c_b) { + return 0x05d; //pixel 5 - 13 + } else + return 0; + } else + return 0; + } else + return 0; + } else + if(p[pixel[11]] < c_b) { + if(p[pixel[12]] < c_b) { + if(p[pixel[13]] < c_b) { + if(p[pixel[14]] < c_b) { + return 0x06e; //pixel 6 - 14 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + if(p[pixel[11]] < c_b) { + if(p[pixel[12]] < c_b) { + if(p[pixel[13]] < c_b) { + if(p[pixel[14]] < c_b) { + if(p[pixel[15]] < c_b) { + return 0x07f; //pixel 7 - 15 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + if(p[pixel[9]] > cb) { + if(p[pixel[10]] > cb) { + if(p[pixel[11]] > cb) { + if(p[pixel[12]] > cb) { + if(p[pixel[13]] > cb) { + if(p[pixel[14]] > cb) { + if(p[pixel[15]] > cb) { + return 0x191; //pixel 9 - 1 + } else + if(p[pixel[6]] > cb) { + if(p[pixel[7]] > cb) { + if(p[pixel[8]] > cb) { + return 0x16e; //pixel 6 - 14 + } else + return 0; + } else + return 0; + } else + return 0; + } else + if(p[pixel[5]] > cb) { + if(p[pixel[6]] > cb) { + if(p[pixel[7]] > cb) { + if(p[pixel[8]] > cb) { + return 0x15d; //pixel 5 - 13 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + if(p[pixel[4]] > cb) { + if(p[pixel[5]] > cb) { + if(p[pixel[6]] > cb) { + if(p[pixel[7]] > cb) { + if(p[pixel[8]] > cb) { + return 0x14c; //pixel 4 - 12 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + if(p[pixel[3]] > cb) { + if(p[pixel[4]] > cb) { + if(p[pixel[5]] > cb) { + if(p[pixel[6]] > cb) { + if(p[pixel[7]] > cb) { + if(p[pixel[8]] > cb) { + return 0x13b; //pixel 3 - 11 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else if(p[pixel[9]] < c_b) { + if(p[pixel[7]] < c_b) { + if(p[pixel[8]] < c_b) { + if(p[pixel[10]] < c_b) { + if(p[pixel[11]] < c_b) { + if(p[pixel[6]] < c_b) { + if(p[pixel[5]] < c_b) { + if(p[pixel[4]] < c_b) { + if(p[pixel[3]] < c_b) { + return 0x03b; //pixel 3 - 11 + } else + if(p[pixel[12]] < c_b) { + return 0x04c; //pixel 4 - 12 + } else + return 0; + } else + if(p[pixel[12]] < c_b) { + if(p[pixel[13]] < c_b) { + return 0x05d; //pixel 5 - 13 + } else + return 0; + } else + return 0; + } else + if(p[pixel[12]] < c_b) { + if(p[pixel[13]] < c_b) { + if(p[pixel[14]] < c_b) { + return 0x06e; //pixel 6 - 14 + } else + return 0; + } else + return 0; + } else + return 0; + } else + if(p[pixel[12]] < c_b) { + if(p[pixel[13]] < c_b) { + if(p[pixel[14]] < c_b) { + if(p[pixel[15]] < c_b) { + return 0x07f; //pixel 7 - 15 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else if(p[pixel[1]] < c_b) { + if(p[pixel[8]] > cb) { + if(p[pixel[9]] > cb) { + if(p[pixel[10]] > cb) { + if(p[pixel[11]] > cb) { + if(p[pixel[12]] > cb) { + if(p[pixel[13]] > cb) { + if(p[pixel[14]] > cb) { + if(p[pixel[15]] > cb) { + return 0x180; //pixel 8 - 0 + } else + if(p[pixel[6]] > cb) { + if(p[pixel[7]] > cb) { + return 0x16e; //pixel 6 - 14 + } else + return 0; + } else + return 0; + } else + if(p[pixel[5]] > cb) { + if(p[pixel[6]] > cb) { + if(p[pixel[7]] > cb) { + return 0x15d; //pixel 5 - 
13 + } else + return 0; + } else + return 0; + } else + return 0; + } else + if(p[pixel[4]] > cb) { + if(p[pixel[5]] > cb) { + if(p[pixel[6]] > cb) { + if(p[pixel[7]] > cb) { + return 0x14c; //pixel 4 - 12 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + if(p[pixel[3]] > cb) { + if(p[pixel[4]] > cb) { + if(p[pixel[5]] > cb) { + if(p[pixel[6]] > cb) { + if(p[pixel[7]] > cb) { + return 0x13b; //pixel 3 - 11 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + if(p[pixel[2]] > cb) { + if(p[pixel[3]] > cb) { + if(p[pixel[4]] > cb) { + if(p[pixel[5]] > cb) { + if(p[pixel[6]] > cb) { + if(p[pixel[7]] > cb) { + return 0x12a; //pixel 2 - 10 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else if(p[pixel[8]] < c_b) { + if(p[pixel[7]] < c_b) { + if(p[pixel[9]] < c_b) { + if(p[pixel[6]] < c_b) { + if(p[pixel[5]] < c_b) { + if(p[pixel[4]] < c_b) { + if(p[pixel[3]] < c_b) { + if(p[pixel[2]] < c_b) { + return 0x019; //pixel 1 - 9 + } else + if(p[pixel[10]] < c_b) { + if(p[pixel[11]] < c_b) { + return 0x03b; //pixel 3 - 11 + } else + return 0; + } else + return 0; + } else + if(p[pixel[10]] < c_b) { + if(p[pixel[11]] < c_b) { + if(p[pixel[12]] < c_b) { + return 0x04c; //pixel 4 - 12 + } else + return 0; + } else + return 0; + } else + return 0; + } else + if(p[pixel[10]] < c_b) { + if(p[pixel[11]] < c_b) { + if(p[pixel[12]] < c_b) { + if(p[pixel[13]] < c_b) { + return 0x05d; //pixel 5 - 13 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + if(p[pixel[10]] < c_b) { + if(p[pixel[11]] < c_b) { + if(p[pixel[12]] < c_b) { + if(p[pixel[13]] < c_b) { + if(p[pixel[14]] < c_b) { + return 0x06e; //pixel 6 - 14 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + if(p[pixel[10]] < c_b) { + if(p[pixel[11]] < c_b) { + if(p[pixel[12]] < c_b) { + if(p[pixel[13]] < c_b) { + if(p[pixel[14]] < c_b) { + if(p[pixel[15]] < c_b) { + return 0x07f; //pixel 7 - 15 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + if(p[pixel[8]] > cb) { + if(p[pixel[9]] > cb) { + if(p[pixel[10]] > cb) { + if(p[pixel[11]] > cb) { + if(p[pixel[12]] > cb) { + if(p[pixel[13]] > cb) { + if(p[pixel[14]] > cb) { + if(p[pixel[15]] > cb) { + return 0x180; //pixel 8 - 0 + } else + if(p[pixel[6]] > cb) { + if(p[pixel[7]] > cb) { + return 0x16e; //pixel 6 - 14 + } else + return 0; + } else + return 0; + } else + if(p[pixel[5]] > cb) { + if(p[pixel[6]] > cb) { + if(p[pixel[7]] > cb) { + return 0x15d; //pixel 5 - 13 + } else + return 0; + } else + return 0; + } else + return 0; + } else + if(p[pixel[4]] > cb) { + if(p[pixel[5]] > cb) { + if(p[pixel[6]] > cb) { + if(p[pixel[7]] > cb) { + return 0x14c; //pixel 4 - 12 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + if(p[pixel[3]] > cb) { + if(p[pixel[4]] > cb) { + if(p[pixel[5]] > cb) { + if(p[pixel[6]] > cb) { + if(p[pixel[7]] > cb) { + return 0x13b; //pixel 3 - 11 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + if(p[pixel[2]] > cb) { + if(p[pixel[3]] > cb) { + if(p[pixel[4]] > cb) { + if(p[pixel[5]] > cb) { + 
if(p[pixel[6]] > cb) { + if(p[pixel[7]] > cb) { + return 0x12a; //pixel 2 - 10 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else if(p[pixel[8]] < c_b) { + if(p[pixel[7]] < c_b) { + if(p[pixel[9]] < c_b) { + if(p[pixel[10]] < c_b) { + if(p[pixel[6]] < c_b) { + if(p[pixel[5]] < c_b) { + if(p[pixel[4]] < c_b) { + if(p[pixel[3]] < c_b) { + if(p[pixel[2]] < c_b) { + return 0x02a; //pixel 2 - 10 + } else + if(p[pixel[11]] < c_b) { + return 0x03b; //pixel 3 - 11 + } else + return 0; + } else + if(p[pixel[11]] < c_b) { + if(p[pixel[12]] < c_b) { + return 0x04c; //pixel 4 - 12 + } else + return 0; + } else + return 0; + } else + if(p[pixel[11]] < c_b) { + if(p[pixel[12]] < c_b) { + if(p[pixel[13]] < c_b) { + return 0x05d; //pixel 5 - 13 + } else + return 0; + } else + return 0; + } else + return 0; + } else + if(p[pixel[11]] < c_b) { + if(p[pixel[12]] < c_b) { + if(p[pixel[13]] < c_b) { + if(p[pixel[14]] < c_b) { + return 0x06e; //pixel 6 - 14 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + if(p[pixel[11]] < c_b) { + if(p[pixel[12]] < c_b) { + if(p[pixel[13]] < c_b) { + if(p[pixel[14]] < c_b) { + if(p[pixel[15]] < c_b) { + return 0x07f; //pixel 7 - 15 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else if(p[pixel[0]] < c_b) { + if(p[pixel[1]] > cb) { + if(p[pixel[8]] > cb) { + if(p[pixel[7]] > cb) { + if(p[pixel[9]] > cb) { + if(p[pixel[6]] > cb) { + if(p[pixel[5]] > cb) { + if(p[pixel[4]] > cb) { + if(p[pixel[3]] > cb) { + if(p[pixel[2]] > cb) { + return 0x119; //pixel 1 - 9 + } else + if(p[pixel[10]] > cb) { + if(p[pixel[11]] > cb) { + return 0x13b; //pixel 3 - 11 + } else + return 0; + } else + return 0; + } else + if(p[pixel[10]] > cb) { + if(p[pixel[11]] > cb) { + if(p[pixel[12]] > cb) { + return 0x14c; //pixel 4 - 12 + } else + return 0; + } else + return 0; + } else + return 0; + } else + if(p[pixel[10]] > cb) { + if(p[pixel[11]] > cb) { + if(p[pixel[12]] > cb) { + if(p[pixel[13]] > cb) { + return 0x15d; //pixel 5 - 13 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + if(p[pixel[10]] > cb) { + if(p[pixel[11]] > cb) { + if(p[pixel[12]] > cb) { + if(p[pixel[13]] > cb) { + if(p[pixel[14]] > cb) { + return 0x16e; //pixel 6 - 14 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + if(p[pixel[10]] > cb) { + if(p[pixel[11]] > cb) { + if(p[pixel[12]] > cb) { + if(p[pixel[13]] > cb) { + if(p[pixel[14]] > cb) { + if(p[pixel[15]] > cb) { + return 0x17f; //pixel 7 - 15 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else if(p[pixel[8]] < c_b) { + if(p[pixel[9]] < c_b) { + if(p[pixel[10]] < c_b) { + if(p[pixel[11]] < c_b) { + if(p[pixel[12]] < c_b) { + if(p[pixel[13]] < c_b) { + if(p[pixel[14]] < c_b) { + if(p[pixel[15]] < c_b) { + return 0x080; //pixel 8 - 0 + } else + if(p[pixel[6]] < c_b) { + if(p[pixel[7]] < c_b) { + return 0x06e; //pixel 6 - 14 + } else + return 0; + } else + return 0; + } else + if(p[pixel[5]] < c_b) { + if(p[pixel[6]] < c_b) { + if(p[pixel[7]] < c_b) { + return 0x05d; //pixel 5 - 13 + } else + return 0; + } else + return 0; + } else + 
return 0; + } else + if(p[pixel[4]] < c_b) { + if(p[pixel[5]] < c_b) { + if(p[pixel[6]] < c_b) { + if(p[pixel[7]] < c_b) { + return 0x04c; //pixel 4 - 12 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + if(p[pixel[3]] < c_b) { + if(p[pixel[4]] < c_b) { + if(p[pixel[5]] < c_b) { + if(p[pixel[6]] < c_b) { + if(p[pixel[7]] < c_b) { + return 0x03b; //pixel 3 - 11 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + if(p[pixel[2]] < c_b) { + if(p[pixel[3]] < c_b) { + if(p[pixel[4]] < c_b) { + if(p[pixel[5]] < c_b) { + if(p[pixel[6]] < c_b) { + if(p[pixel[7]] < c_b) { + return 0x02a; //pixel 2 - 10 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else if(p[pixel[1]] < c_b) { + if(p[pixel[2]] > cb) { + if(p[pixel[9]] > cb) { + if(p[pixel[7]] > cb) { + if(p[pixel[8]] > cb) { + if(p[pixel[10]] > cb) { + if(p[pixel[6]] > cb) { + if(p[pixel[5]] > cb) { + if(p[pixel[4]] > cb) { + if(p[pixel[3]] > cb) { + return 0x12a; //pixel 2 - 10 + } else + if(p[pixel[11]] > cb) { + if(p[pixel[12]] > cb) { + return 0x14c; //pixel 4 - 12 + } else + return 0; + } else + return 0; + } else + if(p[pixel[11]] > cb) { + if(p[pixel[12]] > cb) { + if(p[pixel[13]] > cb) { + return 0x15d; //pixel 5 - 13 + } else + return 0; + } else + return 0; + } else + return 0; + } else + if(p[pixel[11]] > cb) { + if(p[pixel[12]] > cb) { + if(p[pixel[13]] > cb) { + if(p[pixel[14]] > cb) { + return 0x16e; //pixel 6 - 14 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + if(p[pixel[11]] > cb) { + if(p[pixel[12]] > cb) { + if(p[pixel[13]] > cb) { + if(p[pixel[14]] > cb) { + if(p[pixel[15]] > cb) { + return 0x17f; //pixel 7 - 15 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else if(p[pixel[9]] < c_b) { + if(p[pixel[10]] < c_b) { + if(p[pixel[11]] < c_b) { + if(p[pixel[12]] < c_b) { + if(p[pixel[13]] < c_b) { + if(p[pixel[14]] < c_b) { + if(p[pixel[15]] < c_b) { + return 0x091; //pixel 9 - 1 + } else + if(p[pixel[6]] < c_b) { + if(p[pixel[7]] < c_b) { + if(p[pixel[8]] < c_b) { + return 0x06e; //pixel 6 - 14 + } else + return 0; + } else + return 0; + } else + return 0; + } else + if(p[pixel[5]] < c_b) { + if(p[pixel[6]] < c_b) { + if(p[pixel[7]] < c_b) { + if(p[pixel[8]] < c_b) { + return 0x05d; //pixel 5 - 13 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + if(p[pixel[4]] < c_b) { + if(p[pixel[5]] < c_b) { + if(p[pixel[6]] < c_b) { + if(p[pixel[7]] < c_b) { + if(p[pixel[8]] < c_b) { + return 0x04c; //pixel 4 - 12 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + if(p[pixel[3]] < c_b) { + if(p[pixel[4]] < c_b) { + if(p[pixel[5]] < c_b) { + if(p[pixel[6]] < c_b) { + if(p[pixel[7]] < c_b) { + if(p[pixel[8]] < c_b) { + return 0x03b; //pixel 3 - 11 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else if(p[pixel[2]] < c_b) { + if(p[pixel[3]] > cb) { + if(p[pixel[10]] > cb) { + if(p[pixel[7]] > cb) { + if(p[pixel[8]] > cb) { + if(p[pixel[9]] > cb) { + if(p[pixel[11]] > cb) { + 
if(p[pixel[6]] > cb) { + if(p[pixel[5]] > cb) { + if(p[pixel[4]] > cb) { + return 0x13b; //pixel 3 - 11 + } else + if(p[pixel[12]] > cb) { + if(p[pixel[13]] > cb) { + return 0x15d; //pixel 5 - 13 + } else + return 0; + } else + return 0; + } else + if(p[pixel[12]] > cb) { + if(p[pixel[13]] > cb) { + if(p[pixel[14]] > cb) { + return 0x16e; //pixel 6 - 14 + } else + return 0; + } else + return 0; + } else + return 0; + } else + if(p[pixel[12]] > cb) { + if(p[pixel[13]] > cb) { + if(p[pixel[14]] > cb) { + if(p[pixel[15]] > cb) { + return 0x17f; //pixel 7 - 15 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else if(p[pixel[10]] < c_b) { + if(p[pixel[11]] < c_b) { + if(p[pixel[12]] < c_b) { + if(p[pixel[13]] < c_b) { + if(p[pixel[14]] < c_b) { + if(p[pixel[15]] < c_b) { + return 0x0a2; //pixel 10 - 2 + } else + if(p[pixel[6]] < c_b) { + if(p[pixel[7]] < c_b) { + if(p[pixel[8]] < c_b) { + if(p[pixel[9]] < c_b) { + return 0x06e; //pixel 6 - 14 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + if(p[pixel[5]] < c_b) { + if(p[pixel[6]] < c_b) { + if(p[pixel[7]] < c_b) { + if(p[pixel[8]] < c_b) { + if(p[pixel[9]] < c_b) { + return 0x05d; //pixel 5 - 13 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + if(p[pixel[4]] < c_b) { + if(p[pixel[5]] < c_b) { + if(p[pixel[6]] < c_b) { + if(p[pixel[7]] < c_b) { + if(p[pixel[8]] < c_b) { + if(p[pixel[9]] < c_b) { + return 0x04c; //pixel 4 - 12 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else if(p[pixel[3]] < c_b) { + if(p[pixel[4]] > cb) { + if(p[pixel[13]] > cb) { + if(p[pixel[7]] > cb) { + if(p[pixel[8]] > cb) { + if(p[pixel[9]] > cb) { + if(p[pixel[10]] > cb) { + if(p[pixel[11]] > cb) { + if(p[pixel[12]] > cb) { + if(p[pixel[6]] > cb) { + if(p[pixel[5]] > cb) { + return 0x14d; //pixel 4 - 13 + } else + if(p[pixel[14]] > cb) { + return 0x16e; //pixel 6 - 14 + } else + return 0; + } else + if(p[pixel[14]] > cb) { + if(p[pixel[15]] > cb) { + return 0x17f; //pixel 7 - 15 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else if(p[pixel[13]] < c_b) { + if(p[pixel[11]] > cb) { + if(p[pixel[5]] > cb) { + if(p[pixel[6]] > cb) { + if(p[pixel[7]] > cb) { + if(p[pixel[8]] > cb) { + if(p[pixel[9]] > cb) { + if(p[pixel[10]] > cb) { + if(p[pixel[12]] > cb) { + return 0x14c; //pixel 4 - 12 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else if(p[pixel[11]] < c_b) { + if(p[pixel[12]] < c_b) { + if(p[pixel[14]] < c_b) { + if(p[pixel[15]] < c_b) { + return 0x0b3; //pixel 11 - 3 + } else + if(p[pixel[6]] < c_b) { + if(p[pixel[7]] < c_b) { + if(p[pixel[8]] < c_b) { + if(p[pixel[9]] < c_b) { + if(p[pixel[10]] < c_b) { + return 0x06e; //pixel 6 - 14 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + if(p[pixel[5]] < c_b) { + if(p[pixel[6]] < c_b) { + if(p[pixel[7]] < c_b) { + if(p[pixel[8]] < c_b) { + if(p[pixel[9]] < c_b) { + if(p[pixel[10]] < c_b) { + return 0x05d; //pixel 5 - 13 + } else + return 0; + } else + 
return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + if(p[pixel[5]] > cb) { + if(p[pixel[6]] > cb) { + if(p[pixel[7]] > cb) { + if(p[pixel[8]] > cb) { + if(p[pixel[9]] > cb) { + if(p[pixel[10]] > cb) { + if(p[pixel[11]] > cb) { + if(p[pixel[12]] > cb) { + return 0x14c; //pixel 4 - 12 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else if(p[pixel[4]] < c_b) { + if(p[pixel[5]] > cb) { + if(p[pixel[14]] > cb) { + if(p[pixel[7]] > cb) { + if(p[pixel[8]] > cb) { + if(p[pixel[9]] > cb) { + if(p[pixel[10]] > cb) { + if(p[pixel[11]] > cb) { + if(p[pixel[12]] > cb) { + if(p[pixel[13]] > cb) { + if(p[pixel[6]] > cb) { + return 0x15e; //pixel 5 - 14 + } else + if(p[pixel[15]] > cb) { + return 0x17f; //pixel 7 - 15 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else if(p[pixel[14]] < c_b) { + if(p[pixel[12]] > cb) { + if(p[pixel[6]] > cb) { + if(p[pixel[7]] > cb) { + if(p[pixel[8]] > cb) { + if(p[pixel[9]] > cb) { + if(p[pixel[10]] > cb) { + if(p[pixel[11]] > cb) { + if(p[pixel[13]] > cb) { + return 0x15d; //pixel 5 - 13 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else if(p[pixel[12]] < c_b) { + if(p[pixel[13]] < c_b) { + if(p[pixel[15]] < c_b) { + return 0x0c4; //pixel 12 - 4 + } else + if(p[pixel[6]] < c_b) { + if(p[pixel[7]] < c_b) { + if(p[pixel[8]] < c_b) { + if(p[pixel[9]] < c_b) { + if(p[pixel[10]] < c_b) { + if(p[pixel[11]] < c_b) { + return 0x06e; //pixel 6 - 14 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + if(p[pixel[6]] > cb) { + if(p[pixel[7]] > cb) { + if(p[pixel[8]] > cb) { + if(p[pixel[9]] > cb) { + if(p[pixel[10]] > cb) { + if(p[pixel[11]] > cb) { + if(p[pixel[12]] > cb) { + if(p[pixel[13]] > cb) { + return 0x15d; //pixel 5 - 13 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else if(p[pixel[5]] < c_b) { + if(p[pixel[6]] > cb) { + if(p[pixel[15]] < c_b) { + if(p[pixel[13]] > cb) { + if(p[pixel[7]] > cb) { + if(p[pixel[8]] > cb) { + if(p[pixel[9]] > cb) { + if(p[pixel[10]] > cb) { + if(p[pixel[11]] > cb) { + if(p[pixel[12]] > cb) { + if(p[pixel[14]] > cb) { + return 0x16e; //pixel 6 - 14 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else if(p[pixel[13]] < c_b) { + if(p[pixel[14]] < c_b) { + return 0x0d5; //pixel 13 - 5 + } else + return 0; + } else + return 0; + } else + if(p[pixel[7]] > cb) { + if(p[pixel[8]] > cb) { + if(p[pixel[9]] > cb) { + if(p[pixel[10]] > cb) { + if(p[pixel[11]] > cb) { + if(p[pixel[12]] > cb) { + if(p[pixel[13]] > cb) { + if(p[pixel[14]] > cb) { + return 0x16e; //pixel 6 - 14 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else if(p[pixel[6]] < c_b) { + if(p[pixel[7]] > cb) { + if(p[pixel[14]] > cb) { + if(p[pixel[8]] > cb) { + if(p[pixel[9]] 
> cb) { + if(p[pixel[10]] > cb) { + if(p[pixel[11]] > cb) { + if(p[pixel[12]] > cb) { + if(p[pixel[13]] > cb) { + if(p[pixel[15]] > cb) { + return 0x17f; //pixel 7 - 15 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else if(p[pixel[14]] < c_b) { + if(p[pixel[15]] < c_b) { + return 0x0e6; //pixel 14 - 6 + } else + return 0; + } else + return 0; + } else if(p[pixel[7]] < c_b) { + if(p[pixel[8]] < c_b) { + return 0x008; //pixel 0 - 8 + } else + if(p[pixel[15]] < c_b) { + return 0x0f7; //pixel 15 - 7 + } else + return 0; + } else + if(p[pixel[14]] < c_b) { + if(p[pixel[15]] < c_b) { + return 0x0e6; //pixel 14 - 6 + } else + return 0; + } else + return 0; + } else + if(p[pixel[13]] > cb) { + if(p[pixel[7]] > cb) { + if(p[pixel[8]] > cb) { + if(p[pixel[9]] > cb) { + if(p[pixel[10]] > cb) { + if(p[pixel[11]] > cb) { + if(p[pixel[12]] > cb) { + if(p[pixel[14]] > cb) { + if(p[pixel[15]] > cb) { + return 0x17f; //pixel 7 - 15 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else if(p[pixel[13]] < c_b) { + if(p[pixel[14]] < c_b) { + if(p[pixel[15]] < c_b) { + return 0x0d5; //pixel 13 - 5 + } else + return 0; + } else + return 0; + } else + return 0; + } else + if(p[pixel[12]] > cb) { + if(p[pixel[7]] > cb) { + if(p[pixel[8]] > cb) { + if(p[pixel[9]] > cb) { + if(p[pixel[10]] > cb) { + if(p[pixel[11]] > cb) { + if(p[pixel[13]] > cb) { + if(p[pixel[14]] > cb) { + if(p[pixel[6]] > cb) { + return 0x16e; //pixel 6 - 14 + } else + if(p[pixel[15]] > cb) { + return 0x17f; //pixel 7 - 15 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else if(p[pixel[12]] < c_b) { + if(p[pixel[13]] < c_b) { + if(p[pixel[14]] < c_b) { + if(p[pixel[15]] < c_b) { + return 0x0c4; //pixel 12 - 4 + } else + if(p[pixel[6]] < c_b) { + if(p[pixel[7]] < c_b) { + if(p[pixel[8]] < c_b) { + if(p[pixel[9]] < c_b) { + if(p[pixel[10]] < c_b) { + if(p[pixel[11]] < c_b) { + return 0x06e; //pixel 6 - 14 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + if(p[pixel[11]] > cb) { + if(p[pixel[7]] > cb) { + if(p[pixel[8]] > cb) { + if(p[pixel[9]] > cb) { + if(p[pixel[10]] > cb) { + if(p[pixel[12]] > cb) { + if(p[pixel[13]] > cb) { + if(p[pixel[6]] > cb) { + if(p[pixel[5]] > cb) { + return 0x15d; //pixel 5 - 13 + } else + if(p[pixel[14]] > cb) { + return 0x16e; //pixel 6 - 14 + } else + return 0; + } else + if(p[pixel[14]] > cb) { + if(p[pixel[15]] > cb) { + return 0x17f; //pixel 7 - 15 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else if(p[pixel[11]] < c_b) { + if(p[pixel[12]] < c_b) { + if(p[pixel[13]] < c_b) { + if(p[pixel[14]] < c_b) { + if(p[pixel[15]] < c_b) { + return 0x0b3; //pixel 11 - 3 + } else + if(p[pixel[6]] < c_b) { + if(p[pixel[7]] < c_b) { + if(p[pixel[8]] < c_b) { + if(p[pixel[9]] < c_b) { + if(p[pixel[10]] < c_b) { + return 0x06e; //pixel 6 - 14 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + if(p[pixel[5]] < c_b) { + if(p[pixel[6]] < c_b) { + 
if(p[pixel[7]] < c_b) { + if(p[pixel[8]] < c_b) { + if(p[pixel[9]] < c_b) { + if(p[pixel[10]] < c_b) { + return 0x05d; //pixel 5 - 13 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + if(p[pixel[10]] > cb) { + if(p[pixel[7]] > cb) { + if(p[pixel[8]] > cb) { + if(p[pixel[9]] > cb) { + if(p[pixel[11]] > cb) { + if(p[pixel[12]] > cb) { + if(p[pixel[6]] > cb) { + if(p[pixel[5]] > cb) { + if(p[pixel[4]] > cb) { + return 0x14c; //pixel 4 - 12 + } else + if(p[pixel[13]] > cb) { + return 0x15d; //pixel 5 - 13 + } else + return 0; + } else + if(p[pixel[13]] > cb) { + if(p[pixel[14]] > cb) { + return 0x16e; //pixel 6 - 14 + } else + return 0; + } else + return 0; + } else + if(p[pixel[13]] > cb) { + if(p[pixel[14]] > cb) { + if(p[pixel[15]] > cb) { + return 0x17f; //pixel 7 - 15 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else if(p[pixel[10]] < c_b) { + if(p[pixel[11]] < c_b) { + if(p[pixel[12]] < c_b) { + if(p[pixel[13]] < c_b) { + if(p[pixel[14]] < c_b) { + if(p[pixel[15]] < c_b) { + return 0x0a2; //pixel 10 - 2 + } else + if(p[pixel[6]] < c_b) { + if(p[pixel[7]] < c_b) { + if(p[pixel[8]] < c_b) { + if(p[pixel[9]] < c_b) { + return 0x06e; //pixel 6 - 14 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + if(p[pixel[5]] < c_b) { + if(p[pixel[6]] < c_b) { + if(p[pixel[7]] < c_b) { + if(p[pixel[8]] < c_b) { + if(p[pixel[9]] < c_b) { + return 0x05d; //pixel 5 - 13 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + if(p[pixel[4]] < c_b) { + if(p[pixel[5]] < c_b) { + if(p[pixel[6]] < c_b) { + if(p[pixel[7]] < c_b) { + if(p[pixel[8]] < c_b) { + if(p[pixel[9]] < c_b) { + return 0x04c; //pixel 4 - 12 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + if(p[pixel[9]] > cb) { + if(p[pixel[7]] > cb) { + if(p[pixel[8]] > cb) { + if(p[pixel[10]] > cb) { + if(p[pixel[11]] > cb) { + if(p[pixel[6]] > cb) { + if(p[pixel[5]] > cb) { + if(p[pixel[4]] > cb) { + if(p[pixel[3]] > cb) { + return 0x13b; //pixel 3 - 11 + } else + if(p[pixel[12]] > cb) { + return 0x14c; //pixel 4 - 12 + } else + return 0; + } else + if(p[pixel[12]] > cb) { + if(p[pixel[13]] > cb) { + return 0x15d; //pixel 5 - 13 + } else + return 0; + } else + return 0; + } else + if(p[pixel[12]] > cb) { + if(p[pixel[13]] > cb) { + if(p[pixel[14]] > cb) { + return 0x16e; //pixel 6 - 14 + } else + return 0; + } else + return 0; + } else + return 0; + } else + if(p[pixel[12]] > cb) { + if(p[pixel[13]] > cb) { + if(p[pixel[14]] > cb) { + if(p[pixel[15]] > cb) { + return 0x17f; //pixel 7 - 15 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else if(p[pixel[9]] < c_b) { + if(p[pixel[10]] < c_b) { + if(p[pixel[11]] < c_b) { + if(p[pixel[12]] < c_b) { + if(p[pixel[13]] < c_b) { + if(p[pixel[14]] < c_b) { + if(p[pixel[15]] < c_b) { + return 0x091; //pixel 9 - 1 + } else + if(p[pixel[6]] < c_b) { + if(p[pixel[7]] < c_b) { + if(p[pixel[8]] < c_b) { + return 0x06e; //pixel 6 - 14 + } else + return 0; + } else + return 0; + 
} else + return 0; + } else + if(p[pixel[5]] < c_b) { + if(p[pixel[6]] < c_b) { + if(p[pixel[7]] < c_b) { + if(p[pixel[8]] < c_b) { + return 0x05d; //pixel 5 - 13 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + if(p[pixel[4]] < c_b) { + if(p[pixel[5]] < c_b) { + if(p[pixel[6]] < c_b) { + if(p[pixel[7]] < c_b) { + if(p[pixel[8]] < c_b) { + return 0x04c; //pixel 4 - 12 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + if(p[pixel[3]] < c_b) { + if(p[pixel[4]] < c_b) { + if(p[pixel[5]] < c_b) { + if(p[pixel[6]] < c_b) { + if(p[pixel[7]] < c_b) { + if(p[pixel[8]] < c_b) { + return 0x03b; //pixel 3 - 11 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + if(p[pixel[8]] > cb) { + if(p[pixel[7]] > cb) { + if(p[pixel[9]] > cb) { + if(p[pixel[10]] > cb) { + if(p[pixel[6]] > cb) { + if(p[pixel[5]] > cb) { + if(p[pixel[4]] > cb) { + if(p[pixel[3]] > cb) { + if(p[pixel[2]] > cb) { + return 0x12a; //pixel 2 - 10 + } else + if(p[pixel[11]] > cb) { + return 0x13b; //pixel 3 - 11 + } else + return 0; + } else + if(p[pixel[11]] > cb) { + if(p[pixel[12]] > cb) { + return 0x14c; //pixel 4 - 12 + } else + return 0; + } else + return 0; + } else + if(p[pixel[11]] > cb) { + if(p[pixel[12]] > cb) { + if(p[pixel[13]] > cb) { + return 0x15d; //pixel 5 - 13 + } else + return 0; + } else + return 0; + } else + return 0; + } else + if(p[pixel[11]] > cb) { + if(p[pixel[12]] > cb) { + if(p[pixel[13]] > cb) { + if(p[pixel[14]] > cb) { + return 0x16e; //pixel 6 - 14 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + if(p[pixel[11]] > cb) { + if(p[pixel[12]] > cb) { + if(p[pixel[13]] > cb) { + if(p[pixel[14]] > cb) { + if(p[pixel[15]] > cb) { + return 0x17f; //pixel 7 - 15 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else if(p[pixel[8]] < c_b) { + if(p[pixel[9]] < c_b) { + if(p[pixel[10]] < c_b) { + if(p[pixel[11]] < c_b) { + if(p[pixel[12]] < c_b) { + if(p[pixel[13]] < c_b) { + if(p[pixel[14]] < c_b) { + if(p[pixel[15]] < c_b) { + return 0x080; //pixel 8 - 0 + } else + if(p[pixel[6]] < c_b) { + if(p[pixel[7]] < c_b) { + return 0x06e; //pixel 6 - 14 + } else + return 0; + } else + return 0; + } else + if(p[pixel[5]] < c_b) { + if(p[pixel[6]] < c_b) { + if(p[pixel[7]] < c_b) { + return 0x05d; //pixel 5 - 13 + } else + return 0; + } else + return 0; + } else + return 0; + } else + if(p[pixel[4]] < c_b) { + if(p[pixel[5]] < c_b) { + if(p[pixel[6]] < c_b) { + if(p[pixel[7]] < c_b) { + return 0x04c; //pixel 4 - 12 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + if(p[pixel[3]] < c_b) { + if(p[pixel[4]] < c_b) { + if(p[pixel[5]] < c_b) { + if(p[pixel[6]] < c_b) { + if(p[pixel[7]] < c_b) { + return 0x03b; //pixel 3 - 11 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + if(p[pixel[2]] < c_b) { + if(p[pixel[3]] < c_b) { + if(p[pixel[4]] < c_b) { + if(p[pixel[5]] < c_b) { + if(p[pixel[6]] < c_b) { + if(p[pixel[7]] < c_b) { + return 0x02a; //pixel 2 - 10 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + 
} else + return 0; + } else + return 0; + } else + if(p[pixel[7]] > cb) { + if(p[pixel[8]] > cb) { + if(p[pixel[9]] > cb) { + if(p[pixel[6]] > cb) { + if(p[pixel[5]] > cb) { + if(p[pixel[4]] > cb) { + if(p[pixel[3]] > cb) { + if(p[pixel[2]] > cb) { + if(p[pixel[1]] > cb) { + return 0x119; //pixel 1 - 9 + } else + if(p[pixel[10]] > cb) { + return 0x12a; //pixel 2 - 10 + } else + return 0; + } else + if(p[pixel[10]] > cb) { + if(p[pixel[11]] > cb) { + return 0x13b; //pixel 3 - 11 + } else + return 0; + } else + return 0; + } else + if(p[pixel[10]] > cb) { + if(p[pixel[11]] > cb) { + if(p[pixel[12]] > cb) { + return 0x14c; //pixel 4 - 12 + } else + return 0; + } else + return 0; + } else + return 0; + } else + if(p[pixel[10]] > cb) { + if(p[pixel[11]] > cb) { + if(p[pixel[12]] > cb) { + if(p[pixel[13]] > cb) { + return 0x15d; //pixel 5 - 13 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + if(p[pixel[10]] > cb) { + if(p[pixel[11]] > cb) { + if(p[pixel[12]] > cb) { + if(p[pixel[13]] > cb) { + if(p[pixel[14]] > cb) { + return 0x16e; //pixel 6 - 14 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + if(p[pixel[10]] > cb) { + if(p[pixel[11]] > cb) { + if(p[pixel[12]] > cb) { + if(p[pixel[13]] > cb) { + if(p[pixel[14]] > cb) { + if(p[pixel[15]] > cb) { + return 0x17f; //pixel 7 - 15 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else if(p[pixel[7]] < c_b) { + if(p[pixel[8]] < c_b) { + if(p[pixel[9]] < c_b) { + if(p[pixel[6]] < c_b) { + if(p[pixel[5]] < c_b) { + if(p[pixel[4]] < c_b) { + if(p[pixel[3]] < c_b) { + if(p[pixel[2]] < c_b) { + if(p[pixel[1]] < c_b) { + return 0x019; //pixel 1 - 9 + } else + if(p[pixel[10]] < c_b) { + return 0x02a; //pixel 2 - 10 + } else + return 0; + } else + if(p[pixel[10]] < c_b) { + if(p[pixel[11]] < c_b) { + return 0x03b; //pixel 3 - 11 + } else + return 0; + } else + return 0; + } else + if(p[pixel[10]] < c_b) { + if(p[pixel[11]] < c_b) { + if(p[pixel[12]] < c_b) { + return 0x04c; //pixel 4 - 12 + } else + return 0; + } else + return 0; + } else + return 0; + } else + if(p[pixel[10]] < c_b) { + if(p[pixel[11]] < c_b) { + if(p[pixel[12]] < c_b) { + if(p[pixel[13]] < c_b) { + return 0x05d; //pixel 5 - 13 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + if(p[pixel[10]] < c_b) { + if(p[pixel[11]] < c_b) { + if(p[pixel[12]] < c_b) { + if(p[pixel[13]] < c_b) { + if(p[pixel[14]] < c_b) { + return 0x06e; //pixel 6 - 14 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + if(p[pixel[10]] < c_b) { + if(p[pixel[11]] < c_b) { + if(p[pixel[12]] < c_b) { + if(p[pixel[13]] < c_b) { + if(p[pixel[14]] < c_b) { + if(p[pixel[15]] < c_b) { + return 0x07f; //pixel 7 - 15 + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + } else + return 0; + + return 1; + } +} + +#endif diff --git a/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/fast9.h b/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/fast9.h new file mode 100644 index 00000000..16e44888 --- /dev/null +++ b/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/fast9.h @@ -0,0 +1,37 @@ +/* + * Author: Konstantin Schauwecker + * Year: 
2012
+ */
+
+#ifndef SPARSESTEREO_FAST9_H
+#define SPARSESTEREO_FAST9_H
+
+namespace sparsestereo {
+ // Some functions of FAST9 used by other classes
+ template <typename T>
+ class FAST9 {
+ public:
+ // Pixel offsets used
+ int pixel[16];
+
+ FAST9();
+
+ // Sets the image row step width
+ void setStep(int step);
+ // Performs non-maximum suppression
+ template <typename T2>
+ void nonMaxSuppression(const std::vector<cv::Point2i>& corners, const std::vector<T2>& scores, std::vector<cv::Point2i>& ret_nonmax) const;
+
+ // Autogenerated scoring method
+ int cornerScore(const T* p, T c, int bstart) const;
+ // Autogenerated corner test method
+ __always_inline int cornerTest(const T* p, T c, unsigned char b) const;
+
+ private:
+ static const int pixelOffsetsX[16];
+ static const int pixelOffsetsY[16];
+ int step;
+ };
+}
+
+#endif
diff --git a/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/hammingdistance.cpp b/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/hammingdistance.cpp
new file mode 100644
index 00000000..3b7b2f34
--- /dev/null
+++ b/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/hammingdistance.cpp
@@ -0,0 +1,24 @@
+/*
+ * Author: Konstantin Schauwecker
+ * Year: 2012
+ */
+
+#include "hammingdistance.h"
+
+namespace sparsestereo {
+ bool HammingDistance::tableInitialized = false;
+ unsigned char HammingDistance::lookupTable[1U<<16] __attribute__ ((aligned (16)));
+
+ HammingDistance::HammingDistance() {
+ if(!tableInitialized)
+ initTable();
+ }
+
+ void HammingDistance::initTable() {
+ // This is threadsafe because we always write the same values and
+ // set the initialization flag at the end
+ for(unsigned int i=0; i < sizeof(lookupTable)/sizeof(*lookupTable); i++)
+ lookupTable[i] = countNotPrecomputed(i);
+ tableInitialized = true;
+ }
+}
\ No newline at end of file
diff --git a/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/hammingdistance.h b/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/hammingdistance.h
new file mode 100644
index 00000000..15454872
--- /dev/null
+++ b/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/hammingdistance.h
@@ -0,0 +1,80 @@
+/*
+ * Author: Konstantin Schauwecker
+ * Year: 2012
+ */
+
+#ifndef SPARSESTEREO_HAMMINGDISTANCE_H
+#define SPARSESTEREO_HAMMINGDISTANCE_H
+
+namespace sparsestereo {
+ // Calculates the bitwise hamming distance between two integers
+ // or counts the number of set bits.
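+ // Example (illustrative, not from the original source): a 32-bit popcount
+ // reduces to two reads of the 16-bit lookup table declared below:
+ //   countBits(0xF0F0000Fu) == lookupTable[0x000F] + lookupTable[0xF0F0]
+ //                          == 4 + 8 == 12
+ // and a hamming distance is the popcount of the XOR:
+ //   calculate((unsigned short)0xF0F0, (unsigned short)0x0F0F) == countBits(0xFFFFu) == 16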
+ // NOTE: Prefer popcnt instruction if available
+ class HammingDistance {
+ public:
+ HammingDistance();
+
+ // Calculates the hamming distance for 32-bit integers
+ unsigned char calculate(unsigned int x, unsigned int y) const {
+ return countBits(x^y);
+ }
+
+ // Calculates the hamming distance for 16-bit integers
+ unsigned char calculate(unsigned short x, unsigned short y) const {
+ return countBits((unsigned short)(x^y));
+ }
+
+ // Determines the number of set bits for a 64-bit integer
+ unsigned char countBits(unsigned long long x) const {
+ // Count using precomputed table:
+ int buf = lookupTable[x & 0xFFFFU];
+ x = x >> 16;
+ buf += lookupTable[x & 0xFFFFU];
+ x = x >> 16;
+ return buf + lookupTable[x & 0xFFFFU] + lookupTable[x >> 16];
+ }
+
+ // Determines the number of set bits for a 32-bit integer
+ unsigned char countBits(unsigned int x) const {
+ // Count using precomputed table:
+ return lookupTable[x & 0xFFFFU] + lookupTable[x >> 16];
+ }
+
+ // Determines the number of set bits for a 16-bit integer
+ unsigned char countBits(unsigned short x) const {
+ return lookupTable[x];
+ }
+
+ // Returns a pointer to the static lookup table
+ const unsigned char* getLookupTable() const {return lookupTable;}
+
+ private:
+ static bool tableInitialized;
+ static unsigned char lookupTable[1U<<16];
+
+ // Initializes the lookup table
+ void initTable();
+
+ // Methods for parallel bit counting algorithm
+ unsigned int getBit(int i) {return 0x1U << i;}
+ unsigned int getMask(int i) {
+ return (((unsigned int)(-1)) / (getBit(getBit(i)) + 1U));
+ }
+ unsigned int countBlocks(int x, int level) {
+ return ((x) & getMask(level)) + (((x) >> (getBit(level))) & getMask(level));
+ }
+
+ // Performs a parallel bit counting without using the precomputed table
+ unsigned char countNotPrecomputed(unsigned int i) {
+ i = countBlocks(i, 0);
+ i = countBlocks(i, 1);
+ i = countBlocks(i, 2);
+ i = countBlocks(i, 3);
+ i = countBlocks(i, 4);
+ // i = countBlocks(i, 5); //For 64 Bits
+ return i;
+ }
+ };
+}
+
+#endif
diff --git a/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/imageconversion.cpp b/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/imageconversion.cpp
new file mode 100644
index 00000000..4386e138
--- /dev/null
+++ b/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/imageconversion.cpp
@@ -0,0 +1,95 @@
+/*
+ * Author: Konstantin Schauwecker
+ * Year: 2012
+ */
+
+#include "imageconversion.h"
+#include "simd.h"
+
+namespace sparsestereo {
+ using namespace cv;
+
+ double ImageConversion::get8UScalingFactor(int type) {
+ if(type == CV_16U)
+ return 1.0/256.0;
+ else if(type == CV_32F || type == CV_64F)
+ return 255.0;
+ else return 1.0;
+ }
+
+ double ImageConversion::get16UScalingFactor(int type) {
+ if(type == CV_8U)
+ return 256.0;
+ else if(type == CV_32F || type == CV_64F)
+ return 65535.0;
+ else return 1.0;
+ }
+
+ bool ImageConversion::convert(const Mat& input, Mat* output, int channels, int type,
+ double scalingFactor, int colorConv) {
+ if(input.channels() == channels && input.type() == type) {
+ // No conversion necessary
+ (*output) = input;
+ return false;
+ }
+ else if(input.type() != type) {
+ // Convert the data type
+ Mat buffer;
+ input.convertTo(buffer, type, scalingFactor);
+ if(input.channels() != channels)
+ // Then convert color
+ cvtColor(buffer, *output, colorConv);
+ else (*output) = buffer;
+ } else {
+ // Only convert color
+ cvtColor(input, *output, colorConv);
+ }
+
+ return true;
+ }
+
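+ // Usage sketch (illustrative; "frame" is a hypothetical input image): the
+ // boolean returned by the converters below tells the caller whether pixel
+ // data was actually copied:
+ //   cv::Mat_<unsigned char> gray;
+ //   bool copied = ImageConversion::convertTo8U(frame, &gray);
+ //   if(!copied) { /* gray aliases frame's buffer, so keep frame alive */ }
+ // A false return means no conversion was needed and the output wraps the
+ // input buffer instead of owning a copy.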
+ bool ImageConversion::convertTo8U(const Mat& input, cv::Mat_<unsigned char>* output) {
+ return convert(input, output, 1, CV_8U, get8UScalingFactor(input.type()), CV_RGB2GRAY);
+ }
+
+ bool ImageConversion::convertTo16U(const Mat& input, cv::Mat_<unsigned short>* output) {
+ return convert(input, output, 1, CV_16U, get16UScalingFactor(input.type()), CV_RGB2GRAY);
+ }
+
+ bool ImageConversion::convertToColor(const Mat& input, cv::Mat_<cv::Vec3b>* output) {
+ return convert(input, output, 3, CV_8U, get8UScalingFactor(input.type()), CV_GRAY2RGB);
+ }
+
+ void ImageConversion::unsignedToSigned(const Mat_<unsigned char>& src, Mat_<char>* dst) {
+ // Adds an offset of -128 to all values
+ if(src.cols % 16 == 0) {
+ // SSE Optimized
+ v16qi offset = SIMD::scalar16(-128);
+
+ for(int y=0; y
+
+ void ImageConversion::signedToUnsigned(const Mat_<char>& src, Mat_<unsigned char>* dst) {
+ // Adds an offset of 128 to all values
+ if(src.cols%16 == 0) {
+ // SSE Optimized
+ v16qi offset = SIMD::scalar16(128);
+
+ for(int y=0; y
diff --git a/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/imageconversion.h b/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/imageconversion.h
new file mode 100644
--- /dev/null
+++ b/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/imageconversion.h
+/*
+ * Author: Konstantin Schauwecker
+ * Year: 2012
+ */
+
+#ifndef SPARSESTEREO_IMAGECONVERSION_H
+#define SPARSESTEREO_IMAGECONVERSION_H
+
+#include <opencv2/core/core.hpp>
+
+namespace sparsestereo {
+ // Static methods for image conversion. If the return value is false
+ // then the output image points to the data of the input image. In this
+ // case, the input image should not be deleted.
+ class ImageConversion {
+ public:
+ // Converts an image to 8-bit grayscale
+ static bool convertTo8U(const cv::Mat& input, cv::Mat_<unsigned char>* output);
+ // Converts an image to 16-bit grayscale
+ static bool convertTo16U(const cv::Mat& input, cv::Mat_<unsigned short>* output);
+ // Converts an image to a color image
+ static bool convertToColor(const cv::Mat& input, cv::Mat_<cv::Vec3b>* output);
+ // Converts an unsigned char to a signed char image
+ static void unsignedToSigned(const cv::Mat_<unsigned char>& src,cv::Mat_<char>* dst);
+ // Converts a signed char to an unsigned char image
+ static void signedToUnsigned(const cv::Mat_<char>& src,cv::Mat_<unsigned char>* dst);
+
+ private:
+ // Methods for calculating scaling factors
+ static double get8UScalingFactor(int type);
+ static double get16UScalingFactor(int type);
+ static bool convert(const cv::Mat& input, cv::Mat* output, int channels, int type,
+ double scalingFactor, int colorConv);
+ };
+}
+
+#endif
diff --git a/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/simd.cpp b/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/simd.cpp
new file mode 100644
index 00000000..0cb0f425
--- /dev/null
+++ b/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/simd.cpp
@@ -0,0 +1,29 @@
+/*
+ * Author: Konstantin Schauwecker
+ * Year: 2012
+ */
+
+#include "simd.h"
+
+#define S16(x) SIMD::scalar16NonLookup(x)
+
+namespace sparsestereo {
+ v16qi SIMD::v16Constants[256] __attribute__((aligned (16))) = {
+ S16(0), S16(1), S16(2), S16(3), S16(4), S16(5), S16(6), S16(7), S16(8), S16(9), S16(10), S16(11), S16(12), S16(13), S16(14), S16(15),
+ S16(16), S16(17), S16(18), S16(19), S16(20), S16(21), S16(22), S16(23), S16(24), S16(25), S16(26), S16(27), S16(28), S16(29), S16(30), S16(31),
+ S16(32), S16(33), S16(34), S16(35), S16(36), S16(37), S16(38), S16(39), S16(40), S16(41), S16(42), S16(43), S16(44), S16(45), S16(46), S16(47),
+ S16(48), S16(49), S16(50), S16(51), S16(52), S16(53), S16(54), S16(55), S16(56), S16(57), S16(58), S16(59), S16(60), S16(61), S16(62), S16(63),
+ S16(64), S16(65), S16(66), S16(67), S16(68), S16(69), S16(70), S16(71), S16(72), S16(73), S16(74), S16(75), S16(76), S16(77), S16(78), S16(79),
+ S16(80), S16(81), S16(82), S16(83), S16(84), S16(85), S16(86), S16(87), S16(88), S16(89), S16(90), S16(91), S16(92), S16(93), S16(94), S16(95),
+ S16(96), S16(97), S16(98), S16(99), S16(100), S16(101), S16(102), S16(103), S16(104), S16(105), S16(106), S16(107), S16(108), S16(109),
S16(110), S16(111), + S16(112), S16(113), S16(114), S16(115), S16(116), S16(117), S16(118), S16(119), S16(120), S16(121), S16(122), S16(123), S16(124), S16(125), S16(126), S16(127), + S16(128), S16(129), S16(130), S16(131), S16(132), S16(133), S16(134), S16(135), S16(136), S16(137), S16(138), S16(139), S16(140), S16(141), S16(142), S16(143), + S16(144), S16(145), S16(146), S16(147), S16(148), S16(149), S16(150), S16(151), S16(152), S16(153), S16(154), S16(155), S16(156), S16(157), S16(158), S16(159), + S16(160), S16(161), S16(162), S16(163), S16(164), S16(165), S16(166), S16(167), S16(168), S16(169), S16(170), S16(171), S16(172), S16(173), S16(174), S16(175), + S16(176), S16(177), S16(178), S16(179), S16(180), S16(181), S16(182), S16(183), S16(184), S16(185), S16(186), S16(187), S16(188), S16(189), S16(190), S16(191), + S16(192), S16(193), S16(194), S16(195), S16(196), S16(197), S16(198), S16(199), S16(200), S16(201), S16(202), S16(203), S16(204), S16(205), S16(206), S16(207), + S16(208), S16(209), S16(210), S16(211), S16(212), S16(213), S16(214), S16(215), S16(216), S16(217), S16(218), S16(219), S16(220), S16(221), S16(222), S16(223), + S16(224), S16(225), S16(226), S16(227), S16(228), S16(229), S16(230), S16(231), S16(232), S16(233), S16(234), S16(235), S16(236), S16(237), S16(238), S16(239), + S16(240), S16(241), S16(242), S16(243), S16(244), S16(245), S16(246), S16(247), S16(248), S16(249), S16(250), S16(251), S16(252), S16(253), S16(254), S16(255) + }; +} diff --git a/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/simd.h b/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/simd.h new file mode 100644 index 00000000..854f91e5 --- /dev/null +++ b/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/simd.h @@ -0,0 +1,150 @@ +/* + * Author: Konstantin Schauwecker + * Year: 2012 + */ + +#ifndef SPARSESTEREO_SIMD_H +#define SPARSESTEREO_SIMD_H + +#include +#include +#include + +// SIMD Headers +#ifdef __SSE3__ +#include +#else +#error "SSE3 support not available" +#endif + +#ifdef __SSSE3__ +#include +#else +#warning "SSSE3 support not available" +#endif + +#ifdef __SSE4_1__ +#include +#else +#warning "SSE4.1 support not available" +#endif + +#ifdef __SSE4_2__ +#include +#else +#warning "SSE4.2 support not available" +#endif + +/* This file contains declarations required for SIMD programming */ + +namespace sparsestereo { + // Integer types + typedef char v16qi __attribute__ ((vector_size (16), aligned (16))); + typedef short v8hi __attribute__ ((vector_size (16), aligned (16))); + typedef int v4si __attribute__ ((vector_size (16), aligned (16))); + typedef long long v2di __attribute__ ((vector_size (16), aligned (16))); + + // Floating point types + typedef float v4sf __attribute__ ((vector_size (16), aligned (16))); + typedef double v2sf __attribute__ ((vector_size (16), aligned (16))); + + class SIMD { + public: + // Methods for creating constants with just one scalar + static __always_inline const v16qi& scalar16(char c) { + /*const v16qi ret = {c, c, c, c, c, c, c, c, c, c, c, c, c, c, c, c}; + return ret;*/ + return v16Constants[(unsigned char)c]; + } + + static v16qi scalar16NonLookup(char c) { + const v16qi ret = {c, c, c, c, c, c, c, c, c, c, c, c, c, c, c, c}; + return ret; + } + + static __always_inline v8hi scalar8(short s) { + const v8hi ret = {s, s, s, s, s, s, s, s}; + return ret; + } + + static __always_inline v4si scalar4(int i) { + const v4si ret = {i, i, i, i}; + return ret; + } + + static __always_inline v2di scalar2(long long 
i) { + const v2di ret = {i, i}; + return ret; + } + + // Methods for accessing vector elements + + static __always_inline char& element16(v16qi& vec, int index) { + union Accessor { + v16qi vector; + char elements[16]; + } __attribute__((may_alias)); + + return ((Accessor*)&vec)->elements[index]; + } + + static __always_inline short& element8(v8hi& vec, int index) { + union Accessor { + v8hi vector; + short elements[8]; + } __attribute__((may_alias)); + + return ((Accessor*)&vec)->elements[index]; + } + + static __always_inline int& element4(v4si& vec, int index) { + union Accessor { + v4si vector; + int elements[16]; + } __attribute__((may_alias)); + + return ((Accessor*)&vec)->elements[index]; + } + + static __always_inline long long& element2(v2di& vec, int index) { + union Accessor { + v2di vector; + long long elements[2]; + } __attribute__((may_alias)); + + return ((Accessor*)&vec)->elements[index]; + } + + // Allocates a memory aligned array (only use primitive types!) + template + static __always_inline boost::shared_array alignedNew(int size, int alignment = 16) { + + char* ptr = new char[sizeof(T) * size + alignment -1]; + T* alignedPtr = (T*)((size_t(ptr) + alignment-1) & -alignment); + + AlignedDeallocator d = {ptr}; + return boost::shared_array(alignedPtr, d); + } + + // Packed rotate left, 16-bit + static __always_inline v8hi prol16(const v8hi& value) { + v8hi lsb = __builtin_ia32_psrlwi128(value, 15); + return (value+value) | lsb; + } + + private: + // Required for dealocating aligned memory areas + template + struct AlignedDeallocator { + char* ptr; + void operator() (T*) { + delete [] ptr; + } + }; + + // Lookup table for SSE 8-bit constants + static v16qi v16Constants[256] __attribute__((aligned (16))); + }; +} + +#endif diff --git a/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/sparsematch.h b/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/sparsematch.h new file mode 100644 index 00000000..c1a009ef --- /dev/null +++ b/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/sparsematch.h @@ -0,0 +1,54 @@ +/* + * Author: Konstantin Schauwecker + * Year: 2012 + */ + +#ifndef SPARSESTEREO_SPARSEMATCH_H +#define SPARSESTEREO_SPARSEMATCH_H + +#include +#include "stereorectification.h" +#include "exception.h" + +namespace sparsestereo { + // Struct for a single sparse stereo match + struct SparseMatch { + SparseMatch() + : imgLeft(NULL), imgRight(NULL), rectLeft(), rectRight(), cost(0) {} + SparseMatch(const cv::KeyPoint* imgLeft, const cv::KeyPoint* imgRight, + const cv::Point2f& rectLeft, const cv::Point2f& rectRight, short cost = 0) + : imgLeft(imgLeft), imgRight(imgRight), rectLeft(rectLeft), rectRight(rectRight), cost(cost) {} + SparseMatch(const cv::KeyPoint* imgLeft, const cv::KeyPoint* imgRight, short cost = 0) + : imgLeft(imgLeft), imgRight(imgRight), rectLeft(imgLeft->pt), rectRight(imgRight->pt), cost(cost) {} + + const cv::KeyPoint* imgLeft; + const cv::KeyPoint* imgRight; + cv::Point2f rectLeft; + cv::Point2f rectRight; + short cost; + + float disparity() const { + return rectLeft.x - rectRight.x; + } + + // Converts a set of image coordinates and disparity values to 3d points + static void projectMatches(const std::vector& matches, std::vector* points, + const StereoRectification* rect) { + if(rect != NULL) { + const CalibrationResult& calib = rect->getCalibrationResult(); + cv::Mat_ qFloat(calib.Q.rows, calib.Q.cols); + calib.Q.convertTo(qFloat, CV_32F); + + points->reserve(matches.size()); + for(unsigned int i=0; i 
product = qFloat * cv::Mat_(4, 1, point); + points->push_back(cv::Point3f(product(0)/product(3), product(1)/product(3), product(2)/product(3))); + } + } + else throw Exception("Projection of 3d points is only possible for calibrated cameras."); + } + }; +} + +#endif diff --git a/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/sparserectification.cpp b/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/sparserectification.cpp new file mode 100644 index 00000000..c5373d13 --- /dev/null +++ b/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/sparserectification.cpp @@ -0,0 +1,111 @@ +/* + * Author: Konstantin Schauwecker + * Year: 2012 + */ + +#include "sparserectification.h" +#include "stereorectification.h" + +namespace sparsestereo { + using namespace std; + using namespace cv; + + SparseRectification::SparseRectification(bool subpixelFeatures, StereoRectification* rect) + : subpixelFeatures(subpixelFeatures), rect(rect) { + } + + void SparseRectification::rectify(const vector& inLeft, const vector& inRight, vector* outLeft, vector* outRight) { + + outLeft->resize(inLeft.size()); + outRight->resize(inRight.size()); + + //#pragma omp parallel sections default(shared) num_threads(2) + { + //#pragma omp section + { + if(rect != NULL) { + // Rectify left features + if(subpixelFeatures) + for(int i=0; i<(int)inLeft.size(); i++) { + (*outLeft)[i].imgPoint = &inLeft[i]; + (*outLeft)[i].rectPoint = rect->rectifyLeftPoint(inLeft[i].pt); + } + else for(int i=0; i<(int)inLeft.size(); i++) { + (*outLeft)[i].imgPoint = &inLeft[i]; + (*outLeft)[i].rectPoint = rect->rectifyLeftPoint(cv::Point2i(inLeft[i].pt.x, inLeft[i].pt.y)); + } + } else + // Copy unrectified + for(int i=0; i<(int)inLeft.size(); i++) { + (*outLeft)[i].imgPoint = &inLeft[i]; + (*outLeft)[i].rectPoint = inLeft[i].pt; + } + + // Sort features + sort(outLeft->begin(), outLeft->end(), featureSortComp); + } + + //#pragma omp section + { + if(rect != NULL) { + // Rectify right features + if(subpixelFeatures) + for(int i=0; i<(int)inRight.size(); i++) { + (*outRight)[i].imgPoint = &inRight[i]; + (*outRight)[i].rectPoint = rect->rectifyRightPoint(inRight[i].pt); + } + else for(int i=0; i<(int)inRight.size(); i++) { + (*outRight)[i].imgPoint = &inRight[i]; + (*outRight)[i].rectPoint = rect->rectifyRightPoint(cv::Point2i(inRight[i].pt.x, inRight[i].pt.y)); + } + } else + // Copy unrectified + for(int i=0; i<(int)inRight.size(); i++) { + (*outRight)[i].imgPoint = &inRight[i]; + (*outRight)[i].rectPoint = inRight[i].pt; + } + + // Sort features + sort(outRight->begin(), outRight->end(), featureSortComp); + } + } + } + + void SparseRectification::precomputeEpilinesStart(int imageWidth, int imageHeight, Mat_* dst) { + Epiline::setMaxEpilineLength(imageWidth); + (*dst) = cv::Mat_(imageHeight, imageWidth, (short)0); + + for(int y=0; ygetCalibrationResult().imageSize.width-1, rightRectX)); + + for(int i=0; i epsilon; i++) { + Epiline epiline = rect->getLeftEpiline(cv::Point2f(x1, leftImgY)); + if(!epiline.isValid()) + break; // Lets quit early if we can't find a valid epiline + float y1 = epiline.at(int(x1+0.5)); + + y1 = max(0.0F, min((float)rect->getCalibrationResult().imageSize.height-1, y1)); + + cv::Point2f guess = rect->rectifyLeftPoint(cv::Point2f(x1, y1)); + float d = guess.x - rightRectX; + + x1=max(0.0F, min((float)rect->getCalibrationResult().imageSize.width-1, x1-d)); + } + return x1; + } else { + // We don't have to guess, we know it! 
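+			// (Illustrative note: the loop above is a fixed-point iteration.
+			//  Starting from x1 = rightRectX, each pass rectifies the current
+			//  guess, measures the leftover disparity d = rectify(x1).x - rightRectX,
+			//  and shifts x1 by -d. Because the distortion mapping is close to
+			//  affine locally, a few iterations reach sub-pixel accuracy.)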
+ return rightRectX; + } + } +} diff --git a/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/sparserectification.h b/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/sparserectification.h new file mode 100644 index 00000000..8145b72d --- /dev/null +++ b/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/sparserectification.h @@ -0,0 +1,49 @@ +/* + * Author: Konstantin Schauwecker + * Year: 2012 + */ + +#ifndef SPARSESTEREO_SPARSERECTIFICATION_H +#define SPARSESTEREO_SPARSERECTIFICATION_H + +#include +#include + +namespace sparsestereo { + class StereoRectification; + + // Rectifies two sets of features from the left and right image + class SparseRectification { + public: + // Stores a pair of rectified and unrectified feature points + struct RectifiedFeature { + const cv::KeyPoint* imgPoint; + cv::Point2f rectPoint; + }; + + SparseRectification(bool subpixelFeatures, StereoRectification* rect); + + // Rectifies a set of sparse features + void rectify(const std::vector& inLeft, const std::vector& inRight, + std::vector* outLeft, std::vector* outRight); + + // Precomputes epilines start position + void precomputeEpilinesStart(int imageWidth, int imageHeight, cv::Mat_* dst); + + private: + // Comparision operator used for sorting feature points + static bool featureSortComp(const RectifiedFeature& left, const RectifiedFeature& right) { + return left.rectPoint.y < right.rectPoint.y || (left.rectPoint.y == right.rectPoint.y && + left.rectPoint.x < right.rectPoint.x); + } + + bool subpixelFeatures; + StereoRectification* rect; + + // Estimates the distorted point in the left image, for which the disparity between rectified left and + // right image is 0. The given left point just serves for a first estimate. 
+ inline float estimateDistortedInfiniteLeftX(int leftImgY, int rightRectX); + }; +} + +#endif diff --git a/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/sparsestereo-inl.h b/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/sparsestereo-inl.h new file mode 100644 index 00000000..e0fcee22 --- /dev/null +++ b/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/sparsestereo-inl.h @@ -0,0 +1,225 @@ +/* + * Author: Konstantin Schauwecker + * Year: 2012 + */ + +#ifndef SPARSESTEREO_SPARSESTEREO_INL_H +#define SPARSESTEREO_SPARSESTEREO_INL_H + +#include "sparsestereo.h" +#include +#include +#include +#include "exception.h" +#include "stereorectification.h" +#include "simd.h" + +namespace sparsestereo { + using namespace std; + + template + SparseStereo::SparseStereo(int maxDisparity, float yTolerance, float uniqueness, + StereoRectification* rect, bool subpixelFeatures, bool storeUnmatched, int leftRightStep) + : maxDisparity(maxDisparity), yTolerance(yTolerance), uniqueness(uniqueness), rect(rect), + storeUnmatched(storeUnmatched), leftRightStep(leftRightStep), precompEpilinesStart(0, 0), + sparseRect(subpixelFeatures, rect) { + } + + template + SparseStereo::~SparseStereo() { + } + + template + void SparseStereo::match(const cv::Mat& left, const cv::Mat& right, + const std::vector& leftFeat, const std::vector& rightFeat, + cv::vector* matches) { + + if(left.size() != right.size() || (rect != NULL && left.size() != rect->getCalibrationResult().imageSize)) + throw Exception("Mismatching image sizes"); + if(leftFeat.size() == 0 || rightFeat.size() == 0) + return; // Can't match anything + + // For the first time, or when the image size changes, compute epiline lookup table + if(left.cols != precompEpilinesStart.cols || left.rows != precompEpilinesStart.rows) + sparseRect.precomputeEpilinesStart(left.cols, left.rows, &precompEpilinesStart); + + // Rectify feature points + sparseRect.rectify(leftFeat, rightFeat, &leftFeatures, &rightFeatures); + + // Features are now sorted from top left to bottom right + boost::shared_array offsets = SIMD::alignedNew(left.rows); + int maxRowLen __attribute__((unused)) = getRowOffsets(rightFeatures, offsets.get(), left.rows); + + minimumMatches.resize(leftFeatures.size()); + + // Perform matching + calcCosts(left, right, offsets.get()); + + // Perform left/right consistency check + denseConsistencyCheck(left, right); + + // Compose sparse disparity list + for(int i=0; i<(int)leftFeatures.size(); i++) { + const cv::KeyPoint* rightFeature = NULL; + cv::Point2f rightRect = leftFeatures[i].rectPoint + cv::Point2f(1, 0); //Disparity -1 + short cost = 0; + + if(minimumMatches[i].first != -1) { + rightFeature = rightFeatures[minimumMatches[i].first].imgPoint; + rightRect = rightFeatures[minimumMatches[i].first].rectPoint; + cost = minimumMatches[i].second; + } + + if(rightFeature != NULL || storeUnmatched) + matches->push_back(SparseMatch(leftFeatures[i].imgPoint, rightFeature, leftFeatures[i].rectPoint, rightRect, cost)); + } + } + + template + void SparseStereo::calcCosts(const cv::Mat& left, const cv::Mat& right, unsigned int* rowOffsets) { + + int lastRow = -1e9; //Invalid value + CORRELATION correlation; + correlation.setReferenceImage(left); + correlation.setComparisonImage(right); + + for(int l=0; l<(int)leftFeatures.size(); l++) { + // Find row start and end points + int ly = (int)(leftFeatures[l].rectPoint.y + 0.5); + int rightStart = rowOffsets[min(left.rows-1, max(0, int(ly - yTolerance)))]; + int rightEnd = 
rowOffsets[min(left.rows-1, max(0, int(ly + yTolerance) + 2))]; + + if(ly != lastRow) { + // Skip top and bottom + if(ly < correlation.getWindowSize()/2) { + continue; + } + else if(ly >= left.rows - correlation.getWindowSize()/2 -1) { + // We reached the bottom of the image. Lets invalidate the remaining features. + for(; l<(int)leftFeatures.size(); l++) + minimumMatches[l].first = -1; + break; + } + + // We have to initialize a new cost cube row + lastRow = ly; + } + + COST_TYPE minCost = numeric_limits::max(); + int minRightFeature = -1; + + correlation.setReferencePoint(cv::Point2i(int(leftFeatures[l].imgPoint->pt.x + 0.5), + int(leftFeatures[l].imgPoint->pt.y + 0.5))); + + for(int r=rightStart; r= leftFeatures[l].rectPoint.x - maxDisparity && + fabs(leftFeatures[l].rectPoint.y - rightFeatures[r].rectPoint.y) <= yTolerance && + rightFeatures[r].rectPoint.x >= correlation.getWindowSize()/2 && + rightFeatures[r].rectPoint.x < left.cols - correlation.getWindowSize()/2) + { + // It is! Let's compute a cost + COST_TYPE currentCost = correlation.match( + cv::Point2i(int(rightFeatures[r].imgPoint->pt.x + 0.5), int(rightFeatures[r].imgPoint->pt.y + 0.5))); + + if(currentCost < minCost) { // Only store smaller costs + minCost = currentCost; + minRightFeature = r; + } + } + } + + minimumMatches[l] = std::pair(minRightFeature, minCost); + } + } + + template + int SparseStereo::getRowOffsets(const std::vector& features, + unsigned int* offsets, int maxRows) { + int lastRow=-1, lastOffset = 0, longestRowLen = -1; + + for(unsigned int i=0; i= maxRows -1) + break; // The rectified point is outside the visible image + else if(y<0) + continue; + + if(y != lastRow) { + // Save offset + for(int j=lastRow+1; j<=y; j++) + offsets[j] = i; + + if((int)i - lastOffset > longestRowLen) + longestRowLen = i - lastOffset; + + lastRow = y; + lastOffset = i; + } + } + + // Save offset for the remaining items + for(int i=lastRow+1; i < maxRows; i++) + offsets[i] = features.size(); + if((int)features.size() - lastOffset > longestRowLen) + longestRowLen = features.size() - lastOffset; + + return longestRowLen; //Return the maximum row length + } + + template + void SparseStereo::denseConsistencyCheck(const cv::Mat& left, const cv::Mat& right) { + int lastRow = -1e9; //Invalid value + CORRELATION correlation; + correlation.setReferenceImage(right); + correlation.setComparisonImage(left); + + for(int l = 0; l < (int)leftFeatures.size(); l++) { + int ly = (int)(leftFeatures[l].rectPoint.y + 0.5); + if(ly != lastRow) { + // Skip top and bottom + if(ly < correlation.getWindowSize()/2) + continue; + else if(ly >= left.rows - correlation.getWindowSize()/2 -1) + break; + + lastRow = ly; + } + + // Get the minimum match and cost + int r = minimumMatches[l].first; + COST_TYPE minCost = minimumMatches[l].second; + + if(r == -1) + continue; + + // Get epiline start and end + float leftStartX = precompEpilinesStart(leftFeatures[l].imgPoint->pt.y, max(0, min( + precompEpilinesStart.cols, (int)(rightFeatures[r].rectPoint.x+0.5)))); + int startX = max(correlation.getWindowSize()/2, int(leftStartX /*+ 0.5*/)); + int endX = min(precompEpilinesStart.cols - correlation.getWindowSize()/2, + int(leftStartX + maxDisparity /*+ 0.5*/)); + Epiline epiline = rect!=NULL ? 
rect->getLeftEpiline(leftFeatures[l].imgPoint->pt) : Epiline(leftFeatures[l].imgPoint->pt.y); + if(!epiline.isValid()) + continue; + + // Preform consistency check + correlation.setReferencePoint(cv::Point2i(int(rightFeatures[r].imgPoint->pt.x + 0.5), int(rightFeatures[r].imgPoint->pt.y + 0.5))); + for(int x = startX; x < endX; x+= leftRightStep) { + int y = (int)(epiline.at(x) + 0.5); + if(y < correlation.getWindowSize()/2 || y >= left.rows - correlation.getWindowSize()/2) + continue; + + COST_TYPE currentCost = correlation.match(cv::Point2i(x, y)); + + if(currentCost < minCost/uniqueness && fabs(x - int(leftFeatures[l].imgPoint->pt.x+0.5)) > leftRightStep) { + minimumMatches[l].first = -1; + break; + } + } + } + } +} + +#endif diff --git a/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/sparsestereo.h b/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/sparsestereo.h new file mode 100644 index 00000000..d4d44568 --- /dev/null +++ b/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/sparsestereo.h @@ -0,0 +1,57 @@ +/* + * Author: Konstantin Schauwecker + * Year: 2012 + */ + +#ifndef SPARSESTEREO_SPARSESTEREO_H +#define SPARSESTEREO_SPARSESTEREO_H + +#include +#include +#include +#include +#include +#include "sparsematch.h" +#include "sparserectification.h" + +namespace sparsestereo { + class StereoRectification; + + // Class bundling sparse stereo algorithms + template + class SparseStereo { + public: + SparseStereo(int maxDisparity, float yTolerance = 1, float uniqueness = 0.6, StereoRectification* rect = NULL, + bool subpixelFeatures = false, bool storeUnmatched = false, int leftRightStep = 1); + ~SparseStereo(); + + // Matches using a census window + void match(const cv::Mat& left, const cv::Mat& right, const std::vector& leftFeat, + const std::vector& rightFeat, std::vector* matches); + + private: + std::vector leftFeatures, rightFeatures; + int maxDisparity; + float yTolerance; + float uniqueness; + StereoRectification* rect; + bool storeUnmatched; + int leftRightStep; + std::vector > minimumMatches; + cv::Mat_ precompEpilinesStart; // Preocomputed epilines start positions + cv::Size frameSize; + SparseRectification sparseRect; + + // Gets the starting offsets of each row and returns the maximum row length + int getRowOffsets(const std::vector& features, unsigned int* offsets, + int maxRows); + + // Calculates the matching costs using census windows + void calcCosts(const cv::Mat& left, const cv::Mat& right, unsigned int* rowOffsets); + + // Performs a left/right consistency check that is dense in the left image + void denseConsistencyCheck(const cv::Mat& left, const cv::Mat& right); + }; +} + +#endif diff --git a/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/stereorectification.cpp b/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/stereorectification.cpp new file mode 100644 index 00000000..1ccd67e7 --- /dev/null +++ b/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/stereorectification.cpp @@ -0,0 +1,121 @@ +/* + * Author: Konstantin Schauwecker + * Year: 2012 + */ + +#include "stereorectification.h" +#include +#include + +namespace sparsestereo { + using namespace std; + using namespace cv; + using namespace boost; + + scoped_array Epiline::dummyLine; + int Epiline::dummyLineLength = -1; + + void Epiline::setMaxEpilineLength(int len) { + if(dummyLineLength != len) { + dummyLine.reset(new float[len]); + memset(dummyLine.get(), 0, sizeof(float)*len); + dummyLineLength = len; + } + } + + 
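+	// Illustrative usage of StereoRectification (a sketch, not part of the
+	// library; assumes `calib` is a CalibrationResult obtained elsewhere):
+	//
+	//   StereoRectification rect(calib);
+	//   cv::Mat_<unsigned char> rectLeft, rectRight;
+	//   rect.rectifyLeftImage(rawLeft, &rectLeft);
+	//   rect.rectifyRightImage(rawRight, &rectRight);
+	//   cv::Point2f p = rect.rectifyLeftPoint(cv::Point2i(320, 240));
+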
StereoRectification::StereoRectification(const CalibrationResult& calibRes, Interpolation interpolation) + : interpolation(interpolation), calibRes(calibRes) + { + initImageRectMap(); + initPointRectMap(); + initEpilines(); + } + + void StereoRectification::initImageRectMap() { + // Create rectification maps for image rectification + initUndistortRectifyMap(calibRes.cameraMatrix[0], calibRes.distCoeffs[0], calibRes.R[0], + calibRes.P[0], calibRes.imageSize, CV_16SC2, imageRectMap[0][0], imageRectMap[0][1]); + initUndistortRectifyMap(calibRes.cameraMatrix[1], calibRes.distCoeffs[1], calibRes.R[1], + calibRes.P[1], calibRes.imageSize, CV_16SC2, imageRectMap[1][0], imageRectMap[1][1]); + } + + void StereoRectification::initPointRectMap() { + // Create rectification maps for integer point rectification + // Collect all image point coordinates + vector distortedPoints((calibRes.imageSize.width + 1) * (calibRes.imageSize.height + 1)); + for(int y=0; y<=calibRes.imageSize.height; y++) + for(int x=0; x<=calibRes.imageSize.width; x++) + distortedPoints[y * (calibRes.imageSize.width + 1) + x]/*, 0)*/ = Point2f(x, y); + for(int i=0; i<2; i++) { + // Perform rectification + vector undistortedPoints(distortedPoints.size()); + undistortPoints(distortedPoints, undistortedPoints, calibRes.cameraMatrix[i], calibRes.distCoeffs[i], + calibRes.R[i], calibRes.P[i]); + // Store results + pointRectMap[i] = Mat_(calibRes.imageSize.height + 1, calibRes.imageSize.width + 1, Point2f(-1, -1)); + for(int y=0; y<= calibRes.imageSize.height; y++) + for(int x=0; x<= calibRes.imageSize.width; x++) + pointRectMap[i](y,x) = undistortedPoints[y * (calibRes.imageSize.width + 1) + x];//, 0); + } + } + + void StereoRectification::initEpilines() { + for(int i=0; i<2; i++) { + epilines[i] = Mat_(calibRes.imageSize, -1e6); + // First calculate a float undistortion map + Mat_ rectMap[2]; + initUndistortRectifyMap(calibRes.cameraMatrix[i], calibRes.distCoeffs[i], calibRes.R[i], + calibRes.P[i], calibRes.imageSize, CV_32F, rectMap[0], rectMap[1]); + + //Calculate epilines + for(int y=0; y < calibRes.imageSize.height; y++) { + for(int x=0; x=0 && imgX < calibRes.imageSize.width) { + double interpolated = rectMap[1](y, x) + dy / dx * (imgX - rectMap[0](y, x)); + if(interpolated >= 0 && interpolated < calibRes.imageSize.height) { + epilines[i](y, imgX) = interpolated; + } + } + } + } + } + + // Fill epilines index + epilineIndex[i] = Mat_(calibRes.imageSize, -1); + for(int line = 0; line < calibRes.imageSize.height - 1; line++) + for(int x=0; x=0 && imgY < calibRes.imageSize.height) { + if(fabs(imgY - epilines[i](line, x)) < fabs(imgY - epilines[i](line+1, x))) + epilineIndex[i](imgY, x) = line; + else epilineIndex[i](imgY, x) = line + 1; + } + } + } + } + + Point2f StereoRectification::highPrecisionRectifyLeftPoint(Point2f inLeft) const { + Mat_ inLeftPoint(1, 1, inLeft); + vector outLeftPoint; + + undistortPoints(inLeftPoint, outLeftPoint, calibRes.cameraMatrix[0], calibRes.distCoeffs[0], + calibRes.R[0], calibRes.P[0]); + + return outLeftPoint[0]; + } + + Point2f StereoRectification::highPrecisionRectifyRightPoint(Point2f inRight) const { + Mat_ inRightPoint(1, 1, inRight); + vector outRightPoint; + + undistortPoints(inRightPoint, outRightPoint, calibRes.cameraMatrix[1], calibRes.distCoeffs[1], + calibRes.R[1], calibRes.P[1]); + + return outRightPoint[0]; + } +} diff --git a/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/stereorectification.h 
b/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/stereorectification.h new file mode 100644 index 00000000..d9da153e --- /dev/null +++ b/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/stereorectification.h @@ -0,0 +1,163 @@ +/* + * Author: Konstantin Schauwecker + * Year: 2012 + */ + +#ifndef SPARSESTEREO_STEREORECTIFICATION_H +#define SPARSESTEREO_STEREORECTIFICATION_H + +#include +#include +#include +#include +#include +#include "calibrationresult.h" + +namespace sparsestereo { + // Stores one epiline and provides element access + class Epiline{ + public: + Epiline(): line(NULL), dy(0) {} + + // Stores one real epiline + Epiline(const cv::Mat_& refLines, const cv::Mat_ refLineIndices, const cv::Point2f& pointOnLine) { + int index = refLineIndices(int(pointOnLine.y + 0.5), int(pointOnLine.x + 0.5)); + valid = (index != -1); + if(valid) { + assert(index != -1); + dy = pointOnLine.y - refLines(index, (int)round(pointOnLine.x)); + line = &refLines(index, 0); +#ifndef NDEBUG + width = refLines.cols; +#endif + } +#ifndef NDEBUG + else width = 0; +#endif + } + + // Creates a dummy epiline + Epiline(float y): line(dummyLine.get()), dy(y), valid(true) { +#ifndef NDEBUG + width=dummyLineLength; +#endif + } + + // Returns the y-coordinate for the epiline at position x + float at(int x) const { + assert(x >= 0 && x dummyLine; + static int dummyLineLength; + const float* line; + float dy; +#ifndef NDEBUG + int width; +#endif + bool valid; + }; + + // Class for rectifying input images + class StereoRectification { + public: + enum Interpolation { + Cubic = cv::INTER_CUBIC, + Linear = cv::INTER_LINEAR, + Nearest = cv::INTER_NEAREST + }; + + StereoRectification(const CalibrationResult& calibRes, Interpolation iterpolation = Linear); + + // Rectifies a stereo pair + template + void rectifyStereoPair(const std::pair, cv::Mat_ >& input, std::pair, cv::Mat_ >* output) const { + rectifyLeftImage(input.first, &(output->first)); + rectifyRightImage(input.second, &(output->second)); + } + + // Rectifies the left image + template + void rectifyLeftImage(const cv::Mat_& input, cv::Mat_* output) const { + cv::remap(input, *output, imageRectMap[0][0], imageRectMap[0][1], interpolation); + } + + // Rectifies the right image + template + void rectifyRightImage(const cv::Mat_& input, cv::Mat_* output) const { + cv::remap(input, *output, imageRectMap[1][0], imageRectMap[1][1], interpolation); + } + + // Rectifies a left integer point + cv::Point2f rectifyLeftPoint(cv::Point2i inLeft) const { + return pointRectMap[0](inLeft.y, inLeft.x); + } + + // Rectifies a right integer point + cv::Point2f rectifyRightPoint(cv::Point2i inRight) const { + return pointRectMap[1](inRight.y, inRight.x); + } + + // Rectifies a left float point + cv::Point2f rectifyLeftPoint(cv::Point2f inLeft) const { + return interpolatedLookup(pointRectMap[0], inLeft); + } + + // Rectifies a right float point + cv::Point2f rectifyRightPoint(cv::Point2f inRight) const { + return interpolatedLookup(pointRectMap[1], inRight); + } + + // Rectifies a left float point. This implementation is slow! + cv::Point2f highPrecisionRectifyLeftPoint(cv::Point2f inLeft) const; + + // Rectifies a right float point. This implementation is slow! 
+ cv::Point2f highPrecisionRectifyRightPoint(cv::Point2f inRight) const; + + // Returns an epiline going through the given left point + Epiline getLeftEpiline(cv::Point2f p) const { + return Epiline(epilines[0], epilineIndex[0], p); + } + + // Returns an epiline going through the given left point + Epiline getRightEpiline(cv::Point2f p) const { + return Epiline(epilines[1], epilineIndex[1], p); + } + + // Returns the calibration results + const CalibrationResult& getCalibrationResult() const {return calibRes;} + + private: + cv::Mat imageRectMap[2][2]; + cv::Mat_ pointRectMap[2]; + cv::Mat_ epilines[2]; + cv::Mat_ epilineIndex[2]; + Interpolation interpolation; + CalibrationResult calibRes; + + void initImageRectMap(); + void initPointRectMap(); + void initEpilines(); + + // Preforms an bilinear interpolated lookup + cv::Point2f interpolatedLookup(const cv::Mat_& map, cv::Point2f pt) const { + float dx = pt.x - int(pt.x); + float dy = pt.y - int(pt.y); + + cv::Point2f top = (1.0 - dx) * map(int(pt.y), int(pt.x)) + dx * map(int(pt.y), int(pt.x + 1)); + cv::Point2f bottom = (1.0 - dx) * map(int(pt.y + 1), int(pt.x)) + dx * map(int(pt.y + 1), int(pt.x + 1)); + return (1.0 - dy) * top + dy * bottom; + } + }; +} + +#endif diff --git a/perception/navigator_vision/include/navigator_vision_lib/camera_frame.hpp b/perception/navigator_vision/include/navigator_vision_lib/camera_frame.hpp new file mode 100644 index 00000000..a955f4fa --- /dev/null +++ b/perception/navigator_vision/include/navigator_vision_lib/camera_frame.hpp @@ -0,0 +1,223 @@ +#pragma once + +#include +#include +#include +#include +#include +#include +#include +#include +#include + + +namespace nav +{ + +// Helper function prototype +std::string __getROSImgEncoding(const sensor_msgs::ImageConstPtr &image_msg_ptr); + +enum class PixelType +{ +/* + Enumeration for dealing with different image pixel types + The underlying integers for these enums are compatible with OpenCV's + "CV_C" + macros. The benefit to having these is that we do not have to have OpenCV as a dependency. + Theoretically, the CameraFrame Objects need not use a cv::Mat and this class' functionality + would not be affected. The underscore in front of these enums is to solve naming conflicts + with commonly OpenCV and Eigen macros. +*/ + _8UC1=0, _8SC1, _16UC1, _16SC1, _32SC1, _32FC1, _64FC1, + _8UC2=8, _8SC2, _16UC2, _16SC2, _32SC2, _32FC2, _64FC2, + _8UC3=16, _8SC3, _16UC3, _16SC3, _32SC3, _32FC3, _64FC3, + _8UC4=24, _8SC4, _16UC4, _16SC4, _32SC4, _32FC4, _64FC4, _UNKNOWN = -1 +}; + +template, + typename time_t_ = ros::Time, + typename img_scalar_t = uint8_t, + typename float_t = float> +class CameraFrame +{ +/* + This class is used to represent a single frame from a camera. It contains information about + the time the image was taken, the id and camera parameters of the camera that took it and the + image itself. 
This is a templated class whose template parameters select the camera model pointer type, the
+ timestamp type, and the pixel element type (commonly uint8_t for grayscale images and
+ cv::Vec3b for RGB images)
+*/
+
+public:
+
+  ///////////////////////////////////////////////////////////////////////////////////////////////
+  // Constructors and Destructors ///////////////////////////////////////////////////////////////
+  ///////////////////////////////////////////////////////////////////////////////////////////////
+
+  CameraFrame()  // Default Constructor
+  {
+  }
+
+  CameraFrame(const CameraFrame &other)  // Copy Constructor
+  {
+    this->_seq = other._seq;
+    this->_stamp = other._stamp;
+    this->_image = other._image.clone();  // Object will have unique copy of image data
+    this->cam_model_ptr = other.cam_model_ptr;
+  }
+
+  // From ROS img msg
+  CameraFrame(const sensor_msgs::ImageConstPtr &image_msg_ptr,
+              boost::shared_ptr<image_geometry::PinholeCameraModel> &cam_model_ptr,
+              bool is_rectified = false, float_t store_at_scale = 1.0);
+
+  CameraFrame(const sensor_msgs::ImageConstPtr &image_msg_ptr,
+              const sensor_msgs::CameraInfoConstPtr &info_msg_ptr)
+  {
+  }
+
+  ~CameraFrame()
+  {
+  }
+
+  ///////////////////////////////////////////////////////////////////////////////////////////////
+  // Public Methods /////////////////////////////////////////////////////////////////////////////
+  ///////////////////////////////////////////////////////////////////////////////////////////////
+
+  cam_model_ptr_t getCameraModelPtr() const
+  {
+    return cam_model_ptr;
+  }
+
+  unsigned int seq() const
+  {
+    return _seq;
+  }
+
+  time_t_ stamp() const
+  {
+    return _stamp;
+  }
+
+  const cv::Mat_<img_scalar_t>& image() const
+  {
+    return _image;
+  }
+
+  bool rectified() const
+  {
+    return _rectified;
+  }
+
+  float_t getImageScale() const
+  {
+    return _img_scale;
+  }
+
+  void copyImgTo(cv::Mat &dest) const
+  {
+    dest = _image.clone();
+  }
+
+  bool isCameraGeometryKnown() const
+  {
+    return cam_model_ptr != nullptr;
+  }
+
+
+private:
+
+  ///////////////////////////////////////////////////////////////////////////////////////////////
+  // Private Members ////////////////////////////////////////////////////////////////////////////
+  ///////////////////////////////////////////////////////////////////////////////////////////////
+
+  // Frames from the same source will have sequentially increasing seq numbers
+  unsigned int _seq;
+
+  // Time that this image was taken
+  time_t_ _stamp;
+
+  // Stores the image data (not a cv::Mat header pointing to shared image data)
+  cv::Mat_<img_scalar_t> _image;
+
+  // Points to a camera model object that stores information about the intrinsic and extrinsic
+  // geometry of the camera used to take this image
+  cam_model_ptr_t cam_model_ptr;
+
+  // Identifies if this image has already been rectified with the distortion parameters in the
+  // associated camera model object
+  bool _rectified = false;
+
+  // Scale of the image compared to that which would be generated by projecting using the camera
+  // geometry expressed in the associated camera model object
+  float_t _img_scale = 1.0f;
+
+  // Stores the pixel data type
+  nav::PixelType TYPE = nav::PixelType::_UNKNOWN;
+
+
+  ///////////////////////////////////////////////////////////////////////////////////////////////
+  // Private Methods ////////////////////////////////////////////////////////////////////////////
+  ///////////////////////////////////////////////////////////////////////////////////////////////
+
+  void project3DPointToImagePlane(Eigen::Matrix<float_t, 3, 1> cam_frame_pt);
+};
+
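+// Illustrative usage sketch (not part of the library); assumes image_msg_ptr and
+// info_msg_ptr were received from ROS, and stores the frame at half resolution:
+//
+//   boost::shared_ptr<image_geometry::PinholeCameraModel> model_ptr
+//       {new image_geometry::PinholeCameraModel()};
+//   model_ptr->fromCameraInfo(info_msg_ptr);
+//   nav::CameraFrame<> frame(image_msg_ptr, model_ptr, false, 0.5);
+//   cv::imshow("half scale", frame.image());
+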
+/////////////////////////////////////////////////////////////////////////////////////////////////// +////// Templated function implementations ///////////////////////////////////////////////////////// +/////////////////////////////////////////////////////////////////////////////////////////////////// + +// Constructor from ROS image message and ROS pinhole camera model +template +CameraFrame::CameraFrame( + const sensor_msgs::ImageConstPtr &image_msg_ptr, boost::shared_ptr &cam_model_ptr, + bool is_rectified, float_t store_at_scale) +try +{ + // ROS image message decoding + cv_bridge::CvImageConstPtr _ros_img_bridge; + std::string encoding = __getROSImgEncoding(image_msg_ptr); + _ros_img_bridge = cv_bridge::toCvShare(image_msg_ptr, encoding); + _ros_img_bridge->image.copyTo(_image); + + // Resize image as requested + if(store_at_scale != 1.0) + { + cv::resize(_image, _image, cv::Size(0, 0), store_at_scale, store_at_scale); + this->_img_scale = store_at_scale; + } + + // Store ptr to cam model object + this->cam_model_ptr = cam_model_ptr; + + this->_rectified = is_rectified; + + // Get header information + _seq = image_msg_ptr->header.seq; + _stamp = image_msg_ptr->header.stamp; +} +catch(cv_bridge::Exception &e) +{ + ROS_WARN("Error converting sensor_msgs::ImageConstPtr to cv::Mat."); +} +catch( cv::Exception &e ) +{ + std::string err_msg = "Error copying cv::Mat created from ROS image message to the cv::Mat stored in the Camera Frame Object: " + + std::string(e.what()); + ROS_WARN(err_msg.c_str()); + std::cout << "exception caught: " << err_msg << std::endl; +} +catch(const std::exception &e) +{ + ROS_WARN(e.what()); +} + +/////////////////////////////////////////////////////////////////////////////////////////////////// +// Helper Functions /////////////////////////////////////////////////////////////////////////////// +/////////////////////////////////////////////////////////////////////////////////////////////////// + +std::string __getROSImgEncoding(const sensor_msgs::ImageConstPtr &image_msg_ptr) +{ + return image_msg_ptr->encoding; +} + +} // namespace nav + diff --git a/perception/navigator_vision/include/navigator_vision_lib/camera_frame_sequence.hpp b/perception/navigator_vision/include/navigator_vision_lib/camera_frame_sequence.hpp new file mode 100644 index 00000000..0e077b02 --- /dev/null +++ b/perception/navigator_vision/include/navigator_vision_lib/camera_frame_sequence.hpp @@ -0,0 +1,118 @@ +#pragma once + +#include +#include +#include +#include +#include +#include +#include + +namespace nav +{ + +template, + typename time_t_ = ros::Time, + typename img_scalar_t = uint8_t, + typename float_t = float> +class CameraFrameSequence +{ +/* + This class is used to store and operate on camera frame sequences (videos). It contains methods + to make it more convenient to create visual algorithms that take into account temporal + information. + It also has convenient constructors to construct these sequences by subscribing to + ROS camera topics, by taking in frames from a v4l camera or by, loading frames from a saved + video file. + It is also possible to draw on contained camera frames as a batch. 
+*/ + +// Type Alises +using CircularBuffer = boost::circular_buffer>>; + +public: + + /////////////////////////////////////////////////////////////////////////////////////////////// + // Constructors and Destructors /////////////////////////////////////////////////////////////// + /////////////////////////////////////////////////////////////////////////////////////////////// + + // Default Constructor + CameraFrameSequence(size_t capacity) : _frame_ptr_circular_buffer(capacity) + { + } + + // TODO: implement copy and move constructors + + ~CameraFrameSequence() + { + } + + + /////////////////////////////////////////////////////////////////////////////////////////////// + // Public Methods ///////////////////////////////////////////////////////////////////////////// + /////////////////////////////////////////////////////////////////////////////////////////////// + + virtual typename CircularBuffer::iterator begin() + { + return _frame_ptr_circular_buffer.begin(); + } + + virtual typename CircularBuffer::iterator end() + { + return _frame_ptr_circular_buffer.end(); + } + + size_t length() const + { + return _frame_ptr_circular_buffer.size(); + } + + // Accessors + + // Returns a copy of the CameraFrame taken closest to the given time + virtual boost::shared_ptr const> + getFrameFromTime(time_t_) = 0; + + // Returns reference to the nth frame from the most recent. For example frame_sequence[0] is + // the most recent frame, frame_sequence[-1] is the oldest frame, and frame_sequence[-2] is + // the second oldest frame + virtual boost::shared_ptr const> + operator[](int) = 0; + + + /////////////////////////////////////////////////////////////////////////////////////////////// + // Public Members ///////////////////////////////////////////////////////////////////////////// + /////////////////////////////////////////////////////////////////////////////////////////////// + +protected: + + /////////////////////////////////////////////////////////////////////////////////////////////// + // Protected Methods ////////////////////////////////////////////////////////////////////////// + /////////////////////////////////////////////////////////////////////////////////////////////// + + virtual void _addFrame(boost::shared_ptr> &new_frame_ptr) = 0; + + /////////////////////////////////////////////////////////////////////////////////////////////// + // Private Members //////////////////////////////////////////////////////////////////////////// + /////////////////////////////////////////////////////////////////////////////////////////////// + + // Container for all included CameraFrame objects + CircularBuffer _frame_ptr_circular_buffer; + + // cv::Ptr to a shared CameraInfo object for all frames in the sequence. 
If null, it indicates
+  // that the Frames have different CameraInfo objects which should be examined individually
+  cam_model_ptr_t _cam_model_ptr;
+
+  // Shared CameraFrame properties
+  int COLS = -1;
+  int ROWS = -1;
+  nav::PixelType TYPE = nav::PixelType::_UNKNOWN;
+
+  bool CONST_CAM_GEOMETRY = false;
+  bool KNOWN_CAM_GEOMETRY = false;
+
+  time_t_ _start_time {};
+  time_t_ _end_time {};
+};
+
+} // namespace nav
\ No newline at end of file
diff --git a/perception/navigator_vision/include/navigator_vision_lib/camera_model.hpp b/perception/navigator_vision/include/navigator_vision_lib/camera_model.hpp
new file mode 100644
index 00000000..329430b4
--- /dev/null
+++ b/perception/navigator_vision/include/navigator_vision_lib/camera_model.hpp
+#pragma once
+
+#include <Eigen/Core>
+#include <Eigen/Dense>
+
+namespace nav{
+
+template<typename T = double>
+class CameraModel{
+public:
+
+  ///////////////////////////////////////////////////////////////////////////////////////////////
+  // Constructors and Destructors ///////////////////////////////////////////////////////////////
+  ///////////////////////////////////////////////////////////////////////////////////////////////
+
+  CameraModel()
+  {
+  }
+
+  ~CameraModel()
+  {
+  }
+
+private:
+
+  ///////////////////////////////////////////////////////////////////////////////////////////////
+  // Private Methods ////////////////////////////////////////////////////////////////////////////
+  ///////////////////////////////////////////////////////////////////////////////////////////////
+  Eigen::Matrix<T, 3, 4> get_projection_matrix();
+
+
+  ///////////////////////////////////////////////////////////////////////////////////////////////
+  // Private Members ////////////////////////////////////////////////////////////////////////////
+  ///////////////////////////////////////////////////////////////////////////////////////////////
+
+  int ROWS = 0;
+  int COLS = 0;
+
+  // Distortion model (only the plumb bob model is currently supported)
+  T D[5] = {0, 0, 0, 0, 0};
+
+  // Camera Geometry
+  Eigen::Matrix<T, 3, 3> K;  // Camera intrinsics
+  Eigen::Matrix<T, 3, 1> C;  // Center of projection in world frame
+  Eigen::Matrix<T, 3, 3> R;  // Orientation of camera frame relative to world frame
+
+
+};
+
+
+template<typename T>
+Eigen::Matrix<T, 3, 4> CameraModel<T>::get_projection_matrix()
+{
+  // Builds the augmented matrix [I | C] and returns P = K * R * [I | C]
+  Eigen::Matrix<T, 3, 1> v = Eigen::Matrix<T, 3, 1>::Ones();
+  Eigen::DiagonalMatrix<T, 3> I = v.asDiagonal();
+  Eigen::Matrix<T, 3, 4> aug;
+  aug.block(0, 0, 3, 3) = I.toDenseMatrix();
+  aug.col(3) = C;
+  return K * R * aug;
+}
+
+} // namespace nav
\ No newline at end of file
diff --git a/perception/navigator_vision/include/navigator_vision_lib/cv_tools.hpp b/perception/navigator_vision/include/navigator_vision_lib/cv_tools.hpp
new file mode 100644
index 00000000..f6669481
--- /dev/null
+++ b/perception/navigator_vision/include/navigator_vision_lib/cv_tools.hpp
+#pragma once
+
+#include <vector>
+#include <string>
+#include <iostream>
+#include <cmath>
+#include <algorithm>
+#include <numeric>
+#include <limits>
+
+#include <opencv2/core/core.hpp>
+#include <opencv2/highgui/highgui.hpp>
+#include <opencv2/imgproc/imgproc.hpp>
+
+#include <Eigen/Dense>
+
+#include <ros/ros.h>
+
+#include <sensor_msgs/Image.h>
+#include <sensor_msgs/CameraInfo.h>
+#include <image_transport/image_transport.h>
+#include <cv_bridge/cv_bridge.h>
+
+// #define SEGMENTATION_DEBUG
+
+namespace nav {
+
+/// UTILS
+
+template <typename _Matrix_Type_>
+bool pseudoInverse(
+    const _Matrix_Type_ &a, _Matrix_Type_ &result,
+    double epsilon =
+        std::numeric_limits<double>::epsilon());
+
+typedef std::vector<cv::Point> Contour;
+
+// Compute the centroid of an OpenCV contour (Not templated)
+cv::Point contour_centroid(Contour &contour);
+
+// Used as a comparison function for std::sort(), sorting in order of decreasing
+// contour areas
+// returns true if the contourArea(c1) > contourArea(c2)
+bool larger_contour(const Contour &c1, const Contour &c2);
+
+// Filter a histogram of type cv::MatND generated by cv::calcHist using a
+// gaussian kernel
+cv::MatND smooth_histogram(const cv::MatND &histogram,
+                           size_t filter_kernel_size = 3, float sigma = 1.0);
+
+// Generate a one-dimensional gaussian kernel given a kernel size and its
+// standard deviation
+// (sigma)
+std::vector<float> generate_gaussian_kernel_1D(size_t kernel_size = 3,
+                                               float sigma = 1.0);
+
+// Finds positive local maxima greater than (global maximum * thresh_multiplier)
+std::vector<cv::Point> find_local_maxima(const cv::MatND &histogram,
+                                         float thresh_multiplier);
+
+// Finds negative local minima less than (global minimum * thresh_multiplier)
+std::vector<cv::Point> find_local_minima(const cv::MatND &histogram,
+                                         float thresh_multiplier);
+
+// Selects the mode of a multi-modal distribution closest to a given target
+// value
+unsigned int select_hist_mode(std::vector<cv::Point> &histogram_modes,
+                              unsigned int target);
+
+// Takes in a grayscale image and segments out a semi-homogenous foreground
+// object with pixel intensities close to the given target. Tuning of the last
+// three parameters may improve results but default values should work well in
+// most cases.
+void statistical_image_segmentation(const cv::Mat &src, cv::Mat &dest,
+                                    cv::Mat &debug_img, const int hist_size,
+                                    const float **ranges, const int target,
+                                    std::string image_name = "Unnamed Image",
+                                    bool ret_dbg_img = false,
+                                    const float sigma = 1.5,
+                                    const float low_thresh_gain = 0.5,
+                                    const float high_thresh_gain = 0.5);
+
+cv::Mat triangulate_Linear_LS(cv::Mat mat_P_l, cv::Mat mat_P_r,
+                              cv::Mat undistorted_l, cv::Mat undistorted_r);
+
+Eigen::Vector3d kanatani_triangulation(const cv::Point2d &pt1,
+                                       const cv::Point2d &pt2,
+                                       const Eigen::Matrix3d &essential,
+                                       const Eigen::Matrix3d &R);
+
+Eigen::Vector3d lindstrom_triangulation(const cv::Point2d &pt1,
+                                        const cv::Point2d &pt2,
+                                        const Eigen::Matrix3d &essential,
+                                        const Eigen::Matrix3d &R);
+
+struct ImageWithCameraInfo {
+  /**
+   Packages a corresponding sensor_msgs::ImageConstPtr and
+   sensor_msgs::CameraInfoConstPtr
Containers of these objects can be sorted by their + image_time attribute + */ +public: + ImageWithCameraInfo() {} + ImageWithCameraInfo(sensor_msgs::ImageConstPtr _image_msg_ptr, + sensor_msgs::CameraInfoConstPtr _info_msg_ptr); + sensor_msgs::ImageConstPtr image_msg_ptr; + sensor_msgs::CameraInfoConstPtr info_msg_ptr; + ros::Time image_time; + bool operator<(const ImageWithCameraInfo &right) const { + return this->image_time < right.image_time; + } +}; + +class FrameHistory { + /** + Object that subscribes itself to an image topic and stores up to a + user defined + number of ImageWithCameraInfo objects. The frame history can then be + retrieved + in whole or just a portion. + */ +public: + FrameHistory(std::string img_topic, unsigned int hist_size); + ~FrameHistory(); + void image_callback(const sensor_msgs::ImageConstPtr &image_msg, + const sensor_msgs::CameraInfoConstPtr &info_msg); + std::vector + get_frame_history(unsigned int frames_requested); + int frames_available(); + + const std::string topic_name; + const size_t history_size; + +private: + ros::NodeHandle nh; + image_transport::CameraSubscriber _image_sub; + image_transport::ImageTransport _image_transport; + std::vector _frame_history_ring_buffer; + size_t frame_count; +}; + +/// Param Helpers + +struct Range { + cv::Scalar lower; + cv::Scalar upper; +}; + +void range_from_param(std::string ¶m_root, Range &range); + +void inParamRange(cv::Mat &src, Range &range, cv::Mat &dest); + +/// Templated pseudoinverse function implementation +template +bool pseudoInverse( + const _Matrix_Type_ &a, _Matrix_Type_ &result, + double epsilon = + std::numeric_limits::epsilon()) { + if (a.rows() < a.cols()) + return false; + + Eigen::JacobiSVD<_Matrix_Type_> svd = a.jacobiSvd(); + + typename _Matrix_Type_::Scalar tolerance = + epsilon * std::max(a.cols(), a.rows()) * + svd.singularValues().array().abs().maxCoeff(); + + result = svd.matrixV() * + _Matrix_Type_( + _Matrix_Type_((svd.singularValues().array().abs() > tolerance) + .select(svd.singularValues().array().inverse(), + 0)).diagonal()) * + svd.matrixU().adjoint(); +} +} // namespace sub diff --git a/perception/navigator_vision/include/navigator_vision_lib/point_cloud_algorithms.hpp b/perception/navigator_vision/include/navigator_vision_lib/point_cloud_algorithms.hpp new file mode 100644 index 00000000..12558c2b --- /dev/null +++ b/perception/navigator_vision/include/navigator_vision_lib/point_cloud_algorithms.hpp @@ -0,0 +1,70 @@ +#include +#include +#include +#include +#include +#include +#include +#include +#include + +namespace nav{ + +class PcdSubPubAlgorithm{ + /* + virtual base class for algorithms that subscribe to point cloud ROS topics, + operate on the clouds and publish output clouds to a different topic + */ + typedef sensor_msgs::PointCloud2 PCD; + +public: + PcdSubPubAlgorithm(){}; + ~PcdSubPubAlgorithm(){}; + +protected: + virtual void cloud_cb(const PCD &cloud_msg ) = 0; // runs algorithm pipeline when a msg is received + PCD input_pcd; + PCD output_pcd; + ros::NodeHandle nh; + ros::Subscriber cloud_sub; + ros::Publisher cloud_pub; + std::string input_pcd_topic; + std::string output_pcd_topic; +}; + + +class PcdColorizer : public PcdSubPubAlgorithm{ + /* + This class takes adds color information to XYZ only point clouds if the + points in the cloud can be observed by a camera that takes color images + Note: needs accurate TF because it uses the ros tf package to transform + arbitrary point cloud frames into the frame of the color pinhole camera + Note: the rgb camera 
topic should be streaming rectified images + */ + + typedef sensor_msgs::PointCloud2 PCD; + +public: + PcdColorizer(ros::NodeHandle nh, std::string input_pcd_topic, std::string output_pcd_topic, std::string rgb_cam_topic, std::string rgb_cam_frame); + ~PcdColorizer(){} + void _transform_to_cam(); + void _color_pcd(); + +private: + void cloud_cb(const sensor_msgs::PointCloud2 &cloud_msg); + std::string rgb_cam_frame; + std::string rgb_cam_topic; + tf::TransformListener tf_listener; + PCD transformed_pcd; // input pcd transformed to the frame of the rgb camera + image_transport::ImageTransport img_transport {nh}; + image_transport::CameraSubscriber rgb_cam_sub; + + int seq = 0; + + Eigen::Matrix3f cam_intrinsics; + bool _intrinsics_set = false; + sensor_msgs::ImageConstPtr latest_frame_img_msg; + sensor_msgs::CameraInfoConstPtr latest_frame_info_msg; +}; + +} // namwspace nav \ No newline at end of file diff --git a/perception/navigator_vision/include/navigator_vision_lib/ros_camera_stream.hpp b/perception/navigator_vision/include/navigator_vision_lib/ros_camera_stream.hpp new file mode 100644 index 00000000..39fb631f --- /dev/null +++ b/perception/navigator_vision/include/navigator_vision_lib/ros_camera_stream.hpp @@ -0,0 +1,267 @@ +#pragma once + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +// DBG +#include + + +namespace nav +{ + +template +class ROSCameraStream : public CameraFrameSequence, ros::Time, img_scalar_t, float_t> +{ + +// Type Aliases +using CamFrame = CameraFrame, ros::Time, img_scalar_t, float_t>; +using CamFramePtr = boost::shared_ptr, ros::Time, img_scalar_t, float_t>>; +using CamFrameConstPtr = boost::shared_ptr, ros::Time, img_scalar_t, float_t> const>; +using CamFrameSequence = CameraFrameSequence, ros::Time, img_scalar_t, float_t>; + + +public: + + /////////////////////////////////////////////////////////////////////////////////////////////// + // Constructors and Destructors /////////////////////////////////////////////////////////////// + /////////////////////////////////////////////////////////////////////////////////////////////// + + // Default Constructor + ROSCameraStream(ros::NodeHandle nh, size_t buffer_size) + : CamFrameSequence(buffer_size), + _it(nh) + { + this->_nh = nh; + this->_capacity = buffer_size; + } + + ~ROSCameraStream(); + + /////////////////////////////////////////////////////////////////////////////////////////////// + // Public Methods ///////////////////////////////////////////////////////////////////////////// + /////////////////////////////////////////////////////////////////////////////////////////////// + + bool init(std::string camera_topic); + + CamFrameConstPtr getFrameFromTime(ros::Time desired_time); + + CamFrameConstPtr operator[](int i); + + /////////////////////////////////////////////////////////////////////////////////////////////// + // Public Members ///////////////////////////////////////////////////////////////////////////// + /////////////////////////////////////////////////////////////////////////////////////////////// + +private: + + /////////////////////////////////////////////////////////////////////////////////////////////// + // Private Methods //////////////////////////////////////////////////////////////////////////// + /////////////////////////////////////////////////////////////////////////////////////////////// + + void _addFrame(CamFramePtr &new_frame_ptr); + + void _newFrameCb(const sensor_msgs::ImageConstPtr &image_msg_ptr); + + + 
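+    // Illustrative usage of this class (a sketch; mirrors camera_stream_demo.cpp):
+    //
+    //   nav::ROSCameraStream<cv::Vec3b> stream(nh, 200);
+    //   if(stream.init("stereo/left/image_rect_color"))
+    //   {
+    //       cv::Mat newest = stream[0]->image();   // most recent frame
+    //       cv::Mat oldest = stream[-1]->image();  // oldest buffered frame
+    //   }
+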
/////////////////////////////////////////////////////////////////////////////////////////////// + // Private Members //////////////////////////////////////////////////////////////////////////// + /////////////////////////////////////////////////////////////////////////////////////////////// + + // Initialization flag + bool _initialized = false; + + // Mutex for multithreaded to camera frame data + std::mutex _mtx; + + // ROS node handle + ros::NodeHandle _nh; + + // ROS image transportg + image_transport::ImageTransport _it; + + // Custom ROS Callback Queue + ros::CallbackQueue _cb_queue; + + // Flexible topic subscription with potential for filtering and chaining + message_filters::Subscriber img_sub; + + // ROS spinner to handle callbacks in a background thread + ros::AsyncSpinner async_spinner {1, &_cb_queue}; + + // The maximum amount of frames that this object will hold at one time + size_t _capacity = -1; + + // The ros topic we will subscribe to + std::string _img_topic = "uninitialized"; +}; + +/////////////////////////////////////////////////////////////////////////////////////////////////// +////// Templated function implementations ///////////////////////////////////////////////////////// +/////////////////////////////////////////////////////////////////////////////////////////////////// + +// Constructor for when there is an image msg being published w/ a corresponding camera info msg +template +bool ROSCameraStream::init(std::string camera_topic) +{ + this->_img_topic = camera_topic; + bool success = false; + image_transport::CameraSubscriber cam_sub; + + // subscribes to image msg and camera info and initializes CameraModel Object then disconnets subscriber + auto init_lambda = + [&](const sensor_msgs::ImageConstPtr &image_msg_ptr, const sensor_msgs::CameraInfoConstPtr &info_msg_ptr) mutable + { + ROS_WARN("Initializing ROSCameraStream object."); + // Set ptr to camera model object + boost::shared_ptr camera_model_ptr{new image_geometry::PinholeCameraModel()}; + camera_model_ptr->fromCameraInfo(info_msg_ptr); + this->_cam_model_ptr = camera_model_ptr; + + // Set metadata attributes + this->ROWS = image_msg_ptr->height; + this->COLS = image_msg_ptr->width; + this->CONST_CAM_GEOMETRY = true; + this->KNOWN_CAM_GEOMETRY = true; + success = true; + cam_sub.shutdown(); + ROS_WARN("ROSCameraStream object initialized."); + return; + }; + + // Subscribe to both camera topic and camera info topic + cam_sub = _it.subscribeCamera(camera_topic, 100, init_lambda); + + // The amount of time that we will try to wait for a cb from cam_sub + ros::Duration timeout{5, 0}; + auto sub_start = ros::Time::now(); + + // Loop to check if we get callbacks from cam_sub + ros::Rate rate{10}; // 10 HZ so we dont spam cpu + + while(ros::Time::now() - sub_start < timeout) + { + ros::spinOnce(); + if(success) + { + // Subscribe to ROS image topic + img_sub.subscribe(_nh, this->_img_topic, 100, ros::TransportHints(), &_cb_queue); + + // Register callback to process frames published on camera img topic + auto img_cb = [this](const sensor_msgs::ImageConstPtr &image_msg_ptr){this->_newFrameCb(image_msg_ptr);}; + img_sub.registerCallback(img_cb); + + // Start listening for image messages in a background thread + async_spinner.start(); + + this->_initialized = true; + return true; + } + rate.sleep(); + } + + std::string err_msg {"timeout: failed to initialize ROSCameraStream object with camera topic '"}; + err_msg += camera_topic + "'."; + ROS_WARN(err_msg.c_str()); + this->_initialized = false; + return false; 
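+    // (Note: init() gives the camera info subscription five seconds to produce a
+    //  message; on success the image subscriber is serviced by async_spinner on
+    //  the private callback queue _cb_queue, so frames keep arriving without the
+    //  caller ever spinning the global ROS queue.)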
+}
+// TODO: handle construction from img msg when there is no matching camera info msg
+
+template <typename img_scalar_t, typename float_t>
+typename ROSCameraStream<img_scalar_t, float_t>::CamFrameConstPtr
+ROSCameraStream<img_scalar_t, float_t>::getFrameFromTime(ros::Time desired_time)
+{
+  CamFrameConstPtr closest_in_time = this->operator[](0);
+  double min_abs_nsec_time_diff = fabs((this->operator[](0)->stamp() - desired_time).toNSec());
+  double abs_nsec_time_diff = -1;
+  for(CamFrameConstPtr frame_ptr : this->_frame_ptr_circular_buffer)
+  {
+    abs_nsec_time_diff = fabs((frame_ptr->stamp() - desired_time).toNSec());
+    if(abs_nsec_time_diff < min_abs_nsec_time_diff)
+    {
+      closest_in_time = frame_ptr;
+      min_abs_nsec_time_diff = abs_nsec_time_diff;
+    }
+  }
+  return closest_in_time;
+}
+
+template <typename img_scalar_t, typename float_t>
+typename ROSCameraStream<img_scalar_t, float_t>::CamFrameConstPtr
+ROSCameraStream<img_scalar_t, float_t>::operator[](int i)
+{
+  // Prevent adding new frames while frames are being accessed
+  _mtx.lock();
+
+  // shared_ptr to a dynamically allocated reference frame object
+  CamFrameConstPtr shared_ptr_to_const_frame;
+
+  try
+  {
+    if(i >= 0)  // regular access for non-negative indices
+    {
+      shared_ptr_to_const_frame = this->_frame_ptr_circular_buffer[i];
+    }
+    else{  // reverse access for negative indices (ex. [-1] refers to the last element)
+      size_t past_last_idx = this->_frame_ptr_circular_buffer.end() - this->_frame_ptr_circular_buffer.begin();
+      shared_ptr_to_const_frame = this->_frame_ptr_circular_buffer[past_last_idx + i];
+    }
+  }
+  catch(std::exception &e)
+  {
+    std::string err_msg = "The circular buffer index you are trying to access is out of bounds:\n" + std::string(e.what());
+    ROS_WARN("%s", err_msg.c_str());
+  }
+
+  _mtx.unlock();
+  return shared_ptr_to_const_frame;
+}
+
+template <typename img_scalar_t, typename float_t>
+void
+ROSCameraStream<img_scalar_t, float_t>::_addFrame(CamFramePtr &new_frame_ptr)
+{
+  // Prevent accessing frames while new frames are being added
+  _mtx.lock();
+  this->_frame_ptr_circular_buffer.push_front(new_frame_ptr);
+  _mtx.unlock();
+}
+
+template <typename img_scalar_t, typename float_t>
+void ROSCameraStream<img_scalar_t, float_t>::_newFrameCb(const sensor_msgs::ImageConstPtr &image_msg_ptr)
+{
+  // Check if the topic name contains the string "rect"
+  bool rectified = this->_img_topic.find(std::string("rect")) != std::string::npos;
+
+  // Create shared pointer to dynamically allocated CameraFrame object constructed from ros img msg
+  boost::shared_ptr<CameraFrame<cv::Mat, ros::Time, img_scalar_t, float_t>> new_frame_ptr
+    {new CameraFrame<cv::Mat, ros::Time, img_scalar_t, float_t>(image_msg_ptr, this->_cam_model_ptr, rectified, 1.0)};
+
+  // Add shared pointer to CameraFrame object to the circular buffer
+  _addFrame(new_frame_ptr);
+
+  // Update the bounding timestamps for the frame sequence
+  this->_start_time = image_msg_ptr->header.stamp;
+  this->_end_time = this->_frame_ptr_circular_buffer.back()->stamp();
+}
+
+template <typename img_scalar_t, typename float_t>
+ROSCameraStream<img_scalar_t, float_t>::~ROSCameraStream()
+{
+  img_sub.unsubscribe();
+  _cb_queue.clear();
+  _cb_queue.disable();
+}
+
+}  // namespace nav
\ No newline at end of file
diff --git a/perception/navigator_vision/include/navigator_vision_lib/visualization.hpp b/perception/navigator_vision/include/navigator_vision_lib/visualization.hpp
new file mode 100644
index 00000000..d3c420c0
--- /dev/null
+++ b/perception/navigator_vision/include/navigator_vision_lib/visualization.hpp
@@ -0,0 +1,24 @@
+#pragma once
+
+// #include "geometry.hpp"
+// #include "segmentation.hpp"
+// #include "typedefs.hpp"
+#include
+#include
+#include "ros/ros.h"
+#include "geometry_msgs/Pose.h"
+
+namespace nav {
+
+// ******* 3D Visualization *******
+class RvizVisualizer {
+ public:
+  ros::Publisher rviz_pub;
+  ros::NodeHandle nh;
+
  RvizVisualizer(std::string rviz_topic);
  void visualize_buoy(geometry_msgs::Pose &pose, std::string &frame_id);
  void visualize_torpedo_board(geometry_msgs::Pose& pose, Eigen::Quaterniond orientation,
                               std::vector<geometry_msgs::Point>& targets, std::vector<Eigen::Vector3d>& corners3d,
                               std::string& frame_id);
};
}
diff --git a/perception/navigator_vision/nodes/camera_stream_demo.cpp b/perception/navigator_vision/nodes/camera_stream_demo.cpp
new file mode 100644
index 00000000..b7ca25c2
--- /dev/null
+++ b/perception/navigator_vision/nodes/camera_stream_demo.cpp
@@ -0,0 +1,39 @@
+#include
+#include
+#include
+#include  // don't forget this include for camera stream functionality
+
+using namespace std;
+
+int main(int argc, char **argv)
+{
+  // Node setup
+  string cam_topic {"stereo/left/image_rect_color"};
+  size_t history_length{200};
+  ros::init(argc, argv, "camera_stream_demo");
+  ros::NodeHandle nh;
+
+  // These two lines are all you need to create a self-updating buffer
+  // of images published to an image topic.
+  // Template argument should be cv::Vec3b for color images or uint8_t
+  // for grayscale images
+  nav::ROSCameraStream<cv::Vec3b> left_cam_stream(nh, history_length);
+  left_cam_stream.init(cam_topic);
+
+  // Display most recent and oldest frame in the buffer
+  while(cv::waitKey(100) == -1)
+  {
+    size_t size = left_cam_stream.length();
+    if(size == history_length)
+    {
+      cv::imshow("newest_frame", left_cam_stream[0]->image());
+      cv::imshow("oldest_frame", left_cam_stream[-1]->image());
+
+      // It is also possible to request a frame taken at a specific time
+      /*
+        Mat frame_at_time_t = left_cam_stream.getFrameFromTime(desired_time)->image();
+      */
+    }
+  }
+
+}
diff --git a/perception/navigator_vision/nodes/camera_to_lidar/CameraLidarTransformer.cpp b/perception/navigator_vision/nodes/camera_to_lidar/CameraLidarTransformer.cpp
new file mode 100644
index 00000000..7d7ed915
--- /dev/null
+++ b/perception/navigator_vision/nodes/camera_to_lidar/CameraLidarTransformer.cpp
@@ -0,0 +1,199 @@
+#include "CameraLidarTransformer.hpp"
+
+CameraLidarTransformer::CameraLidarTransformer()
+    : nh("camera_lidar_transformer"),
+      tfBuffer(),
+      tfListener(tfBuffer, nh),
+      lidarSub(nh, "/velodyne_points", 10),
+      lidarCache(lidarSub, 10)
+#ifdef DO_ROS_DEBUG
+      ,
+      image_transport(nh)
+#endif
+{
+#ifdef DO_ROS_DEBUG
+  points_debug_publisher =
+      image_transport.advertise("/camera_lidar_transformer/points_debug", 1);
+  pubMarkers = nh.advertise<visualization_msgs::MarkerArray>(
+      "/dock_shapes/raylider/markers", 10);
+#endif
+  nh.param<std::string>("camera_info_topic", camera_info_topic, "/right/right/camera_info");
+  // ~nh.param("MIN_Z",MIN_Z,1);
+  // ~nh.param("MAX_Z_ERROR",MAX_Z_ERROR,0.2);
+  std::cout << "Constructed" << std::endl;
+  cameraInfoSub =
+      nh.subscribe(camera_info_topic, 1,
+                   &CameraLidarTransformer::cameraInfoCallback, this);
+  transformServiceServer = nh.advertiseService(
+      "transform_camera", &CameraLidarTransformer::transformServiceCallback,
+      this);
+}
+void CameraLidarTransformer::cameraInfoCallback(sensor_msgs::CameraInfo info)
+{
+  camera_info = info;
+}
+bool CameraLidarTransformer::transformServiceCallback(
+    navigator_msgs::CameraToLidarTransform::Request &req,
+    navigator_msgs::CameraToLidarTransform::Response &res) {
+  visualization_msgs::MarkerArray markers;
+  sensor_msgs::PointCloud2ConstPtr scloud =
+      lidarCache.getElemAfterTime(req.header.stamp);
+  if (!scloud) {
+    res.success = false;
+    res.error = navigator_msgs::CameraToLidarTransform::Response::CLOUD_NOT_FOUND;
+    return true;
+  }
+  geometry_msgs::TransformStamped transform =
      tfBuffer.lookupTransform(req.header.frame_id, "velodyne", req.header.stamp);
+  sensor_msgs::PointCloud2 cloud_transformed;
+  tf2::doTransform(*scloud, cloud_transformed, transform);
+#ifdef DO_ROS_DEBUG
+  cv::Mat debug_image(camera_info.height, camera_info.width, CV_8UC3,
+                      cv::Scalar(0));
+  cv::circle(debug_image, cv::Point(req.point.x, req.point.y), 3,
+             cv::Scalar(255, 0, 0), 5);
+#endif
+  Eigen::Matrix3d matrixFindPlaneA;
+  matrixFindPlaneA << 0, 0, 0, 0, 0, 0, 0, 0, 0;
+  Eigen::Vector3d matrixFindPlaneB(0, 0, 0);
+
+  // Load the latest intrinsics once per request
+  cam_model.fromCameraInfo(camera_info);
+
+  // Tracks the closest lidar point to the requested camera point
+  double minDistance = std::numeric_limits<double>::max();
+  for (uint32_t ii = 0, jj = 0; ii < cloud_transformed.width;
+       ++ii, jj += cloud_transformed.point_step) {
+    floatConverter x, y, z, i;
+    for (int kk = 0; kk < 4; ++kk) {
+      x.data[kk] = cloud_transformed.data[jj + kk];
+      y.data[kk] = cloud_transformed.data[jj + 4 + kk];
+      z.data[kk] = cloud_transformed.data[jj + 8 + kk];
+      i.data[kk] = cloud_transformed.data[jj + 16 + kk];
+    }
+    if (int(z.f) < 0 || int(z.f) > 30) continue;
+    cv::Point2d point = cam_model.project3dToPixel(cv::Point3d(x.f, y.f, z.f));
+
+    if (int(point.x) > 0 && int(point.x) < camera_info.width &&
+        int(point.y) > 0 && int(point.y) < camera_info.height) {
+#ifdef DO_ROS_DEBUG
+      visualization_msgs::Marker marker_point;
+      marker_point.header = req.header;
+      marker_point.header.seq = 0;
+      marker_point.header.frame_id = req.header.frame_id;
+      marker_point.id = (int)ii;
+      marker_point.type = visualization_msgs::Marker::CUBE;
+      marker_point.pose.position.x = x.f;
+      marker_point.pose.position.y = y.f;
+      marker_point.pose.position.z = z.f;
+      marker_point.scale.x = 1;
+      marker_point.scale.y = 0.1;
+      marker_point.scale.z = 0.1;
+      marker_point.color.a = 1.0;
+      marker_point.color.r = 0.0;
+      marker_point.color.g = 1.0;
+      marker_point.color.b = 0.0;
+      //marker_point.lifetime = ros::Duration(0.5);
+      markers.markers.push_back(marker_point);
+
+      // Draw
+      if (int(point.x - 20) > 0 && int(point.x + 20) < camera_info.width &&
+          int(point.y - 20) > 0 && int(point.y + 20) < camera_info.height) {
+        cv::circle(debug_image, point, 3,
+                   cv::Scalar(0, 0, 255 * std::min(1.0, z.f / 20.0)), -1);
+      }
+#endif
+      double distance = sqrt(pow(point.x - req.point.x, 2) + pow(point.y - req.point.y, 2));  // Distance (2D) from request point to projected lidar point
+      if (distance < req.tolerance) {
+#ifdef DO_ROS_DEBUG
+        if (int(point.x - 20) > 0 && int(point.x + 20) < camera_info.width &&
+            int(point.y - 20) > 0 && int(point.y + 20) < camera_info.height) {
+          cv::circle(debug_image, point, 3, cv::Scalar(0, 255, 0), -1);
+        }
+#endif
+        matrixFindPlaneA(0, 0) += x.f * x.f;
+        matrixFindPlaneA(1, 0) += y.f * x.f;
+        matrixFindPlaneA(2, 0) += x.f;
+        matrixFindPlaneA(0, 1) += x.f * y.f;
+        matrixFindPlaneA(1, 1) += y.f * y.f;
+        matrixFindPlaneA(2, 1) += y.f;
+        matrixFindPlaneA(0, 2) += x.f;
+        matrixFindPlaneA(1, 2) += y.f;
+        matrixFindPlaneA(2, 2) += 1;
+        matrixFindPlaneB(0, 0) -= x.f * z.f;
+        matrixFindPlaneB(1, 0) -= y.f * z.f;
+        matrixFindPlaneB(2, 0) -= z.f;
+
+        geometry_msgs::Point geo_point;
+        geo_point.x = x.f;
+        geo_point.y = y.f;
+        geo_point.z = z.f;
+        if (distance < minDistance)
+        {
+          res.closest = geo_point;
+          minDistance = distance;
+        }
+        res.transformed.push_back(geo_point);
+      }
+    }
+  }
+  if (res.transformed.size() < 1) {
+    res.success = false;
+    res.error = navigator_msgs::CameraToLidarTransform::Response::NO_POINTS_FOUND;
+    return true;
+  }
+
+  // Filter out lidar points behind the plane (I don't think this will work, no good way to do this)
+  // ~double min_z = (*std::min_element(res.transformed.begin(),res.transformed.end(), [=] (const geometry_msgs::Point& a,const geometry_msgs::Point& b) {
+  // ~return a.z < b.z && a.z > MIN_Z;
+  // ~})).z;
+  // ~res.transformed.erase(std::remove_if(res.transformed.begin(),res.transformed.end(),[min_z] (const geometry_msgs::Point& p) {
+  // ~return (p.z - min_z) < MAX_Z_ERROR;
+  // ~}),res.transformed.end());
+
+  res.distance = res.closest.z;
+  Eigen::Vector3d normalvec = matrixFindPlaneA.colPivHouseholderQr()
+                                  .solve(matrixFindPlaneB)
+                                  .normalized();
+  geometry_msgs::Vector3 normalvec_ros;
+  normalvec_ros.x = normalvec(0, 0);
+  normalvec_ros.y = normalvec(1, 0);
+  normalvec_ros.z = normalvec(2, 0);
+  res.normal = normalvec_ros;
+  std::cout << "Plane solution: " << normalvec << std::endl;
+
+#ifdef DO_ROS_DEBUG
+  // Create marker for normal
+  visualization_msgs::Marker marker_normal;
+  marker_normal.header = req.header;
+  marker_normal.header.seq = 0;
+  marker_normal.header.frame_id = "right_right_cam";
+  marker_normal.id = 3000;
+  marker_normal.type = visualization_msgs::Marker::ARROW;
+
+  geometry_msgs::Point sdp_normalvec_ros;
+  sdp_normalvec_ros.x = normalvec_ros.x + res.transformed[0].x;
+  sdp_normalvec_ros.y = normalvec_ros.y + res.transformed[0].y;
+  sdp_normalvec_ros.z = normalvec_ros.z + res.transformed[0].z;
+  marker_normal.points.push_back(res.transformed[0]);
+  marker_normal.points.push_back(sdp_normalvec_ros);
+
+  marker_normal.scale.x = 0.1;
+  marker_normal.scale.y = 0.5;
+  marker_normal.scale.z = 0.5;
+  marker_normal.color.a = 1.0;
+  marker_normal.color.r = 1.0;
+  marker_normal.color.g = 0.0;
+  marker_normal.color.b = 0.0;
+  markers.markers.push_back(marker_normal);
+
+  // Publish 3D debug markers
+  pubMarkers.publish(markers);
+
+  // Publish debug image
+  cv_bridge::CvImage ros_debug_image;
+  ros_debug_image.encoding = "bgr8";
+  ros_debug_image.image = debug_image.clone();
+  points_debug_publisher.publish(ros_debug_image.toImageMsg());
+#endif
+  res.success = true;
+  return true;
+}
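The loop above accumulates the 3x3 normal equations of a least-squares plane fit
over every lidar return that lands within `tolerance` pixels of the requested
point: solving matrixFindPlaneA * v = matrixFindPlaneB gives coefficients
(a, b, c) of the plane z = -(a*x + b*y + c). A minimal numpy sketch of the same
fit (`fit_plane_coeffs` is an illustrative helper, not part of this patch):

    import numpy as np

    def fit_plane_coeffs(points):
        # points: (N, 3) array of lidar hits near the requested pixel.
        # Least-squares solve of z = -(a*x + b*y + c); the normal equations
        # A^T A [a, b, c]^T = -A^T z are exactly what the loop accumulates.
        x, y, z = points.T
        A = np.column_stack([x, y, np.ones_like(x)])
        a, b, c = np.linalg.lstsq(A, -z, rcond=None)[0]
        unit_normal = np.array([a, b, 1.0]) / np.linalg.norm([a, b, 1.0])
        return (a, b, c), unit_normal

Note the fitted plane a*x + b*y + z + c = 0 has geometric unit normal
proportional to (a, b, 1); the code above normalizes the raw coefficient
vector (a, b, c) instead, which only approximates that normal when |c| is
small.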
diff --git a/perception/navigator_vision/nodes/camera_to_lidar/CameraLidarTransformer.hpp b/perception/navigator_vision/nodes/camera_to_lidar/CameraLidarTransformer.hpp
new file mode 100644
index 00000000..8e49de89
--- /dev/null
+++ b/perception/navigator_vision/nodes/camera_to_lidar/CameraLidarTransformer.hpp
@@ -0,0 +1,69 @@
+#pragma once
+#include
+// PCL specific includes
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+
+#define DO_ROS_DEBUG
+#ifdef DO_ROS_DEBUG
+#include "opencv2/opencv.hpp"
+#include
+#include
+#include
+#endif
+
+class CameraLidarTransformer
+{
+  private:
+    union floatConverter
+    {
+      float f;
+      struct
+      {
+        uint8_t data[4];
+      };
+    };
+    std::string camera_info_topic;
+    // ~double MIN_Z,MAX_Z_ERROR;
+    ros::NodeHandle nh;
+    ros::ServiceServer transformServiceServer;
+    tf2_ros::Buffer tfBuffer;
+    tf2_ros::TransformListener tfListener;
+    message_filters::Subscriber<sensor_msgs::PointCloud2> lidarSub;
+    message_filters::Cache<sensor_msgs::PointCloud2> lidarCache;
+    ros::Subscriber cameraInfoSub;
+    image_geometry::PinholeCameraModel cam_model;
+    sensor_msgs::CameraInfo camera_info;
+    ros::Publisher pubMarkers;
+    void cameraInfoCallback(const sensor_msgs::CameraInfo info);
+    bool transformServiceCallback(navigator_msgs::CameraToLidarTransform::Request
&req,navigator_msgs::CameraToLidarTransform::Response &res); +#ifdef DO_ROS_DEBUG + image_transport::ImageTransport image_transport; + image_transport::Publisher points_debug_publisher; +#endif + public: + CameraLidarTransformer(); +}; diff --git a/perception/navigator_vision/nodes/camera_to_lidar/main.cpp b/perception/navigator_vision/nodes/camera_to_lidar/main.cpp new file mode 100644 index 00000000..ff70110c --- /dev/null +++ b/perception/navigator_vision/nodes/camera_to_lidar/main.cpp @@ -0,0 +1,9 @@ +#include +#include "CameraLidarTransformer.hpp" + +int main (int argc, char** argv) +{ + ros::init (argc, argv, "camera_lidar_transformer"); + CameraLidarTransformer transformer; + ros::spin (); +} diff --git a/perception/navigator_vision/nodes/model_detector.py b/perception/navigator_vision/nodes/model_detector.py new file mode 100755 index 00000000..d5b1b838 --- /dev/null +++ b/perception/navigator_vision/nodes/model_detector.py @@ -0,0 +1,314 @@ +#!/usr/bin/env python +import roslib +import sys +import rospy +import cv2 +from sensor_msgs.msg import Image +from cv_bridge import CvBridge, CvBridgeError +import numpy as np +from scan_the_code_lib.model_tracker import ModelTracker +from navigator_msgs.srv import ScanTheCodeMission, ScanTheCodeMissionResponse + + +class image_converter: + + def __init__(self): + + self.bridge = CvBridge() + self.image_sub = rospy.Subscriber("/stereo/left/image_raw", Image, self.callback, queue_size=10) + self.fgbg = cv2.BackgroundSubtractorMOG() + self.model_tracker = ModelTracker() + self.mission_complete = False + self.colors = [] + self.on = False + + def get_foreground_mask(self, cv_image): + fgmask = self.fgbg.apply(cv_image) + + # DEBUG + cv2.imshow('mask', fgmask) + cv2.waitKey(33) + + return fgmask + + def get_contours(self, mask): + contours, hierarchy = cv2.findContours(mask, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE) + # moments = [] + # for i, val in enumerate(contours): + # M = cv2.moments(val) + # if M["m00"] != 0: + # cX = int(M["m10"] / M["m00"]) + # cY = int(M["m01"] / M["m00"]) + # else: + # cX, cY = 0, 0 + # moments.append([cX,cY]) + + return contours, hierarchy + + def get_best_contours(self, contours): + # Calculate the area of each contour + areas = [] + for i, val in enumerate(contours): + a = cv2.contourArea(val, 0) + areas.append([i, a]) + + # Choose the six biggest + areas = sorted(areas, key=lambda x: x[1], reverse=True) + areas = areas[:10] + + # Get the moments that correspond to the biggest areas + # my_moments = [[x[0], moments[x[0]]] for x in areas] + + # # calculate the average of these moment locations + # avgx = sum(i[1][0] for i in my_moments)/len(my_moments) + # avgy = sum(i[1][1] for i in my_moments)/len(my_moments) + # maxs = [] + + # #print avgx, avgy + # for i, m in enumerate(my_moments): + # # Get the euclidian distance of the moment to the average + # xd = m[1][0] - avgx + # yd = m[1][1] - avgy + # maxs.append([m[0],np.linalg.norm([xd,yd])]) + + # # Take the 4 closest to the average + # maxs = sorted(maxs, key=lambda x: x[1]) + # maxs = maxs[:4] + + # Get the contours that correspond to those moments + + my_contours = [contours[x[0]] for x in areas] + + # print len(my_contours) + + return my_contours + + def get_bounding_rect(self, contours): + xmin = 1000 + xmax = 0 + ymin = 1000 + ymax = 0 + for i, cont in enumerate(contours): + for j, _val in enumerate(cont): + if(_val[0][0] < xmin): + xmin = _val[0][0] + if(_val[0][0] > xmax): + xmax = _val[0][0] + if(_val[0][1] < ymin): + ymin = _val[0][1] + if(_val[0][1] > 
ymax): + ymax = _val[0][1] + # print xmin, ymin, xmax, ymax + + return xmin, ymin, xmax, ymax + + def get_non_furthest_points(self, i, points): + dist_points = [] + for ind, val in enumerate(points): + if(ind != i): + diff = np.subtract(points[i], val) + dist_points.append([np.linalg.norm(diff), points[ind]]) + + dist_points = sorted(dist_points, key=lambda x: x[0]) + + return dist_points[0][1], dist_points[1][1] + + def geometry_test(self, fp): + if(len(fp) < 4): + return False + for i, p in enumerate(fp): + p1, p2 = self.get_non_furthest_points(i, fp) + diff1 = np.subtract(p1, p)[0] + diff2 = np.subtract(p2, p)[0] + diff1 /= np.linalg.norm(diff1) + diff2 /= np.linalg.norm(diff2) + + val = abs(np.dot(diff1, diff2)) + if(val > .05): + return False + + return True + + def get_salient_points(self, img, rect): + if(rect[1] == rect[3] or rect[0] == rect[2]): + return + roi = img[rect[1]:rect[3], rect[0]:rect[2]] + gray = cv2.cvtColor(roi, cv2.COLOR_BGR2GRAY) + fp = cv2.goodFeaturesToTrack(gray, 35, .05, 10.0) # [, corners[, mask[, blockSize[, useHarrisDetector[, k]]]]] + if(fp is None): + return + # DEBUG + img_clone = gray.copy() + for i, val in enumerate(fp): + cv2.circle(img_clone, tuple(val[0]), 3, (255, 255, 255), -1) + + cv2.imshow("salient_points", img_clone) + cv2.waitKey(33) + + def get_rectangle(self, img, img_area, xmin_a, ymin_a): + gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY) + bf = cv2.bilateralFilter(gray, 11, 13, 13) + edged = cv2.Canny(gray, 30, 200) + drawimg = edged.copy() + largest = edged.copy() + blah = edged.copy() + contours, hierarchy = cv2.findContours(blah, 1, 2) + + drawimg_p = gray.copy() + + h, w, c = img.shape + + max_i = -1 + max_ar = 0 + for i, cnt in enumerate(contours): + approx = cv2.approxPolyDP(cnt, 0.01 * cv2.arcLength(cnt, True), True) + if(cv2.contourArea(cnt, 0) > 1): + # if(len(approx) >= 3 and len(approx) <= 5 or len(approx) == 9 or len(approx) == 15): + cv2.drawContours(blah, [cnt], 0, 255, -1) + ar = cv2.contourArea(cnt, 0) + # print len(approx), "len" + # print ar, img_area + cv2.imshow("rectangles", blah) + cv2.waitKey(0) + + if(ar > max_ar): + max_ar = ar + max_i = i + + # print max_ar + cv2.drawContours(largest, contours, max_i, (255, 0, 0), -1, 8, hierarchy) + cv2.imshow("largest", largest) + cv2.waitKey(33) + + if(max_ar > 100): + xmin, ymin, xmax, ymax = self.get_bounding_rect([contours[max_i]]) + smear = 5 + xmin -= smear + ymin -= smear + xmax += smear + ymax += smear + if(xmin < 0): + xmin = 0 + if(ymin < 0): + ymin = 0 + if(xmax > w): + xmax = w + if(ymax > h): + ymax = h + fp = cv2.goodFeaturesToTrack(gray[ymin:ymax, xmin:xmax], 4, .05, 10.0) + drawimgpp = gray[ymin:ymax, xmin:xmax].copy() + for i, val in enumerate(fp): + cv2.circle(drawimgpp, tuple(val[0]), 3, (255, 255, 255), -1) + cv2.imshow("salient_points", drawimgpp) + cv2.waitKey(33) + + keep = self.geometry_test(fp) + + if(keep): + for v in fp: + v = v[0] + cv2.waitKey(33) + + v[0] += xmin_a + xmin + + v[1] += ymin_a + ymin + return fp + + return None + + def callback(self, data): + if(not self.on): + return + try: + cv_image = self.bridge.imgmsg_to_cv2(data, "bgr8") + except CvBridgeError as e: + print(e) + + if(self.mission_complete): + return + + # print cv_image.shape + + img_clone = cv_image.copy() + img_clone1 = cv_image.copy() + gray = cv2.cvtColor(cv_image, cv2.COLOR_BGR2GRAY) + + # Get the background mask + mask = self.get_foreground_mask(cv_image) + + # Get the contours of the image + contours, hierarchy = self.get_contours(mask) + + if(len(contours) == 0): + return + 
+ # Pick the good contours from this list (biggest area) + best_contours = self.get_best_contours(contours) + + # Get the bounding box to those contours + xmin, ymin, xmax, ymax = self.get_bounding_rect(best_contours) + # print xmin, ymin, xmax, ymax + + # DEBUG + cv2.rectangle(img_clone, (xmin, ymin), (xmax, ymax), (0, 255, 0), 3) + cv2.imshow("bounding_rect", img_clone) + cv2.waitKey(33) + + # Get rectangle shapes in image + + h, w, r = cv_image.shape + + self.mission_complete = self.model_tracker.track_models(cv_image) + + if(self.mission_complete): + print "MISSION COMPLETE" + + rect = self.get_rectangle(cv_image[ymin:ymax, xmin:xmax], w * h, xmin, ymin) + + if(rect is not None): + self.model_tracker.register_model(rect, cv_image) + + if(rect is not None): + for i, val in enumerate(rect): + cv2.circle(img_clone1, tuple(val[0]), 3, (255, 255, 255), -1) + cv2.imshow("model", img_clone1) + cv2.waitKey(33) + + def mission_status(self, req): + obs = False + colors = None + if(len(self.model_tracker.models) > 0): + obs = True + if(self.mission_complete): + colors = self.model_tracker.colors + found = self.mission_complete + return ScanTheCodeMissionResponse(obs, found, colors) + + def activate(self, req): + if(self.on): + del self.fgbg + del self.model_tracker + self.fgbg = cv2.BackgroundSubtractorMOG() + self.model_tracker = ModelTracker() + self.mission_complete = False + self.colors = [] + self.on = False + if(not self.on): + self.on = True + return ScanTheCodeMissionResponse(None, None, None) + + +def main(args): + ic = image_converter() + rospy.init_node('model_detector', anonymous=True) + status = rospy.Service('/vision/scan_the_code_status', ScanTheCodeMission, ic.mission_status) + activate = rospy.Service('/vision/scan_the_code_activate', ScanTheCodeMission, ic.activate) + + try: + rospy.spin() + except KeyboardInterrupt: + print("Shutting down") + cv2.destroyAllWindows() + +if __name__ == '__main__': + main(sys.argv) diff --git a/perception/navigator_vision/nodes/scan_the_code_lib/__init__.py b/perception/navigator_vision/nodes/scan_the_code_lib/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/perception/navigator_vision/nodes/scan_the_code_lib/model.py b/perception/navigator_vision/nodes/scan_the_code_lib/model.py new file mode 100644 index 00000000..6403e13c --- /dev/null +++ b/perception/navigator_vision/nodes/scan_the_code_lib/model.py @@ -0,0 +1,100 @@ +import sys +import cv2 +import numpy as np +import time + + +class Model: + + def __init__(self, prev_points, prev_frame, _id): + self.prev_points = prev_points + self.prev_frame = prev_frame + self.time_started = time.time() + self.turned_black = False + self.colors = [] + self.prev_color = None + self.colors_found = 0 + self.my_id = _id + self.offset = None + self.offset_sum = [0, 0, 0, 0] + self.num_offset = 0 + + def get_bounding_rect(self): + xmin = 1000 + xmax = 0 + ymin = 1000 + ymax = 0 + for i, cont in enumerate(self.prev_points): + for j, _val in enumerate(cont): + if(_val[0] < xmin): + xmin = _val[0] + if(_val[0] > xmax): + xmax = _val[0] + if(_val[1] < ymin): + ymin = _val[1] + if(_val[1] > ymax): + ymax = _val[1] + # print xmin, ymin, xmax, ymax + + return xmin, ymin, xmax, ymax + + def get_color(self, scalar): + blackness1 = scalar[0] - scalar[1] + blackness2 = scalar[1] - scalar[2] + if(abs(blackness1) < 30 and abs(blackness2) < 30): + o = [0, scalar[0] - scalar[1], scalar[0] - scalar[2], 0] + self.num_offset += 1 + self.offset_sum = np.add(o, self.offset_sum) + self.offset = 
np.append(self.offset_sum / self.num_offset, 0) + return 'k' + + if(self.offset is not None): + scalar = np.add(scalar, self.offset) + + a = scalar[0] + b = scalar[1] + c = scalar[2] + if(abs(a - b) < 10 and (b - c) < 70 and b > c and a > c): + return 'y' + elif(a > b and a > c): + return 'b' + elif(b > a and b > c): + return 'g' + elif(c > a and c > b): + return 'r' + else: + return 'g' + + def check_for_colors(self): + if(self.colors_found == 3): + return True, self.colors + xmin, ymin, xmax, ymax = self.get_bounding_rect() + scalar = cv2.mean(self.prev_frame[ymin:ymax, xmin:xmax]) + cv2.imshow("colros", self.prev_frame[ymin:ymax, xmin:xmax]) + cv2.waitKey(33) + color = self.get_color(scalar) + print "ID", self.my_id + print "color", color + + if(self.prev_color is not None): + changed = False + + if(color != self.prev_color): + changed = True + + if(color == 'k'): + print "turned black" + self.turned_black = True + self.colors_found = 0 + del self.colors[:] + + elif (self.turned_black and changed): + self.colors_found += 1 + self.colors.append(color) + print "changed" + print color + + print "cols", self.colors + self.prev_color = color + print "---" + return False, [] diff --git a/perception/navigator_vision/nodes/scan_the_code_lib/model_tracker.py b/perception/navigator_vision/nodes/scan_the_code_lib/model_tracker.py new file mode 100644 index 00000000..2fce8b44 --- /dev/null +++ b/perception/navigator_vision/nodes/scan_the_code_lib/model_tracker.py @@ -0,0 +1,121 @@ +import sys +import cv2 +import numpy as np +import time +from model import Model + + +class ModelTracker: + + def __init__(self): + self.model_count = 0 + self.models = [] + self.colors = [] + self.lk_params = dict(winSize=(15, 15), maxLevel=2, criteria=(cv2.TERM_CRITERIA_EPS | cv2.TERM_CRITERIA_COUNT, 10, 0.03)) + + def register_model(self, model, frame): + # Go through all the models we already have + # for i, m in enumerate(self.models): + # old_frame = m.prev_frame + # old_gray = cv2.cvtColor(old_frame, cv2.COLOR_BGR2GRAY) + # gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY) + # prev_points = m.prev_points + # # calculate the point in the new image + # p1, st, err = cv2.calcOpticalFlowPyrLK(old_gray, gray, prev_points, None, **self.lk_params) + + # # get the total distance between the points + # total_dists = 0 + # for i, p in enumerate(p1): + # min_dist = 1000 + # for j, _p in enumerate(model): + # dist = np.linalg.norm(np.subtract(p, _p)) + # if(dist < min_dist): + # min_dist = dist + # total_dists += min_dist + + # # If the total distance is less than 50, you have a repeat model, don't add it + # if(total_dists < 50): + # return + + if(len(self.models) > 0): + return + + model = Model(model, frame, self.model_count) + self.models.append(model) + self.model_count += 1 + + def track_models(self, frame): + draw = frame.copy() + updated_models = [] + for i, m in enumerate(self.models): + + old_frame = m.prev_frame + old_gray = cv2.cvtColor(old_frame, cv2.COLOR_BGR2GRAY) + gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY) + prev_points = m.prev_points + + p1, st, err = cv2.calcOpticalFlowPyrLK(old_gray, gray, prev_points, None, **self.lk_params) + for e in err: + if(e == 1): + continue + print "err" + if(not self.geometry_test(p1, gray.copy())): + print "didn't pass geom" + continue + if(time.time() - m.time_started > 15): + print "timed out" + continue + mission_status, colors = m.check_for_colors() + if(mission_status): + self.colors = colors + return True + + m.prev_points = p1 + m.prev_frame = frame + 
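+            # This model survived the optical-flow, geometry, and timeout
+            # checks; keep its refreshed points and frame for the next cycle.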
updated_models.append(m) + + for i, val in enumerate(m.prev_points): + cv2.circle(draw, tuple(val[0]), 3, (255, 255, 255), -1) + + del self.models[:] + self.models = updated_models + + if(len(self.models) > 0): + cv2.imshow("all_models", draw) + cv2.waitKey(33) + + return False + + def get_non_furthest_points(self, i, points): + dist_points = [] + for ind, val in enumerate(points): + if(ind != i): + diff = np.subtract(points[i], val) + dist_points.append([np.linalg.norm(diff), points[ind]]) + + dist_points = sorted(dist_points, key=lambda x: x[0]) + + return dist_points[0][1], dist_points[1][1] + + def geometry_test(self, fp, draw1): + + for i, p in enumerate(fp): + draw = draw1.copy() + p1, p2 = self.get_non_furthest_points(i, fp) + cv2.circle(draw, tuple(p[0]), 3, (255, 255, 255), -1) + cv2.circle(draw, tuple(p1[0]), 3, (255, 255, 255), -1) + # cv2.circle(draw, tuple(p2[0]), 3, (255,255,255), -1) + # cv2.imshow("geom", draw) + # cv2.waitKey(0) + + diff1 = np.subtract(p1, p)[0] + diff2 = np.subtract(p2, p)[0] + + diff1 /= np.linalg.norm(diff1) + diff2 /= np.linalg.norm(diff2) + + val = abs(np.dot(diff1, diff2)) + if(val > .1): + return False + + return True diff --git a/perception/navigator_vision/nodes/shape_identification/DockShapeVision.cpp b/perception/navigator_vision/nodes/shape_identification/DockShapeVision.cpp new file mode 100644 index 00000000..cdf9c150 --- /dev/null +++ b/perception/navigator_vision/nodes/shape_identification/DockShapeVision.cpp @@ -0,0 +1,2 @@ +#include "DockShapeVision.h" +DockShapeVision::DockShapeVision(ros::NodeHandle& nh) : nh(nh) {} diff --git a/perception/navigator_vision/nodes/shape_identification/DockShapeVision.h b/perception/navigator_vision/nodes/shape_identification/DockShapeVision.h new file mode 100644 index 00000000..e9299753 --- /dev/null +++ b/perception/navigator_vision/nodes/shape_identification/DockShapeVision.h @@ -0,0 +1,14 @@ +#pragma once +#include +#include +#include "opencv2/opencv.hpp" +class DockShapeVision { + protected: + ros::NodeHandle& nh; + DockShapeVision(ros::NodeHandle& nh); + + public: + virtual void GetShapes(cv::Mat& frame, cv::Rect roi, + navigator_msgs::DockShapes& symbols) = 0; + virtual void init() = 0; +}; diff --git a/perception/navigator_vision/nodes/shape_identification/GrayscaleContour/GrayscaleContour.cpp b/perception/navigator_vision/nodes/shape_identification/GrayscaleContour/GrayscaleContour.cpp new file mode 100644 index 00000000..c39a583b --- /dev/null +++ b/perception/navigator_vision/nodes/shape_identification/GrayscaleContour/GrayscaleContour.cpp @@ -0,0 +1,354 @@ +#include "GrayscaleContour.h" +#include +GrayscaleContour::GrayscaleContour(ros::NodeHandle& nh) : DockShapeVision(nh) { + // Set ros params + nh.param("grayscale/canny/thresh1", cannyParams.thresh1, 60); + nh.param("grayscale/canny/thresh2", cannyParams.thresh2, 100); + nh.param("grayscale/blur_size", blur_size, 7); + nh.param("grayscale/min_contour_area", minArea, + 100.0 / (644.0 * 482.0)); + nh.param("grayscale/triangle/epsilon", triangleEpsilon, 0.1); + nh.param("grayscale/cross/epsilon", crossEpsilon, 0.03); + nh.param("grayscale/triangle/rect_error_threshold", + triRectErrorThreshold, 0.1); + nh.param("grayscale/triangle/side_error_threshold", + triSideErrorThreshold, 0.1); + nh.param("grayscale/triangle/angle_mean_error_threshold", + triAngleMeanErrorThreshold, 5); + nh.param("grayscale/triangle/angle_var_error_threshold", + triAngleVarErrorThreshold, 10); + nh.param("grayscale/cross/rect_error_threshold", + crossRectErrorThreshold, 
0.2); + nh.param("grayscale/cross/side_error_threshold", + crossSideErrorThreshold, 0.2); + nh.param("grayscale/cross/angle_mean_error_threshold", + crossAngleMeanErrorThreshold, 5); + nh.param("grayscale/cross/angle_var_error_threshold", + crossAngleVarErrorThreshold, 5); + nh.param("grayscale/circle/enclosing_circle_error_threshold", + circleEnclosingErrorThreshold, 0.2); + nh.param("grayscale/red_hue_min",redHueMin,0); + nh.param("grayscale/red_hue_max",redHueMax,255); + nh.param("grayscale/blue_hue",blueHue,120); + nh.param("grayscale/green_hue",greenHue,60); + +#ifdef DO_DEBUG + namedWindow("Menu", CV_WINDOW_AUTOSIZE); + namedWindow("Result", CV_WINDOW_AUTOSIZE); + namedWindow("Grayscale", CV_WINDOW_AUTOSIZE); + namedWindow("Contours", CV_WINDOW_AUTOSIZE); + namedWindow("Edges", CV_WINDOW_AUTOSIZE); +#endif +#ifdef DO_ROS_DEBUG + image_transport.reset(new image_transport::ImageTransport(nh)); + color_debug_publisher = + image_transport->advertise("/dock_shapes/finder/debug_color", 1); + contour_debug_publisher = + image_transport->advertise("/dock_shapes/finder/debug_contours", 1); +#endif +} +void GrayscaleContour::init() { +#ifdef DO_DEBUG + createTrackbar("thresh1", "Menu", &cannyParams.thresh1, 500); + createTrackbar("thresh2", "Menu", &cannyParams.thresh2, 255); +#endif +} +void GrayscaleContour::GetShapes(cv::Mat& frame, cv::Rect roi, + navigator_msgs::DockShapes& symbols) { + if (frame.empty()) return; + + this->roi = roi; + frame_width = frame.cols; + frame_height = frame.rows; + + colorFrame = frame; + CropFrame(); + Mat temp = croppedFrame.clone(); + bilateralFilter(temp, croppedFrame, blur_size, blur_size * 2, blur_size / 2); + ConvertToGrayscale(); + DetectEdges(); + FindContours(); + +#ifdef DO_DEBUG + Mat result = croppedFrame.clone(); +#endif +#ifdef DO_ROS_DEBUG + cv_bridge::CvImage ros_color_debug; + ros_color_debug.encoding = "bgr8"; + ros_color_debug.image = croppedFrame.clone(); +#endif + + for (size_t i = 0; i < contours.size(); i++) { + navigator_msgs::DockShape dockShape; + std::vector approx_tri; + std::vector approx_cross; + approxPolyDP(Mat(contours[i]), approx_tri, + arcLength(contours[i], true) * triangleEpsilon, true); + approxPolyDP(Mat(contours[i]), approx_cross, + arcLength(contours[i], true) * crossEpsilon, true); + if (isTriangle(approx_tri)) { + dockShape.Shape = navigator_msgs::DockShape::TRIANGLE; + setShapePoints(dockShape, approx_tri); + } else if (isCross(approx_cross)) { + dockShape.Shape = navigator_msgs::DockShape::CROSS; + setShapePoints(dockShape, approx_cross); + } else if (isCircle(contours[i])) { + dockShape.Shape = navigator_msgs::DockShape::CIRCLE; + setShapePoints(dockShape, contours[i]); + } else + continue; + + if (!GetColor(i, dockShape.Color,dockShape.color_confidence)) continue; + +#ifdef DO_DEBUG + drawContours(result, contours, i, Scalar(0, 0, 255)); +#endif +#ifdef DO_ROS_DEBUG + drawContours(ros_color_debug.image, contours, i, Scalar(0, 0, 255)); +#endif + + symbols.list.push_back(dockShape); + } + +#ifdef DO_DEBUG + for (auto symbol : symbols.list) { + putText(result, symbol.Shape + "\n(" + symbol.Color + ")", + Point(symbol.CenterX + 10 - roi.x, symbol.CenterY - roi.y), 1, 1, + Scalar(0, 0, 0), 3); + } + imshow("Result", result); + imshow("Grayscale", grayscaleFrame); + imshow("Edges", edgesFrame); + imshow("Contours", contoursFrame); + waitKey(5); +#endif +#ifdef DO_ROS_DEBUG + for (auto symbol : symbols.list) { + putText(ros_color_debug.image, symbol.Shape + "\n(" + symbol.Color + ")", + Point(symbol.CenterX + 10 - roi.x, 
symbol.CenterY - roi.y), 1, 1,
+            Scalar(0, 0, 0), 3);
+  }
+  color_debug_publisher.publish(ros_color_debug.toImageMsg());
+  cv_bridge::CvImage ros_contours_debug;
+  ros_contours_debug.encoding = "mono8";
+  ros_contours_debug.image = contoursFrame.clone();
+  contour_debug_publisher.publish(ros_contours_debug.toImageMsg());
+#endif
+}
+void GrayscaleContour::CropFrame() { croppedFrame = colorFrame(roi); }
+void GrayscaleContour::ConvertToGrayscale() {
+  cvtColor(croppedFrame, grayscaleFrame, CV_BGR2GRAY);
+}
+void GrayscaleContour::DetectEdges() {
+  Canny(grayscaleFrame, edgesFrame, cannyParams.thresh1, cannyParams.thresh2);
+}
+void GrayscaleContour::FindContours() {
+  contours.clear();
+  hierarchy.clear();
+  findContours(edgesFrame, contours, hierarchy, CV_RETR_TREE,
+               CV_CHAIN_APPROX_SIMPLE, Point(0, 0));
+
+  // Filter out very small contours
+  auto cit = contours.begin();
+  auto hit = hierarchy.begin();
+  while (cit != contours.end()) {
+    if (filterArea(*cit)) {
+      cit = contours.erase(cit);
+      hit = hierarchy.erase(hit);
+    } else {
+      cit++;
+      hit++;
+    }
+  }
+
+#if defined(DO_DEBUG) || defined(DO_ROS_DEBUG)
+  contoursFrame = Mat(edgesFrame.size(), edgesFrame.type(), Scalar(0, 0, 0));
+  for (size_t i = 0; i < contours.size(); i++) {
+    drawContours(contoursFrame, contours, i, Scalar(255, 255, 255));
+  }
+#endif
+}
+
+bool GrayscaleContour::GetColor(int Index, std::string& color, float& confidence) {
+  Mat mask = Mat::zeros(croppedFrame.rows, croppedFrame.cols, CV_8UC1);
+  drawContours(mask, contours, Index, Scalar(255), CV_FILLED);
+  Mat meanBGR(1, 1, croppedFrame.type(), mean(croppedFrame, mask));
+  Mat mean_hsv_mat(1, 1, croppedFrame.type(), Scalar(0, 0, 0));
+  cvtColor(meanBGR, mean_hsv_mat, CV_BGR2HSV, 3);
+  Vec3b meanColor = mean_hsv_mat.at<Vec3b>(0, 0);
+  double hue = meanColor[0];
+  double redness = 1.0 / (fmin(redHueMax - hue, hue - redHueMin) + 1.0);
+  double blueness = 1.0 / (fabs(blueHue - hue) + 1.0);
+  double greenness = 1.0 / (fabs(greenHue - hue) + 1.0);
+  double max = 0;
+  if (blueness > max) {
+    max = blueness;
+    color = navigator_msgs::DockShape::BLUE;
+    confidence = blueness;
+  }
+  if (greenness > max) {
+    max = greenness;
+    color = navigator_msgs::DockShape::GREEN;
+    confidence = greenness;
+  }
+  if (redness > max) {
+    max = redness;
+    color = navigator_msgs::DockShape::RED;
+    confidence = redness;
+  }
+  // ~printf("%s Confidence: %f Colors: H=%d S=%d V=%d \n",color.c_str(),confidence,meanColor.val[0],meanColor.val[1],meanColor.val[2]);
+  return true;
+}
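+// The shape tests below are tolerance checks against an ideal shape: an
+// equilateral triangle with a horizontal base has area ~ width * height / 2,
+// perimeter ~ 3 * width, and interior angles near 60 degrees; a 12-sided
+// plus-shaped cross covers 5 squares of its side length; a circle should
+// nearly fill its minimum enclosing circle (area pi * r^2).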
+bool GrayscaleContour::isTriangle(std::vector<cv::Point>& points) {
+  // If contour is not 3 sided, is not a triangle
+  if (points.size() != 3) return false;
+
+  double contour_area = contourArea(points);
+  double perimeter = arcLength(points, true);
+  double expected_area, expected_perimeter, error;
+
+  // Draw bounding rect, find area of triangle with this height/width, compare
+  // to real area
+  cv::Rect boundingRect = cv::boundingRect(points);
+  expected_area = 0.5 * boundingRect.width * boundingRect.height;
+  error = fabs(contour_area / expected_area - 1.0);
+  if (error > triRectErrorThreshold) return false;
+
+  // Find side length based on bounding rect width, compare to real perimeter of
+  // the contour
+  expected_perimeter = boundingRect.width * 3;
+  error = fabs(perimeter / expected_perimeter - 1.0);
+  if (error > triSideErrorThreshold) return false;
+
+  // Find angles (in degrees) of contour, compare to expected of 60 degrees with
+  // low variance
+  std::vector<double> angles;
+  findAngles(points, angles);
+  using namespace boost::accumulators;
+  accumulator_set<double, stats<tag::mean, tag::variance>> acc;
+  for (double angle : angles) acc(angle);
+  auto mean = boost::accumulators::mean(acc);
+  auto variance = sqrt(boost::accumulators::variance(acc));
+  if (fabs(mean - 60.0) > triAngleMeanErrorThreshold) return false;
+  if (variance > triAngleVarErrorThreshold) return false;
+
+  return true;
+}
+bool GrayscaleContour::isCross(std::vector<cv::Point>& points) {
+  // If polygon is not 12 sided, is not a cross
+  if (points.size() != 12) return false;
+
+  double contour_area = contourArea(points);
+  double perimeter = arcLength(points, true);
+  double expected_area, error;
+
+  // Find length of 1 side using perimeter/12, then compare area of contour with
+  // this side length to real area
+  double side_length = perimeter / 12.0;
+  expected_area = 5.0 * pow(side_length, 2);
+  error = fabs(contour_area / expected_area - 1.0);
+  if (error > crossRectErrorThreshold) return false;
+
+  // Draw bounding rect around contour, determine what area should be of cross
+  // with this width, compare to real area
+  Rect rect = boundingRect(points);
+  expected_area = 5 * pow(rect.width / 3.0, 2);
+  error = fabs(contour_area / expected_area - 1);
+  if (error > crossSideErrorThreshold) return false;
+
+  // Find all angles in contour, then check that the mean angle is around 40
+  // degrees and variance is low
+  std::vector<double> angles;
+  findAngles(points, angles);
+  using namespace boost::accumulators;
+  accumulator_set<double, stats<tag::mean, tag::variance>> acc;
+  for (double angle : angles) acc(angle);
+  auto mean = boost::accumulators::mean(acc);
+  auto variance = sqrt(boost::accumulators::variance(acc));
+  // ~printf("CROSS SIDES=%d MEAN=%f VAR=%f\n",points.size(),mean,variance);
+  if (fabs(mean - 40.0) > crossAngleMeanErrorThreshold) return false;
+  if (variance > crossAngleVarErrorThreshold) return false;
+
+  return true;
+}
+
+const double pi = 3.1415926;
+bool GrayscaleContour::isCircle(std::vector<cv::Point>& points) {
+  if (points.size() < 5) return false;
+  double contour_area = contourArea(points);
+  // ~double perimeter = arcLength(points, true);
+  double expected_area, error;
+
+  // Find a min enclosing circle for contour, then compare enclosing circle
+  // area to real area
+  Point2f center;
+  float radius;
+  minEnclosingCircle(points, center, radius);
+  expected_area = pi * pow(radius, 2);
+  error = fabs(contour_area / expected_area - 1);
+  if (error > circleEnclosingErrorThreshold) return false;
+
+  // ~doub
+  // ~Rect rect = boundingRect(points);
+  // ~double contour_area = contourArea(points); //Actual area of the contour
+  // ~double expected_area = pi*pow(rect.width/2.0,2); //What area should be if
+  // contour is a circle
+  // ~double error = fabs(contour_area/expected_area-1);
+  // ~if (error > 0.2) return false;
+  // ~
+  // ~double perimeter = arcLength(points, true);
+  // ~double radius = perimeter/(2.0*pi);
+  // ~expected_area = pi*pow(radius,2);
+  // ~error = fabs(contour_area/expected_area-1.0);
+  // ~if (error > 0.2) return false;
+
+  // double center,radius;
+  // minEnclosingCircle( points, center,radius);
+
+  return true;
+}
+bool GrayscaleContour::filterArea(std::vector<cv::Point> contour) {
+  return contourArea(contour) < (minArea * (frame_width * frame_height));
+}
+Point GrayscaleContour::findCenter(std::vector<cv::Point>& points) {
+  double x = 0, y = 0;
+  for (size_t i = 0; i < points.size(); i++) {
+    x += points[i].x;
+    y += points[i].y;
+  }
+  x /= points.size();
+  y /= points.size();
+  return Point(x, y);
+}
+double GrayscaleContour::findAngle(cv::Point& p1, cv::Point& p2,
+                                   cv::Point& p3) {
+  cv::Point v1 = p2 - p1;
+  cv::Point v2 = p3 - p1;
+  float m1 =
sqrt(v1.x * v1.x + v1.y * v1.y);
+  float m2 = sqrt(v2.x * v2.x + v2.y * v2.y);
+  float thecos = v1.dot(v2) / (m1 * m2);
+
+  return acos(thecos) * 180 / pi;
+}
+void GrayscaleContour::findAngles(std::vector<cv::Point>& points,
+                                  std::vector<double>& angles) {
+  for (size_t i = 0; i < points.size() - 2; i++)
+    angles.push_back(findAngle(points[i], points[i + 1], points[i + 2]));
+  angles.push_back(findAngle(points[points.size() - 2],
+                             points[points.size() - 1], points[0]));
+  angles.push_back(findAngle(points[points.size() - 1], points[0], points[1]));
+}
+void GrayscaleContour::setShapePoints(navigator_msgs::DockShape& dockShape,
+                                      std::vector<cv::Point>& points) {
+  Point center = findCenter(points);
+  dockShape.CenterX = center.x + roi.x;
+  dockShape.CenterY = center.y + roi.y;
+  dockShape.img_width = colorFrame.cols;
+  for (size_t j = 0; j < points.size(); j++) {
+    geometry_msgs::Point p;
+    p.x = points[j].x + roi.x;
+    p.y = points[j].y + roi.y;
+    p.z = 0;
+    dockShape.points.push_back(p);
+  }
+}
diff --git a/perception/navigator_vision/nodes/shape_identification/GrayscaleContour/GrayscaleContour.h b/perception/navigator_vision/nodes/shape_identification/GrayscaleContour/GrayscaleContour.h
new file mode 100644
index 00000000..3e15de05
--- /dev/null
+++ b/perception/navigator_vision/nodes/shape_identification/GrayscaleContour/GrayscaleContour.h
@@ -0,0 +1,82 @@
+#pragma once
+#include
+#include
+#include
+#include
+#include "../DockShapeVision.h"
+#include "opencv2/opencv.hpp"
+using namespace cv;
+
+//#define DO_DEBUG
+#define DO_ROS_DEBUG
+#ifdef DO_ROS_DEBUG
+#include
+#include
+#include
+#endif
+class GrayscaleContour : public DockShapeVision {
+ private:
+  Mat colorFrame, croppedFrame, grayscaleFrame, edgesFrame;
+  std::vector<std::vector<Point> > contours;
+  std::vector<Vec4i> hierarchy;
+  std::vector<std::vector<Point> > shapes;
+  Rect roi;
+
+  void CropFrame();
+  void ConvertToGrayscale();
+  void DetectEdges();
+  void FindContours();
+  bool GetColor(int shapeIndex, std::string& color, float& confidence);
+  Point findCenter(std::vector<Point>& points);
+  Mat contoursFrame;
+  int frame_height;
+  int frame_width;
+
+  bool filterArea(std::vector<Point> contour);
+
+  bool isTriangle(std::vector<Point>& points);
+  bool isCross(std::vector<Point>& points);
+  bool isCircle(std::vector<Point>& points);
+
+  void setShapePoints(navigator_msgs::DockShape& dockShape,
+                      std::vector<Point>& points);
+
+  static double findAngle(cv::Point& p1, cv::Point& p2, cv::Point& p3);
+  static void findAngles(std::vector<Point>& points,
+                         std::vector<double>& angles);
+#ifdef DO_ROS_DEBUG
+  std::unique_ptr<image_transport::ImageTransport> image_transport;
+  image_transport::Publisher color_debug_publisher;
+  image_transport::Publisher contour_debug_publisher;
+#endif
+
+  // Constants to use ros params for
+  struct CannyParams {
+    int thresh1;
+    int thresh2;
+  };
+  CannyParams cannyParams;
+  double minArea;
+  int blur_size;
+  double triangleEpsilon;
+  double crossEpsilon;
+  double triRectErrorThreshold;
+  double triSideErrorThreshold;
+  double triAngleMeanErrorThreshold;
+  double triAngleVarErrorThreshold;
+  double crossRectErrorThreshold;
+  double crossSideErrorThreshold;
+  double crossAngleMeanErrorThreshold;
+  double crossAngleVarErrorThreshold;
+  double circleEnclosingErrorThreshold;
+
+  double redHueMin;
+  double redHueMax;
+  double blueHue;
+  double greenHue;
+ public:
+  GrayscaleContour(ros::NodeHandle& nh);
+  void GetShapes(cv::Mat& frame, cv::Rect roi,
+                 navigator_msgs::DockShapes& symbols);
+  void init();
+};
diff --git a/perception/navigator_vision/nodes/shape_identification/ShapeTracker.cpp
b/perception/navigator_vision/nodes/shape_identification/ShapeTracker.cpp
new file mode 100644
index 00000000..8536e017
--- /dev/null
+++ b/perception/navigator_vision/nodes/shape_identification/ShapeTracker.cpp
@@ -0,0 +1,76 @@
+#include "ShapeTracker.h"
+ShapeTracker::ShapeTracker() : active(false)
+{
+
+}
+void ShapeTracker::setActive(bool a)
+{
+  active = a;
+}
+void ShapeTracker::init(ros::NodeHandle& nh)
+{
+  getShapesService = nh.advertiseService("/vision/get_shapes", &ShapeTracker::getShapesCallback, this);
+  allFoundShapesPublish = nh.advertise<navigator_msgs::DockShapes>("/dock_shapes/filtered_shapes", 10);
+}
+void ShapeTracker::addShape(navigator_msgs::DockShape& s)
+{
+  for (auto &tracked : shapes)
+  {
+    if (tracked.update(s)) return;
+  }
+  // ~printf("Inserting Shape \n");
+  shapes.push_back(TrackedShape(s));
+}
+void ShapeTracker::addShapes(navigator_msgs::DockShapes& newShapes)
+{
+  // ~printf("Adding %d shapes current size = %d\n",newShapes.list.size(),shapes.size());
+  for (auto &shape : newShapes.list)
+  {
+    addShape(shape);
+  }
+  shapes.erase(std::remove_if(shapes.begin(), shapes.end(), [] (TrackedShape& s) {return s.isStale();}), shapes.end());
+  // ~printf("Shapes size %d\n",shapes.size());
+  navigator_msgs::DockShapes found_shapes;
+  for (auto &tracked : shapes)
+  {
+    if (tracked.isReady())
+    {
+      found_shapes.list.push_back(tracked.get());
+    }
+  }
+  allFoundShapesPublish.publish(found_shapes);
+}
+bool ShapeTracker::getShapesCallback(navigator_msgs::GetDockShapes::Request &req,
+                                     navigator_msgs::GetDockShapes::Response &res) {
+  if (!active) {
+    res.found = false;
+    res.error = navigator_msgs::GetDockShapes::Response::NODE_DISABLED;
+    return true;
+  }
+  if (!validRequest(req.Color, req.Shape)) {
+    res.found = false;
+    res.error = navigator_msgs::GetDockShapes::Response::INVALID_REQUEST;
+    return true;
+  }
+  for (auto &tracked : shapes)
+  {
+    if (tracked.sameType(req.Color, req.Shape) && tracked.isReady())
+    {
+      res.shapes.list.push_back(tracked.get());
+    }
+  }
+  if (res.shapes.list.size() > 0) res.found = true;
+  else res.error = navigator_msgs::GetDockShapes::Response::SHAPE_NOT_FOUND;
+  return true;
+}
+bool ShapeTracker::validRequest(std::string& color, std::string& shape)
+{
+  return (color == navigator_msgs::GetDockShape::Request::BLUE ||
+          color == navigator_msgs::GetDockShape::Request::GREEN ||
+          color == navigator_msgs::GetDockShape::Request::RED ||
+          color == navigator_msgs::GetDockShape::Request::ANY) &&
+         (shape == navigator_msgs::GetDockShape::Request::CIRCLE ||
+          shape == navigator_msgs::GetDockShape::Request::CROSS ||
+          shape == navigator_msgs::GetDockShape::Request::TRIANGLE ||
+          shape == navigator_msgs::GetDockShape::Request::ANY);
+}
diff --git a/perception/navigator_vision/nodes/shape_identification/ShapeTracker.h b/perception/navigator_vision/nodes/shape_identification/ShapeTracker.h
new file mode 100644
index 00000000..3126705d
--- /dev/null
+++ b/perception/navigator_vision/nodes/shape_identification/ShapeTracker.h
@@ -0,0 +1,24 @@
+#include
+#include
+#include
+#include
+#include
+#include
+#include "TrackedShape.h"
+
+class ShapeTracker
+{
+  private:
+    ros::ServiceServer getShapesService;
+    ros::Publisher allFoundShapesPublish;
+    std::vector<TrackedShape> shapes;
+    bool active;
+    static bool validRequest(std::string& color, std::string& shape);
+  public:
+    ShapeTracker();
+    void setActive(bool a);
+    bool getShapesCallback(navigator_msgs::GetDockShapes::Request &req, navigator_msgs::GetDockShapes::Response &res);
+    void init(ros::NodeHandle& nh);
+    void addShape(navigator_msgs::DockShape&
s); + void addShapes(navigator_msgs::DockShapes& newShapes); +}; diff --git a/perception/navigator_vision/nodes/shape_identification/TrackedShape.cpp b/perception/navigator_vision/nodes/shape_identification/TrackedShape.cpp new file mode 100644 index 00000000..4c71d345 --- /dev/null +++ b/perception/navigator_vision/nodes/shape_identification/TrackedShape.cpp @@ -0,0 +1,81 @@ +#include "TrackedShape.h" +using namespace std; +int TrackedShape::MIN_COUNT = 20; +double TrackedShape::MAX_DISTANCE = 5; +ros::Duration TrackedShape::MAX_TIME_GAP = ros::Duration(0, 0.5 * 1E9); +TrackedShape::TrackedShape() : count(0) +{ + +} +TrackedShape::TrackedShape(navigator_msgs::DockShape& s) : + count(1), + latest(s) +{ + +} +void TrackedShape::init(ros::NodeHandle& nh) +{ + nh.param("tracking/min_count",MIN_COUNT,10); + nh.param("tracking/max_distance_gap",MAX_DISTANCE,15); + double seconds; + nh.param("tracking/max_seen_gap_seconds", seconds, 0.5); + MAX_TIME_GAP = ros::Duration(0, seconds * 1E9); +} +double TrackedShape::centerDistance(navigator_msgs::DockShape& a, navigator_msgs::DockShape& b) +{ + // ~printf("A = (%d,%d) B= (%d,%d)\n",a.CenterX,a.CenterY,b.CenterX,b.CenterY); + return sqrtf( powf(double(a.CenterX)-double(b.CenterX), 2) + powf(double(a.CenterY)-double(b.CenterY),2) ); +} +bool TrackedShape::update(navigator_msgs::DockShape& s) +{ + double distance = centerDistance(latest,s); + if (distance > MAX_DISTANCE) + { + // ~printf(" %s %s distance too big to %s %s DISTANCE = %f \n",latest.Color.c_str(),latest.Shape.c_str(),s.Color.c_str(),s.Shape.c_str(),distance); + return false; + } + if (tooOld(s)) + { + // ~printf(" %s %s too old from %s %s\n",latest.Color.c_str(),latest.Shape.c_str(),s.Color.c_str(),s.Shape.c_str()); + return false; + } + if (latest.Color != s.Color) + { + //What to do if color is different with two nearby + if (latest.color_confidence > s.color_confidence) + { + s.Color = latest.Color; + s.color_confidence = latest.color_confidence; + } + } + // ~if (latest.Shape != s.Shape) + // ~{ + // ~if (s.shapeConfidence) + // ~} + latest = s; + count++; + // ~printf("Updating %s %s with %s %s COUNT=%d Distance=%f \n",latest.Color.c_str(),latest.Shape.c_str(),s.Color.c_str(),s.Shape.c_str(),count,distance); + return true; +} +bool TrackedShape::tooOld(navigator_msgs::DockShape& s) +{ + return s.header.stamp - latest.header.stamp > MAX_TIME_GAP; +} +bool TrackedShape::isStale() +{ + ros::Time now = ros::Time::now(); + // ~std::cout << "Time Diff: " << (now - latest.header.stamp).toSec() << std::endl; + return (now - latest.header.stamp) > MAX_TIME_GAP; +} +bool TrackedShape::isReady() +{ + return count >= MIN_COUNT && !isStale(); +} +navigator_msgs::DockShape TrackedShape::get() +{ + return latest; +} +bool TrackedShape::sameType(std::string& color, std::string& shape) +{ + return (color == navigator_msgs::GetDockShape::Request::ANY || color == latest.Color ) && (shape == navigator_msgs::GetDockShape::Request::ANY || shape == latest.Shape); +} diff --git a/perception/navigator_vision/nodes/shape_identification/TrackedShape.h b/perception/navigator_vision/nodes/shape_identification/TrackedShape.h new file mode 100644 index 00000000..1cc25272 --- /dev/null +++ b/perception/navigator_vision/nodes/shape_identification/TrackedShape.h @@ -0,0 +1,27 @@ +#include +#include +#include +#include +#include +#include + +class TrackedShape +{ + private: + int count; + static int MIN_COUNT; + static double MAX_DISTANCE; + static ros::Duration MAX_TIME_GAP; + navigator_msgs::DockShape latest; + 
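+    // update() folds a new detection into this track when its center is
+    // within MAX_DISTANCE pixels of `latest` and its stamp is within
+    // MAX_TIME_GAP of it; `count` then gates isReady(), while isStale()
+    // retires tracks that have not been refreshed within MAX_TIME_GAP.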
static double centerDistance(navigator_msgs::DockShape& a, navigator_msgs::DockShape& b); + bool tooOld(navigator_msgs::DockShape& s); + public: + TrackedShape(); + TrackedShape(navigator_msgs::DockShape& s); + bool sameType(std::string& color, std::string & shape); + bool update(navigator_msgs::DockShape& s); + bool isReady(); + bool isStale(); + navigator_msgs::DockShape get(); + static void init(ros::NodeHandle& nh); +}; diff --git a/perception/navigator_vision/nodes/shape_identification/main.cpp b/perception/navigator_vision/nodes/shape_identification/main.cpp new file mode 100644 index 00000000..10af17c4 --- /dev/null +++ b/perception/navigator_vision/nodes/shape_identification/main.cpp @@ -0,0 +1,115 @@ +#include "std_msgs/String.h" +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include "GrayscaleContour/GrayscaleContour.h" +#include +#include +#include "DockShapeVision.h" +#include +#include "ShapeTracker.h" + + +using namespace cv; + +class ShooterVision { + private: + ShapeTracker tracker; + std::unique_ptr vision; + navigator_msgs::DockShapes symbols; + ros::NodeHandle nh_; + image_transport::ImageTransport it_; + image_transport::Subscriber image_sub_; + ros::Publisher foundShapesPublisher; + + ros::ServiceServer runService; + ros::ServiceServer roiService; + std::string camera_topic; + bool active; + cv::Rect roi; + int width; + int height; + + public: + ShooterVision() : nh_("dock_shape_finder"), it_(nh_) { + nh_.param("auto_start", active, false); + vision.reset(new GrayscaleContour(nh_)); + vision->init(); + nh_.param("symbol_camera", camera_topic, + "/right/right/image_raw"); + runService = nh_.advertiseService("/vision/get_shapes/switch", &ShooterVision::runCallback, this); + roiService = nh_.advertiseService("setROI", + &ShooterVision::roiServiceCallback, this); + //#ifdef DO_DEBUG + // DebugWindow::init(); + //#endif + foundShapesPublisher = nh_.advertise( + "/dock_shapes/found_shapes", 1000); + image_sub_ = it_.subscribe(camera_topic, 1, &ShooterVision::run, this); + + int x_offset, y_offset, width, height; + nh_.param("roi/x_offset", x_offset, 73); + nh_.param("roi/y_offset", y_offset, 103); + nh_.param("roi/width", width, 499); + nh_.param("roi/height", height, 243); + roi = Rect(x_offset, y_offset, width, height); + TrackedShape::init(nh_); + tracker.init(nh_); + } + + bool runCallback(std_srvs::SetBool::Request &req, + std_srvs::SetBool::Response &res) { + active = req.data; + res.success = true; + tracker.setActive(req.data); + return true; + } + bool roiServiceCallback(navigator_msgs::SetROI::Request &req, + navigator_msgs::SetROI::Response &res) { + if (req.roi.x_offset < 0 || req.roi.y_offset < 0 || + req.roi.x_offset + req.roi.width > width || + req.roi.y_offset + req.roi.height > height) { + res.error = "OUTSIDE_OF_FRAME"; + res.success = false; + return true; + } + roi = + Rect(req.roi.x_offset, req.roi.y_offset, req.roi.width, req.roi.height); + res.success = true; + return true; + } + void run(const sensor_msgs::ImageConstPtr &msg) { + if (!active) return; + // Grab ros frame + cv_bridge::CvImagePtr cv_ptr; + try { + cv_ptr = cv_bridge::toCvCopy(msg, sensor_msgs::image_encodings::BGR8); + } catch (cv_bridge::Exception &e) { + ROS_ERROR("cv_bridge exception: %s", e.what()); + return; + } + cv::Mat frame = cv_ptr->image; + width = frame.cols; + height = frame.rows; + symbols.list.clear(); + + vision->GetShapes(frame, roi, symbols); + for (size_t i = 0; i < symbols.list.size(); i++) + symbols.list[i].header = 
msg->header; + foundShapesPublisher.publish(symbols); + tracker.addShapes(symbols); + } +}; + +int main(int argc, char **argv) { + ros::init(argc, argv, "dock_shape_finder"); + ShooterVision sv = ShooterVision(); + ros::spin(); + return 0; +} diff --git a/perception/navigator_vision/nodes/start_gate_manual.py b/perception/navigator_vision/nodes/start_gate_manual.py new file mode 100755 index 00000000..7da4b9af --- /dev/null +++ b/perception/navigator_vision/nodes/start_gate_manual.py @@ -0,0 +1,308 @@ +#!/usr/bin/python +from __future__ import division + +import rospy +import cv2 +import numpy as np +import time +import tf + +from sensor_msgs.msg import Image +from geometry_msgs.msg import PointStamped, PoseStamped +from navigator_msgs.srv import StartGate, StartGateResponse +import navigator_tools +import image_geometry +from scipy.optimize import minimize + +class ImageGetter(object): + def __init__(self, topic_name): + self.sub = navigator_tools.Image_Subscriber(topic_name, self.get_image) + + print 'getting topic', topic_name + self.frame = None + self.done = False + + self.camera_info = self.sub.wait_for_camera_info() + + def get_image(self, msg): + self.frame = msg + self.done = True + + +class BuoySelector(object): + ''' + Allows user to manually click the buoys + ''' + def __init__(self, name, img, scale_factor=1, color=(10, 75, 250), draw_radius=10): + assert img is not None, "Image is none" + + cv2.namedWindow(name) + cv2.setMouseCallback(name, self.mouse_cb) + + self.name = name + self.point = None + self.draw_radius = draw_radius + self.color = color + + self.scale_factor = scale_factor + self.img = cv2.resize(img, None, fx=scale_factor, fy=scale_factor, interpolation=cv2.INTER_CUBIC) + self.draw_img = self.img + + self.mouse_position = (0, 0) + + def segment(self): + while self.point is None: + draw_img = np.copy(self.img) + cv2.circle(draw_img, self.mouse_position, self.draw_radius, self.color, 1) + cv2.imshow(self.name, draw_img) + + k = cv2.waitKey(10) & 0xFF + + if k == 27: # Esc + cv2.destroyAllWindows() + rospy.sleep(.2) + + return None, None + elif k == ord('q'): + self.draw_radius += 2 + elif k == ord('a'): + self.draw_radius -= 2 + + cv2.destroyAllWindows() + rospy.sleep(.2) + pt = self.point / self.scale_factor + self.point = None + return pt, self.draw_radius + + def mouse_cb(self, event, x, y, flags, param): + self.draw_img = np.copy(self.img) + self.mouse_position = (x, y) + + if event == cv2.EVENT_LBUTTONUP: + self.point = np.array([x, y]) + + +class Segmenter(object): + def __init__(self, color_space, ranges, invert): + self.color_space = color_space + self.lower = np.array(ranges['lower']) + self.upper = np.array(ranges['upper']) + self.invert = invert + + def segment(self, orig_image, debug_color=False): + ''' + Make sure input image is BGR + If you want only the debug image returned, pass a color in for `debug_color` (ex. 
(255, 0, 0) for blue)
+        '''
+        image = orig_image if self.color_space == 'bgr' else cv2.cvtColor(orig_image, cv2.COLOR_BGR2HSV)
+        filtered_image = self.filter_image(image)
+
+        mask = cv2.inRange(filtered_image, self.lower, self.upper)
+        filtered_mask = self.filter_mask(mask)
+
+        cnts, _ = cv2.findContours(filtered_mask, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
+        cnt = sorted(cnts, key=cv2.contourArea, reverse=True)[0]
+        M = cv2.moments(cnt)
+        center = np.array([M['m10'], M['m01']]) / M['m00']
+
+        if debug_color:
+            debug_img = np.copy(orig_image)
+            x, y, w, h = cv2.boundingRect(cnt)
+            cv2.rectangle(debug_img, (x, y), (x + w, y + h), debug_color, 2)
+            cv2.circle(debug_img, tuple(center.astype(np.int16)), 2, debug_color, -1)
+            return debug_img
+
+        return center, cv2.contourArea(cnt)
+
+    def filter_image(self, image):
+        filtered = cv2.GaussianBlur(image, (5, 5), 0)
+        return filtered
+
+    def filter_mask(self, mask):
+        kernel = np.ones((5, 5), np.uint8)
+        filtered = cv2.erode(mask, kernel, iterations=1)
+        filtered = cv2.dilate(filtered, kernel, iterations=1)
+        return filtered
+
+
+def intersect(A, a, B, b):
+    # Based on http://morroworks.com/Content/Docs/Rays%20closest%20point.pdf
+    # Finds the point closest to both rays (skew rays generally don't intersect
+    # exactly): `a` and `b` are ray directions, `A` and `B` their origins
+    return (A + a * (-np.dot(a, b) * np.dot(b, B - A) + np.dot(a, B - A) * np.dot(b, b)) /
+            (np.dot(a, a) * np.dot(b, b) - np.dot(a, b) ** 2) +
+            B + b * (np.dot(a, b) * np.dot(a, B - A) - np.dot(b, B - A) * np.dot(a, a)) /
+            (np.dot(a, a) * np.dot(b, b) - np.dot(a, b) ** 2)) / 2
+
+
+def minimize_repro_error(left_cam, right_cam, pt_l, pt_r, estimation):
+
+    def f(x):
+        left_error = np.linalg.norm(left_cam.project3dToPixel((x)) - pt_l)
+        right_error = np.linalg.norm(right_cam.project3dToPixel((x)) - pt_r)
+        return left_error ** 2 + right_error ** 2
+
+    correction = minimize(
+        fun=f,
+        x0=estimation,
+        tol=1e-9,
+        method="TNC",
+    )
+    return correction.x
+
+
+def do_the_magic(pt_l, pt_r, cam_tf):
+    '''
+    pt_l is in the left frame and pt_r is in the right frame
+    '''
+    global left_cam, right_cam
+
+    ray_1 = np.array(left_cam.projectPixelTo3dRay((pt_l)))
+    ray_2 = np.array(right_cam.projectPixelTo3dRay((pt_r)))
+
+    # I'm doing all the math in the camera frame
+    origin_1 = np.array([0, 0, 0])
+    origin_2 = np.array(cam_tf[0])  # Not accounting for different rotations
+    initial_estimate = intersect(origin_1, ray_1, origin_2, ray_2)
+
+    # Now re-project points and find the minimum error
+    estimate = minimize_repro_error(left_cam, right_cam, pt_l, pt_r, initial_estimate)
+
+    return estimate
+
+
+def load_from_parameter(color):
+    param_name = '/start_gate/{}'.format(color)
+    if not rospy.has_param(param_name):
+        rospy.logerr("No parameters have been set!")
+        rospy.signal_shutdown("Requires param to be set: {}".format(param_name))
+        # exit()
+
+    param = rospy.get_param(param_name)
+
+    s = Segmenter(param['color_space'], param['ranges'], param['invert'])
+    return s
+
+
+def do_buoys(srv, left, right, red_seg, green_seg, tf_listener):
+    '''
+    FYI:
+        `left`: the left camera ImageGetter object
+        `right`: the right camera ImageGetter object
+    '''
+    left_point = None
+    right_point = None
+
+    while not rospy.is_shutdown():
+        # Get all the required TF links
+        try:
+            # Working copy of the current frame obtained at the same time as the tf link
+            tf_listener.waitForTransform("enu", "stereo_left_cam", rospy.Time(), rospy.Duration(4.0))
+            left_image, right_image = left.frame, right.frame
+            cam_tf = tf_listener.lookupTransform("stereo_left_cam", "stereo_right_cam", left.sub.last_image_time)
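+            # cam_tf is the (translation, rotation) pair from the left to the
+            # right camera frame; do_the_magic above only uses the translation
+            # part as the stereo baseline when intersecting the two pixel rays.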
+            cam_p, cam_q = tf_listener.lookupTransform("enu", "stereo_left_cam", left.sub.last_image_time)
+            cam_p = np.array([cam_p])
+            cam_r = tf.transformations.quaternion_matrix(cam_q)[:3, :3]
+            break
+
+        except (tf.LookupException, tf.ConnectivityException, tf.ExtrapolationException, TypeError) as e:
+            print e
+            rospy.logwarn("TF link not found.")
+            time.sleep(.5)
+            continue
+
+    red_left_pt, rl_area = red_seg.segment(left_image)
+    red_right_pt, rr_area = red_seg.segment(right_image)
+    green_left_pt, gl_area = green_seg.segment(left_image)
+    green_right_pt, gr_area = green_seg.segment(right_image)
+
+    area_tol = 50
+    if np.linalg.norm(rl_area - rr_area) > area_tol or np.linalg.norm(gl_area - gr_area) > area_tol:
+        rospy.logwarn("Unsafe segmentation")
+        return StartGateResponse(success=False)
+
+    red_point_np = do_the_magic(red_left_pt, red_right_pt, cam_tf)
+    red_point_np = np.array(cam_r.dot(red_point_np) + cam_p)[0]
+    green_point_np = do_the_magic(green_left_pt, green_right_pt, cam_tf)
+    green_point_np = np.array(cam_r.dot(green_point_np) + cam_p)[0]
+
+    # Just for visualization
+    for i in range(5):
+        # Publish it 5 times so we can see it in rviz
+        navigator_tools.draw_ray_3d(red_left_pt, left_cam, [1, 0, 0, 1], m_id=0, frame="stereo_left_cam")
+        navigator_tools.draw_ray_3d(red_right_pt, right_cam, [1, 0, 0, 1], m_id=1, frame="stereo_right_cam")
+        navigator_tools.draw_ray_3d(green_left_pt, left_cam, [0, 1, 0, 1], m_id=2, frame="stereo_left_cam")
+        navigator_tools.draw_ray_3d(green_right_pt, right_cam, [0, 1, 0, 1], m_id=3, frame="stereo_right_cam")
+
+        red_point = PointStamped()
+        red_point.header = navigator_tools.make_header(frame="enu")
+        red_point.point = navigator_tools.numpy_to_point(red_point_np)
+        red_pub.publish(red_point)
+
+        green_point = PointStamped()
+        green_point.header = navigator_tools.make_header(frame="enu")
+        green_point.point = navigator_tools.numpy_to_point(green_point_np)
+        green_pub.publish(green_point)
+
+        time.sleep(1)
+
+    # Now we have two points, find their midpoint and calculate a target angle
+    midpoint = (red_point_np + green_point_np) / 2
+    between_vector = green_point_np - red_point_np  # Red is on the left when we go out.
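+    # Worked example with made-up coordinates: red = [0, 0, 0] and
+    # green = [0, 2, 0] give between_vector = [0, 2, 0], so
+    # arctan2(2, 0) = pi/2; adding pi/2 below then yields pi, a heading
+    # perpendicular to the red->green line, i.e. straight through the gate.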
+    yaw_theta = np.arctan2(between_vector[1], between_vector[0])
+    # Rotate that theta by 90 deg to get the angle through the buoys
+    yaw_theta += np.pi / 2.0
+
+    p = midpoint
+    q = tf.transformations.quaternion_from_euler(0, 0, yaw_theta)
+
+    target_pose = PoseStamped()
+    target_pose.header = navigator_tools.make_header(frame="enu")
+    target_pose.pose = navigator_tools.numpy_quat_pair_to_pose(p, q)
+
+    return StartGateResponse(target=target_pose, success=True)
+
+
+def publish_debug(red_pub, green_pub, red_seg, green_seg, camera, *args):
+    r_debug = red_seg.segment(camera.frame, (0, 10, 250))
+    g_debug = green_seg.segment(camera.frame, (0, 250, 10))
+
+    red_pub.publish(navigator_tools.make_image_msg(r_debug))
+    green_pub.publish(navigator_tools.make_image_msg(g_debug))
+
+
+if __name__ == "__main__":
+    rospy.init_node("start_gate_perception")
+    tf_listener = tf.TransformListener()
+
+    # For visualization
+    red_pub = rospy.Publisher("red_buoy", PointStamped, queue_size=5)
+    green_pub = rospy.Publisher("green_buoy", PointStamped, queue_size=5)
+    red_debug = rospy.Publisher("vision/start_gate/red", Image, queue_size=5)
+    green_debug = rospy.Publisher("vision/start_gate/green", Image, queue_size=5)
+
+    left = ImageGetter('/stereo/left/image_rect_color')
+    left_cam = image_geometry.PinholeCameraModel()
+    left_cam.fromCameraInfo(left.camera_info)
+
+    right = ImageGetter('/stereo/right/image_rect_color')
+    right_cam = image_geometry.PinholeCameraModel()
+    right_cam.fromCameraInfo(right.camera_info)
+
+    while left.frame is None or right.frame is None:
+        print "Waiting for frame..."
+        rospy.sleep(.5)
+
+    red_seg = load_from_parameter("red")
+    green_seg = load_from_parameter("green")
+
+    # while not rospy.is_shutdown():
+    #     l_center, debug_img = red.segment(left.frame, (0, 70, 255))
+    #     red_debug.publish(navigator_tools.make_image_msg(debug_img))
+    #     rospy.sleep(1)
+
+    s = rospy.Service("/vision/start_gate_buoys", StartGate,
+                      lambda srv: do_buoys(srv, left, right, red_seg, green_seg, tf_listener))
+
+    # rospy.Timer(rospy.Duration(.5), lambda republish: publish_debug(red_debug, green_debug,
+    #                                                                 red_seg, green_seg, left))
+
+    rospy.spin()
diff --git a/perception/navigator_vision/nodes/stereo_point_cloud_driver.cpp b/perception/navigator_vision/nodes/stereo_point_cloud_driver.cpp
new file mode 100644
index 00000000..015d5d83
--- /dev/null
+++ b/perception/navigator_vision/nodes/stereo_point_cloud_driver.cpp
@@ -0,0 +1,502 @@
+/*
+ * Author: David Soto
+ * Year: 2016
+ */
+
+// This node uses a stereo camera rig to generate a point cloud of the environment.
+// It uses the exFAST detector to generate a sparse point cloud.
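+// Rough flow of the node below: grab a roughly synchronized left/right image
+// pair, census-transform both images, detect exFAST keypoints in parallel,
+// match them into sparse correspondences, triangulate each match into a 3D
+// point, and publish the result as a sensor_msgs/PointCloud2.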
+// See this for more info: http://www.ra.cs.uni-tuebingen.de/software/sparsestereo/welcome_e.html + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +void left_image_callback( + const sensor_msgs::ImageConstPtr &image_msg_ptr, + const sensor_msgs::CameraInfoConstPtr &info_msg_ptr); +void right_image_callback( + const sensor_msgs::ImageConstPtr &image_msg_ptr, + const sensor_msgs::CameraInfoConstPtr &info_msg_ptr); +void cloud_callback (const sensor_msgs::PointCloud2ConstPtr& cloud_msg); +bool detection_activation_switch( + std_srvs::SetBool::Request &req, + std_srvs::SetBool::Response &resp); + +using namespace std; +using namespace cv; +using namespace sparsestereo; +using namespace boost; +using namespace boost::posix_time; +using ros::param::param; + +struct XYZRGB{ + + float x, y, z; + uint8_t r, g, b; + + bool operator==(const XYZRGB &other) const{ + if(this->x == other.x && this->y == other.y && this->z == other.z){ + return true; + } + else return false; + } + + bool operator<(const XYZRGB &other) const{ + if(this->x > other.x) return false; + else if(this->y > other.y) return false; + else if(this->z >= other.z) return false; + else return true; + } +}; + +// Image storage +sensor_msgs::ImageConstPtr left_most_recent, right_most_recent; +sensor_msgs::CameraInfoConstPtr left_most_recent_info, right_most_recent_info; + +// Stereo subscribing +image_transport::CameraSubscriber left_image_sub, right_image_sub; +image_geometry::PinholeCameraModel left_cam_model, right_cam_model; + +// Velodyne subscribing +ros::Subscriber pcd_sub; + +// Camera projection matrices +Matx34d left_P, right_P; + +// TF +// tf::TransformListener tf_listener; +string tf_frame_stereo_l {"/stereo_left_cam"}; +string tf_frame_stereo_r {"/stereo_right_cam"}; +string tf_frame_velodyne {"/velodyne"}; +tf::StampedTransform stereoL_tf_velodyne; + +// Point clouds +vector stereo_point_cloud; +// sensor_msgs::PointCloud2 pt_cloud_msg; +std_msgs::Header header; +int pt_cloud_seq {0}; + +// Point cloud publisher +ros::Publisher pcl_pub; + +// Thread safety +boost::mutex left_mtx, right_mtx; + +// Control +bool active = false; +ros::ServiceServer detection_switch; + + +int main(int argc, char** argv) { + namespace fs = boost::filesystem; + + try { + + stringstream log_msg; + log_msg << "\nInitializing Sparse Stereo Point Cloud Driver:\n"; + int tab_sz = 4; + + // globals + ros::init(argc, argv, "stereo_point_cloud_driver"); + ros::NodeHandle nh; + image_transport::ImageTransport img_trans {nh}; + image_geometry::PinholeCameraModel left_cam_model, right_cam_model; + cv_bridge::CvImagePtr input_bridge; + string activation_srv_name {"stereo/activation_srv"}; + nav::PcdColorizer vel_colorizer{nh, "/velodyne_points", "/colored_velodyne_cloud", "/stereo/left/image_rect_color", "/stereo_left_cam"}; + + // Default parameters + string img_topic_left_default = "/stereo/left/image_raw/"; + string img_topic_right_default = "/stereo/right/image_raw/"; + string activation_default = "/stereo/activation_switch"; + string dbg_topic_default = "/stereo/dbg_imgs"; + + // Advertise activation switch + detection_switch = nh.advertiseService( + activation_srv_name, detection_activation_switch); + + // Advertise point cloud topic + pcl_pub = 
nh.advertise ("stereo_front_point_cloud", 1); + + // Subscribe to Cameras (image + camera_info) + cout << "subscribing to cameras" << endl; + string left = param("/stereo/input_left", img_topic_left_default); + string right = param("/stereo/input_right", img_topic_right_default); + cout << "Got topic name params!" << endl; + left_image_sub = img_trans.subscribeCamera(left, 10, left_image_callback); + right_image_sub = img_trans.subscribeCamera(right, 10, right_image_callback); + log_msg << setw(1 * tab_sz) << "" << "Camera Subscriptions:\x1b[37m\n" + << setw(2 * tab_sz) << "" << "left = " << left << endl + << setw(2 * tab_sz) << "" << "right = " << right << "\x1b[0m\n"; + cout << left << ' ' << right << endl; + ROS_INFO(log_msg.str().c_str()); + + // Subscribe to Velodyne scan + pcd_sub = nh.subscribe ("/velodyne_points", 1, cloud_callback); + + // Wait for transform between velodyne and stereo frames + tf::TransformListener tf_listener; + ROS_INFO("Waiting 10 seconds for tf between velodyne and stereo frames to become available..."); + // try{ + // ros::Time now = ros::Time(0); + // if (tf_listener.waitForTransform(tf_frame_velodyne, tf_frame_stereo_l, + // now, ros::Duration(10.0))){ + // cout << "TF is available." << endl; + // } + // else cout << "Timed out waiting for TF." << endl; + + // tf_listener.lookupTransform(tf_frame_velodyne, tf_frame_stereo_l, now, stereoL_tf_velodyne); + // // cout << "TF:" << stereoL_tf_velodyne << endl; + // } + // catch (const std::exception& e) { + // cerr << "Fatal exception while acquiring TF's: " << e.what(); + // return -1; + // } + + + + // Size adjustment ROI + /* + exFAST is currently heavily optimized for VGA size images, extracting a + properly sized region of interest is much more efficient than resizing the image + */ + + // Stereo matching parameters + double uniqueness = 0.8; + int maxDisp = 70; + int leftRightStep = 2; + + // Feature detection parameters + double adaptivity = 1.0; + int minThreshold = 10; + + // Load rectification data + fs::path calibration_file_path {ros::package::getPath("navigator_perception") + "/calibration.xml"}; + cout << calibration_file_path << endl; + if ( !boost::filesystem::exists( calibration_file_path ) ) // Check file exists + { + cout << "Can't find calibration file!" << std::endl; + throw "Can't find calibration file!"; + } + StereoRectification* rectification = NULL; + rectification = new StereoRectification(CalibrationResult(calibration_file_path.c_str())); + + // The stereo matcher. SSE Optimized implementation is only available for a 5x5 window + SparseStereo, short> stereo(maxDisp, 1, uniqueness, rectification, + false, false, leftRightStep); + + // Feature detectors for left and right image + FeatureDetector* leftFeatureDetector = new ExtendedFAST(true, minThreshold, adaptivity, true, 10); + FeatureDetector* rightFeatureDetector = new ExtendedFAST(false, minThreshold, adaptivity, true, 10); + + // Sparse Correspondences + vector correspondences; + + // Main stereo processing loop + ros::Rate loop_rate(10); // process images 10 times per second + cout << "\x1b[1;31mMain Loop!\x1b[0m" << endl; + while (ros::ok()) { + cout << "clock: " << microsec_clock::local_time() << endl; + cout << " The left Camera Subscriber is connected to " << left_image_sub.getNumPublishers() << " publishers." << endl; + cout << " The right Camera Subscriber is connected to " << right_image_sub.getNumPublishers() << " publishers." << endl; + cout << "active=" << (active? 
"true" : "false") << endl; + cout << "left_most_recent=" << left_most_recent << " right_most_recent=" << right_most_recent << endl; + // Idle if active is not set or valid image pointer pairs have not been retrieved + if (!active || (left_most_recent == nullptr) || (right_most_recent == nullptr)){ + ros::spinOnce(); // still handle ROS callbacks if skipping an iteration + loop_rate.sleep(); + cout << "Skipping this stereo pair for processing." << endl; + continue; + } + try { + // Containers + Mat raw_left, raw_right, draw_left, draw_right; + Mat_ leftImg, rightImg; + + // Thread Safety + cout << "Getting frame data" << endl; + left_mtx.lock(); + right_mtx.lock(); + { + // Get Frame Message data + namespace cvb = cv_bridge; + cout << "left_most_recent: " << left_most_recent << endl; + cout << "right_most_recent: " << right_most_recent << endl; + input_bridge = cvb::toCvCopy(left_most_recent, sensor_msgs::image_encodings::BGR8); + raw_left = input_bridge->image; + draw_left = raw_left.clone(); + // imshow("raw_left", raw_left); waitKey(0); + cout << "raw_left_size:" << raw_left.size() << endl; + input_bridge = cvb::toCvCopy(right_most_recent, sensor_msgs::image_encodings::BGR8); + raw_right = input_bridge->image; + draw_right = raw_right.clone(); + } + cout << "Got frame data" << endl; + left_mtx.unlock(); + right_mtx.unlock(); + + // Check images for data and adjust size + if(raw_left.data == NULL || raw_right.data == NULL) + throw sparsestereo::Exception("Unable to open input images!"); + cvtColor(raw_left, leftImg, CV_BGR2GRAY); + cvtColor(raw_right, rightImg, CV_BGR2GRAY); + + // Enforce approximate image synchronization + double left_stamp = left_most_recent_info->header.stamp.toSec(); + double right_stamp = right_most_recent_info->header.stamp.toSec(); + double sync_error = fabs(left_stamp - right_stamp); + stringstream sync_msg; + sync_msg << "Left and right images were not sufficiently synchronized" + << "\nsync error: " << sync_error << "s"; + if (sync_error > 0.3) { + ROS_WARN(sync_msg.str().c_str()); + cout << "skipping" << endl; + continue; + } + + // Objects for storing final and intermediate results + int rows = leftImg.rows; + int cols = leftImg.cols; + Mat_ charLeft(rows, cols); + Mat_ charRight(rows, cols); + Mat_ censusLeft(rows, cols); + Mat_ censusRight(rows, cols); + vector keypointsLeft, keypointsRight; + + ptime lastTime = microsec_clock::local_time(); // Time algorithm start time + + // Calculate census transforms for both images and detet features + // for both images in parallel + #pragma omp parallel sections default(shared) num_threads(2) + { + #pragma omp section + { + ImageConversion::unsignedToSigned(leftImg, &charLeft); + Census::transform5x5(charLeft, &censusLeft); + keypointsLeft.clear(); + leftFeatureDetector->detect(leftImg, keypointsLeft); + } + #pragma omp section + { + ImageConversion::unsignedToSigned(rightImg, &charRight); + Census::transform5x5(charRight, &censusRight); + keypointsRight.clear(); + rightFeatureDetector->detect(rightImg, keypointsRight); + } + } + + // Stereo matching and reconstruction + Point2f pt_L; + Point2f pt_R; + Matx31d pt_L_hom; + Matx31d pt_R_hom; + Mat X, X_hom; + Vec3b color; + + // Calculate correspondences in Blue channel + correspondences.clear(); + stereo.match(censusLeft, censusRight, keypointsLeft, keypointsRight, &correspondences); + + for(int i=0; i<(int)correspondences.size(); i++) { + + // Visualize feature points + Scalar rand_color = Scalar(rand() % 256, rand() % 256, rand() % 256); + rectangle(raw_left, pt_L - 
Point2f(2,2), pt_L + Point2f(2, 2), rand_color, CV_FILLED); + rectangle(raw_right, pt_R - Point2f(2,2), pt_R + Point2f(2, 2), rand_color, CV_FILLED); + + // Triangulate matches to reconstruct 3D point + pt_L = correspondences[i].imgLeft->pt; + pt_R = correspondences[i].imgRight->pt; + pt_L_hom = Matx31d(pt_L.x, pt_L.y, 1); + pt_R_hom = Matx31d(pt_R.x, pt_R.y, 1); + X_hom = nav::triangulate_Linear_LS(Mat(left_P), Mat(right_P), Mat(pt_L_hom), Mat(pt_R_hom)); + X_hom = X_hom / X_hom.at(3, 0); + + // Get Color and fill point_cloud element precursor + XYZRGB point; + point.x = X_hom.at(0, 0); + point.y = X_hom.at(1, 0); + point.z = X_hom.at(2, 0); + Vec3b pix_color = raw_left.at(pt_L); + cout << i << "ptL: " << pt_L << " ptR: " << pt_R << " Color: " << pix_color + << " " << X_hom.t() << endl; + point.b = pix_color[0]; + point.g = pix_color[1]; + point.r = pix_color[2]; + stereo_point_cloud.push_back(point); + } + + // Display image + Mat screen = Mat::zeros(Size(leftImg.cols*2, leftImg.rows), CV_8UC3); + raw_left.copyTo(screen(Rect(Point(0,0), leftImg.size()))); + raw_right.copyTo(screen(Rect(Point(leftImg.cols,0), leftImg.size()))); + namedWindow("Stereo"); + imshow("Stereo", screen); + waitKey(0); + + // Print statistics + time_duration elapsed = (microsec_clock::local_time() - lastTime); + cout << "Time for stereo matching: " << elapsed.total_microseconds()/1.0e6 << "s" << endl + << "Features detected in left image: " << keypointsLeft.size() << endl + << "Features detected in right image: " << keypointsRight.size() << endl + << "Percentage of matched features: " << (100.0 * correspondences.size() / keypointsLeft.size()) << "%" << endl; + + // Populate point cloud message header + sensor_msgs::PointCloud2 pt_cloud_msg; + header.seq = pt_cloud_seq++; + ros::Time frame_time_l = left_most_recent_info->header.stamp; + ros::Time frame_time_r = right_most_recent_info->header.stamp; + ros::Duration offset((frame_time_r - frame_time_l).toSec() / 2); + header.stamp = left_most_recent_info->header.stamp + offset; + header.frame_id = tf_frame_stereo_l; + pt_cloud_msg.header = header; + cout << "pcd header: " < iter_x(pt_cloud_msg, "x"); + sensor_msgs::PointCloud2Iterator iter_y(pt_cloud_msg, "y"); + sensor_msgs::PointCloud2Iterator iter_z(pt_cloud_msg, "z"); + sensor_msgs::PointCloud2Iterator iter_b(pt_cloud_msg, "b"); + sensor_msgs::PointCloud2Iterator iter_g(pt_cloud_msg, "g"); + sensor_msgs::PointCloud2Iterator iter_r(pt_cloud_msg, "r"); + + cout << "filling pcd2 msg " << endl; + for(size_t i = 0; i < stereo_point_cloud.size(); ++i, ++iter_x, ++iter_y, ++iter_z, ++iter_b, ++iter_g, ++iter_r) + { + *iter_x = stereo_point_cloud[i].x; + *iter_y = stereo_point_cloud[i].y; + *iter_z = stereo_point_cloud[i].z; + *iter_b = stereo_point_cloud[i].b; + *iter_g = stereo_point_cloud[i].g; + *iter_r = stereo_point_cloud[i].r; + cout << i << "XYZBGR: " << *iter_x << " " << *iter_y << " " << *iter_z << " " << int(*iter_b) << " " << int(*iter_g) << " " << + int(*iter_r) << endl; + } + + // Publish point cloud + pcl_pub.publish(pt_cloud_msg); + + // Empty point cloud container for upcoming iteration + cout << "PCD size: " << pt_cloud_msg.height * pt_cloud_msg.row_step + << ", " << stereo_point_cloud.size() << endl; + stereo_point_cloud.clear(); + + // Handle ROS callbacks + ros::spinOnce(); + } + catch (const std::exception& e) { + cerr << "Fatal exception during main loop: " << e.what(); + return -1; + } + loop_rate.sleep(); + } // end main while loop + return 0; + } // end large try block + catch (const 
std::exception& e) {
+    cerr << "Fatal exception during initialization: " << e.what();
+    return -1;
+  }
+}
+
+bool detection_activation_switch(
+    std_srvs::SetBool::Request &req,
+    std_srvs::SetBool::Response &resp) {
+  stringstream ros_log;
+  ros_log << "\x1b[1;31mSetting stereo point cloud generation to: \x1b[1;37m"
+          << (req.data ? "on" : "off") << "\x1b[0m";
+  ROS_INFO(ros_log.str().c_str());
+  active = req.data;
+  resp.success = true;
+  return true;
+}
+
+void left_image_callback(
+    const sensor_msgs::ImageConstPtr &image_msg_ptr,
+    const sensor_msgs::CameraInfoConstPtr &info_msg_ptr) {
+
+  // Get ptrs to most recent frame and info (thread safe)
+  left_mtx.lock();
+  cout << "\x1b[1;31mleft callback!\x1b[0m" << endl;
+  left_most_recent = image_msg_ptr;
+  left_most_recent_info = info_msg_ptr;
+  left_mtx.unlock();
+
+  // Initialize camera parameters
+  static bool first_call = true;
+  if(first_call){
+    left_cam_model.fromCameraInfo(left_most_recent_info);
+    left_P = left_cam_model.fullProjectionMatrix();
+    tf_frame_stereo_l = left_cam_model.tfFrame();
+    first_call = false;
+  }
+}
+
+void right_image_callback(
+    const sensor_msgs::ImageConstPtr &image_msg_ptr,
+    const sensor_msgs::CameraInfoConstPtr &info_msg_ptr) {
+
+  // Get ptrs to most recent frame and info (thread safe)
+  right_mtx.lock();
+  cout << "\x1b[1;31mright callback!\x1b[0m" << endl;
+  right_most_recent = image_msg_ptr;
+  right_most_recent_info = info_msg_ptr;
+  right_mtx.unlock();
+
+  // Initialize camera parameters
+  static bool first_call = true;
+  if(first_call){
+    right_cam_model.fromCameraInfo(right_most_recent_info);
+    right_P = right_cam_model.fullProjectionMatrix();
+    right_P(0, 3) = -140;
+    cout << "right projection_matrix: " << right_P << endl;
+    tf_frame_stereo_r = right_cam_model.tfFrame();
+    first_call = false;
+  }
+}
+
+void cloud_callback (const sensor_msgs::PointCloud2ConstPtr& cloud_msg){
+  ROS_INFO("velodyne_points callback");
+  sensor_msgs::PointCloud2 colored_velodyne_pcd;
+
+}
+
diff --git a/perception/navigator_vision/package.xml b/perception/navigator_vision/package.xml
new file mode 100644
index 00000000..40a0abe9
--- /dev/null
+++ b/perception/navigator_vision/package.xml
@@ -0,0 +1,35 @@
+<?xml version="1.0"?>
+<package>
+  <name>navigator_vision</name>
+  <version>1.0.0</version>
+  <description>Nodes and libraries used for computer vision perception on Navigator</description>
+
+  <maintainer>tess</maintainer>
+  <maintainer>david</maintainer>
+  <license>MIT</license>
+  <buildtool_depend>catkin</buildtool_depend>
+  <build_depend>roscpp</build_depend>
+  <run_depend>roscpp</run_depend>
+  <build_depend>rospy</build_depend>
+  <run_depend>rospy</run_depend>
+  <build_depend>image_transport</build_depend>
+  <run_depend>image_transport</run_depend>
+  <build_depend>image_geometry</build_depend>
+  <run_depend>image_geometry</run_depend>
+  <build_depend>cv_bridge</build_depend>
+  <run_depend>cv_bridge</run_depend>
+  <build_depend>navigator_msgs</build_depend>
+  <run_depend>navigator_msgs</run_depend>
+  <build_depend>message_generation</build_depend>
+  <run_depend>message_runtime</run_depend>
+  <build_depend>std_msgs</build_depend>
+  <run_depend>std_msgs</run_depend>
+  <build_depend>geometry_msgs</build_depend>
+  <run_depend>geometry_msgs</run_depend>
+  <build_depend>sensor_msgs</build_depend>
+  <run_depend>sensor_msgs</run_depend>
+  <run_depend>libpcl-all</run_depend>
+  <build_depend>tf2_sensor_msgs</build_depend>
+  <run_depend>tf2_sensor_msgs</run_depend>
+  <run_depend>tf2_geometry_msgs</run_depend>
+</package>
diff --git a/perception/navigator_vision/ros_tools/on_the_fly_thresholder.py b/perception/navigator_vision/ros_tools/on_the_fly_thresholder.py
new file mode 100755
index 00000000..ebad11fe
--- /dev/null
+++ b/perception/navigator_vision/ros_tools/on_the_fly_thresholder.py
@@ -0,0 +1,117 @@
+#!/usr/bin/python
+from txros import util, NodeHandle
+from twisted.internet import defer, reactor
+from sensor_msgs.msg import Image, CompressedImage
+from cv_bridge import CvBridge, CvBridgeError
+
+from navigator_tools import image_helpers
+import numpy as np
+import argcomplete
+import sys
+import argparse
+import cv2
+
+
+class Trackbars(object):
+    '''
+    Keeps track of the different colorspace trackbars
+    '''
+    def __init__(self, thresh_type):
+        self.thresh_type = list(thresh_type)
+        [cv2.createTrackbar(name + '_high', 'parameters', 0, 255 if name 
!= 'h' else 179, lambda a: a) for name in self.thresh_type] + [cv2.createTrackbar(name + '_low', 'parameters', 0, 255 if name != 'h' else 179, lambda a: a) for name in self.thresh_type] + + cv2.waitKey(10) + + def get_bounds(self): + upper_bounds = np.array([cv2.getTrackbarPos(name + '_high', 'parameters') for name in self.thresh_type]) + lower_bounds = np.array([cv2.getTrackbarPos(name + '_low', 'parameters') for name in self.thresh_type]) + return lower_bounds, upper_bounds + + +class Thresholder(object): + ''' + Does the thresholding and manages the windows associated with that + ''' + def __init__(self, img, thresh_type='bgr'): + cv2.namedWindow('parameters', cv2.WINDOW_NORMAL) + cv2.namedWindow('mask') + cv2.imshow('mask', np.zeros_like(img[:,:,0])) + + self.image = img + + self.thresh_type = thresh_type + self.trackbars = Trackbars(thresh_type) + + self.lower = None + self.upper = None + + def update_mask(self): + self.lower, self.upper = self.trackbars.get_bounds() + self.mask = cv2.inRange(self.image if self.thresh_type == 'bgr' else cv2.cvtColor(self.image, cv2.COLOR_BGR2HSV), + self.lower, self.upper) + cv2.imshow("mask", self.mask) + + def to_dict(self): + lower = map(float, self.lower) + upper = map(float, self.upper) + return {'color_space':self.thresh_type, 'ranges': {'lower': lower, 'upper': upper}, 'invert': False} + + +@util.cancellableInlineCallbacks +def main(param_prefix, args): + nh = yield NodeHandle.from_argv("on_the_fly_mission_runner", anonymous=True) + + image_sub = yield nh.subscribe(args.topic_name, Image) + img = yield util.wrap_timeout(image_sub.get_next_message(), 5).addErrback(errback) + + np_img = image_helpers.get_image_msg(img, "bgr8") + cv2.imshow(args.topic_name, np_img) + t = Thresholder(np_img, 'hsv' if args.hsv else 'bgr') + + k = 0 + while k != ord('q'): # q to quit without saving + t.update_mask() + k = cv2.waitKey(50) & 0xFF + + if k == ord('s'): # s to save parameters + print "Saving params:" + temp = t.to_dict() + print temp + nh.set_param(param_prefix, temp) + break + + cv2.destroyAllWindows() + reactor.stop() + + +def errback(e): + '''Just for catching errors''' + print "Error when looking for image." + print " - You probably entered the wrong image topic." + reactor.stop() + + +def do_parsing(): + usage_msg = "Useful for doing on-the-fly color thresholding." + desc_msg = "Pass the name of the topic to listen to and the parameter family to set." + + parser = argparse.ArgumentParser(usage=usage_msg, description=desc_msg) + parser.add_argument(dest='topic_name', + help="The topic name you'd like to listen to.") + parser.add_argument(dest='parameter_family', + help="This script will set the rosparams: \n\t `parameter_family`/bgr_high \n\t `parameter_family`/bgr_low \n OR \ + \n\t `parameter_family`/hsv_high \n\t `parameter_family`/hsv_high \n (depending on --hsv)") + parser.add_argument('--hsv', action='store_true', + help="Use HSV instead of BGR") + + args = parser.parse_args(sys.argv[1:]) + print 'Using {}'.format('HSV' if args.hsv else 'BGR') + + return args.parameter_family, args + + +if __name__ == '__main__': + param_prefix, args = do_parsing() + reactor.callWhenRunning(main, param_prefix, args) + reactor.run() diff --git a/perception/navigator_vision/setup.py b/perception/navigator_vision/setup.py new file mode 100644 index 00000000..29a9ec75 --- /dev/null +++ b/perception/navigator_vision/setup.py @@ -0,0 +1,11 @@ +# ! 
DO NOT MANUALLY INVOKE THIS setup.py, USE CATKIN INSTEAD + +from distutils.core import setup +from catkin_pkg.python_setup import generate_distutils_setup + +# fetch values from package.xml +setup_args = generate_distutils_setup( + packages=[''], +) + +setup(**setup_args) diff --git a/perception/navigator_vision/src/navigator_vision_lib/cv_utils.cc b/perception/navigator_vision/src/navigator_vision_lib/cv_utils.cc new file mode 100644 index 00000000..1787b747 --- /dev/null +++ b/perception/navigator_vision/src/navigator_vision_lib/cv_utils.cc @@ -0,0 +1,656 @@ +#include + +namespace nav { + +cv::Point contour_centroid(Contour &contour) { + cv::Moments m = cv::moments(contour, true); + cv::Point center(m.m10 / m.m00, m.m01 / m.m00); + return center; +} + +bool larger_contour(const Contour &c1, const Contour &c2) { + if (cv::contourArea(c1) > cv::contourArea(c2)) + return true; + else + return false; +} + +cv::MatND smooth_histogram(const cv::MatND &histogram, + size_t filter_kernel_size, float sigma) { + cv::MatND hist = histogram.clone(); + std::vector gauss_kernel = + generate_gaussian_kernel_1D(filter_kernel_size, sigma); + size_t histSize = hist.total(); + int offset = (filter_kernel_size - 1) / 2; + for (size_t i = offset; i < histSize - offset; + i++) // Convolve histogram values with gaussian kernel + { + int sum = 0; + int kernel_idx = 0; + for (int j = i - offset; j <= int(i + offset); j++) { + sum += (hist.at(j) * gauss_kernel[kernel_idx++]); + } + hist.at(i) = sum; + } + for (int i = 0; i < offset; ++i) // Pad filtered result with zeroes + { + hist.at(i) = 0; + hist.at(histSize - 1 - i) = 0; + } + return hist; +} + +std::vector generate_gaussian_kernel_1D(size_t kernel_size, + float sigma) { + std::vector kernel; + int middle_index = (kernel_size - 1) / 2; + int first_discrete_sample_x = -(middle_index); + for (int i = first_discrete_sample_x; i <= 0; i++) { + float power = -0.5 * (float(i) / sigma) * (float(i) / sigma); + kernel.push_back( + exp(power)); // From definition of Standard Normal Distribution + } + for (int i = 1; i <= middle_index; i++) { // Kernel is symmetric + kernel.push_back(kernel[middle_index - i]); + } + // Normalize kernel (sum of values should equal 1.0) + float sum = 0; + for (size_t i = 0; i < kernel_size; i++) { + sum += kernel[i]; + } + for (size_t i = 0; i < kernel_size; i++) { + kernel[i] /= sum; + } + return kernel; +} + +std::vector find_local_maxima(const cv::MatND &histogram, + float thresh_multiplier) { + + std::stringstream ros_log; + ros_log << "\x1b[1;31m" + << "find_local_maxima" + << "\x1b[0m" << std::endl; + + std::vector local_maxima, threshed_local_maxima; + float global_maximum = -std::numeric_limits::infinity(); + + // Locate local maxima and find global maximum + for (size_t idx = 1; idx < histogram.total() - 1; idx++) { + float current_value = histogram.at(idx); + if ((histogram.at(idx - 1) < current_value) && + (histogram.at(idx + 1) <= current_value)) { + local_maxima.push_back(cv::Point(idx, current_value)); + if (global_maximum < current_value) + global_maximum = current_value; + } + } + ros_log << "Maxima [x, y]:"; + for (size_t i = 0; i < local_maxima.size(); i++) { + if (local_maxima[i].y > global_maximum * thresh_multiplier) + threshed_local_maxima.push_back(local_maxima[i]); + if (i % 4 == 0) + ros_log << std::endl + << "\t"; + ros_log << "[" << std::setw(5) << local_maxima[i].x << "," << std::setw(5) + << local_maxima[i].y << "] "; + } + ros_log << std::endl + << "thresh: > global_maximum(" << global_maximum + << ") * 
thresh_multiplier(" << thresh_multiplier + << ") = " << (global_maximum * thresh_multiplier); + ros_log << std::endl + << "Threshed Maxima (x): "; + if (threshed_local_maxima.size() != local_maxima.size()) { + BOOST_FOREACH (cv::Point pt, threshed_local_maxima) { + ros_log << " " << pt.x << " "; + } + } else + ros_log << "same as 'Maxima'"; + ros_log << std::endl; +#ifdef SEGMENTATION_DEBUG + ROS_INFO(ros_log.str().c_str()); +#endif + return threshed_local_maxima; +} + +std::vector find_local_minima(const cv::MatND &histogram, + float thresh_multiplier) { + + std::stringstream ros_log; + ros_log << "\x1b[1;31m" + << "find_local_minima" + << "\x1b[0m" << std::endl; + + std::vector local_minima, threshed_local_minima; + float global_minimum = std::numeric_limits::infinity(); + ; + + // Locate local minima and find global minimum + for (size_t idx = 1; idx < histogram.total() - 1; idx++) { + float current_value = histogram.at(idx); + if ((histogram.at(idx - 1) >= current_value) && + (histogram.at(idx + 1) > current_value)) { + local_minima.push_back(cv::Point(idx, current_value)); + if (global_minimum > current_value) + global_minimum = current_value; + } + } + ros_log << "Minima [x, y]:"; + for (size_t i = 0; i < local_minima.size(); i++) { + if (local_minima[i].y < global_minimum * thresh_multiplier) + threshed_local_minima.push_back(local_minima[i]); + if (i % 4 == 0) + ros_log << std::endl + << "\t"; + ros_log << "[" << std::setw(5) << local_minima[i].x << "," << std::setw(5) + << local_minima[i].y << "] "; + } + ros_log << std::endl + << "thresh: < global_minimum(" << global_minimum + << ") * thresh_multiplier(" << thresh_multiplier + << ") = " << (global_minimum * thresh_multiplier); + ros_log << std::endl + << "Threshed Minima (x): "; + if (threshed_local_minima.size() != local_minima.size()) { + BOOST_FOREACH (cv::Point pt, threshed_local_minima) { + ros_log << " " << pt.x << " "; + } + } else + ros_log << "same as 'Minima'"; + ros_log << std::endl; +#ifdef SEGMENTATION_DEBUG + ROS_INFO(ros_log.str().c_str()); +#endif + return threshed_local_minima; +} + +unsigned int select_hist_mode(std::vector &histogram_modes, + int target) { + + std::stringstream ros_log; + ros_log << "\x1b[1;31m" + << "select_hist_mode" + << "\x1b[0m" << std::endl; + + std::vector distances; + BOOST_FOREACH (cv::Point mode, histogram_modes) { + distances.push_back(mode.x - target); + } + int min_idx = 0; + for (size_t i = 0; i < distances.size(); i++) { + if (std::abs(distances[i]) <= std::abs(distances[min_idx])) + min_idx = i; + } + if (histogram_modes.size() == 0) { + ros_log << "No modes could be generated" << std::endl; + ROS_INFO(ros_log.str().c_str()); + return 0; + } else + return histogram_modes[min_idx].x; +} + +void statistical_image_segmentation(const cv::Mat &src, cv::Mat &dest, + cv::Mat &debug_img, const int hist_size, + const float **ranges, const int target, + std::string image_name, bool ret_dbg_img, + const float sigma, + const float low_thresh_gain, + const float high_thresh_gain) { + std::stringstream ros_log; + ros_log << "\x1b[1;31m" + << "statistical_image_segmentation" + << "\x1b[0m" << std::endl; + + // Calculate histogram + cv::MatND hist, hist_smooth, hist_derivative; + cv::calcHist(&src, 1, 0, cv::Mat(), hist, 1, &hist_size, ranges, true, false); + + // Smooth histogram + const int kernel_size = 11; + hist_smooth = nav::smooth_histogram(hist, kernel_size, sigma); + + // Calculate histogram derivative (central finite difference) + hist_derivative = hist_smooth.clone(); + 
hist_derivative.at(0) = 0; + hist_derivative.at(hist_size - 1) = 0; + for (int i = 1; i < hist_size - 1; ++i) { + hist_derivative.at(i) = + (hist_smooth.at(i + 1) - hist_smooth.at(i - 1)) / 2.0; + } + hist_derivative = nav::smooth_histogram(hist_derivative, kernel_size, sigma); + + // Find target mode + std::vector histogram_modes = + nav::find_local_maxima(hist_smooth, 0.1); + int target_mode = nav::select_hist_mode(histogram_modes, target); + ros_log << "Target: " << target << std::endl; + ros_log << "Mode Selected: " << target_mode << std::endl; + + // Calculate std dev of histogram slopes + cv::Scalar hist_deriv_mean, hist_deriv_stddev; + cv::meanStdDev(hist_derivative, hist_deriv_mean, hist_deriv_stddev); + + // Determine thresholds for cv::inRange() using the std dev of histogram + // slopes times a gain as a cutoff heuristic + int high_abs_derivative_thresh = + std::abs(hist_deriv_stddev[0] * high_thresh_gain); + int low_abs_derivative_thresh = + std::abs(hist_deriv_stddev[0] * low_thresh_gain); + std::vector derivative_maxima = + nav::find_local_maxima(hist_derivative, 0.01); + std::vector derivative_minima = + nav::find_local_minima(hist_derivative, 0.01); + int high_thresh_search_start = target_mode; + int low_thresh_search_start = target_mode; + ros_log << "high_thresh_search_start: " << target_mode << std::endl; + ros_log << "Looking for the local minimum of the derivative of the histogram " + "immediately to the right of the selected mode." << std::endl; + + for (size_t i = 0; i < derivative_minima.size(); i++) { + ros_log << "\tderivative_minima[" << i << "].x = " << derivative_minima[i].x + << std::endl; + if (derivative_minima[i].x > target_mode) { + high_thresh_search_start = derivative_minima[i].x; + ros_log << "Done: The upper threshold will be no less than " + << high_thresh_search_start << std::endl; + break; + } + } + ros_log << "low_thresh_search_start: " << target_mode << std::endl; + ros_log << "Looking for the local maximum of the derivative of the histogram " + "immediately to the left of the selected mode." 
<< std::endl; + for (int i = derivative_maxima.size() - 1; i >= 0; i--) { + ros_log << "\tderivative_maxima[" << i << "].x = " << derivative_maxima[i].x + << std::endl; + if (derivative_maxima[i].x < target_mode) { + low_thresh_search_start = derivative_maxima[i].x; + ros_log << "Done: The lower threshold will be no greater than " + << low_thresh_search_start << std::endl; + break; + } + } + int high_thresh = high_thresh_search_start; + int low_thresh = low_thresh_search_start; + ros_log << "high_deriv_thresh: " << hist_deriv_stddev[0] << " * " + << high_thresh_gain << " = " << high_abs_derivative_thresh + << std::endl; + ros_log << "abs(high_deriv_thresh) - abs(slope) = slope_error" << std::endl; + ros_log << "i: potential upper threshold" << std::endl; + ros_log << "Looking for first i such that slope_error >= 0" << std::endl; + for (int i = high_thresh_search_start; i < hist_size; i++) { + int abs_slope = std::abs(hist_derivative.at(i)); + ros_log << "\ti = " << i << " : " << high_abs_derivative_thresh << " - " + << abs_slope << " = " << high_abs_derivative_thresh - abs_slope + << std::endl; + if (abs_slope <= high_abs_derivative_thresh) { + high_thresh = i; + break; + } + } + ros_log << "high_thresh = " << high_thresh << std::endl; + ros_log << "low_deriv_thresh: " << hist_deriv_stddev[0] << " * " + << low_thresh_gain << " = " << low_abs_derivative_thresh << std::endl; + ros_log << "abs(low_deriv_thresh) - abs(slope) = slope_error" << std::endl; + ros_log << "i: potential lower threshold" << std::endl; + ros_log << "Looking for first i such that slope_error <= 0" << std::endl; + for (int i = low_thresh_search_start; i > 0; i--) { + int abs_slope = std::abs(hist_derivative.at(i)); + ros_log << "\ti = " << i << " : " << low_abs_derivative_thresh << " - " + << abs_slope << " = " << high_abs_derivative_thresh - abs_slope + << std::endl; + + if (abs_slope <= low_abs_derivative_thresh) { + low_thresh = i; + break; + } + } + + ros_log << "low_thresh = " << low_thresh << std::endl; + ros_log << "\x1b[1;37mTarget: " << target + << "\nClosest distribution mode: " << target_mode + << "\nThresholds selected: low=" << low_thresh + << " high=" << high_thresh << "\x1b[0m" << std::endl; + + // Threshold image + cv::inRange(src, low_thresh, high_thresh, dest); + +#ifdef SEGMENTATION_DEBUG + ROS_INFO(ros_log.str().c_str()); + cv::imshow("src" + image_name, src); + cv::waitKey(1); +#endif + + // Closing Morphology operation + int dilation_size = 2; + cv::Mat structuring_element = cv::getStructuringElement( + cv::MORPH_ELLIPSE, cv::Size(2 * dilation_size + 1, 2 * dilation_size + 1), + cv::Point(dilation_size, dilation_size)); + cv::dilate(dest, dest, structuring_element); + cv::erode(dest, dest, structuring_element); + + if (ret_dbg_img) { + + try { + // Prepare to draw graph of histogram and derivative + int hist_w = src.cols; + int hist_h = src.rows; + int bin_w = hist_w / hist_size; + cv::Mat histImage(hist_h, hist_w, CV_8UC1, cv::Scalar(0, 0, 0)); + cv::Mat histDerivImage(hist_h, hist_w, CV_8UC1, cv::Scalar(0, 0, 0)); + cv::normalize(hist_smooth, hist_smooth, 0, histImage.rows, + cv::NORM_MINMAX, -1, cv::Mat()); + cv::normalize(hist_derivative, hist_derivative, 0, histImage.rows, + cv::NORM_MINMAX, -1, cv::Mat()); + + // Draw Graphs + for (int i = 1; i < hist_size; i++) { + // Plot image histogram + cv::line( + histImage, + cv::Point(bin_w * (i - 1), + hist_h - cvRound(hist_smooth.at(i - 1))), + cv::Point(bin_w * (i), hist_h - cvRound(hist_smooth.at(i))), + cv::Scalar(255, 0, 0), 2, 8, 0); + // Plot 
image histogram derivative + cv::line(histDerivImage, + cv::Point(bin_w * (i - 1), + hist_h - cvRound(hist_derivative.at(i - 1))), + cv::Point(bin_w * (i), + hist_h - cvRound(hist_derivative.at(i))), + cv::Scalar(122, 0, 0), 1, 8, 0); + } + + // Shade in area being segmented under histogram curve + cv::line(histImage, + cv::Point(bin_w * low_thresh, + hist_h - cvRound(hist_smooth.at(low_thresh))), + cv::Point(bin_w * low_thresh, hist_h), cv::Scalar(255, 255, 0), + 2); + cv::line(histImage, + cv::Point(bin_w * high_thresh, + hist_h - cvRound(hist_smooth.at(high_thresh))), + cv::Point(bin_w * high_thresh, hist_h), cv::Scalar(255, 255, 0), + 2); + cv::floodFill( + histImage, + cv::Point(bin_w * cvRound(float(low_thresh + high_thresh) / 2.0), + hist_h - 1), + cv::Scalar(155)); + + // Combine graphs into one image and display results + cv::Mat segmentation_channel = cv::Mat::zeros(histImage.size(), CV_8UC1); + std::vector debug_img_channels; + debug_img_channels.push_back(histImage); + debug_img_channels.push_back(histDerivImage); + debug_img_channels.push_back(dest * 0.25); + cv::merge(debug_img_channels, debug_img); + cv::Point text_upper_left(debug_img.cols / 2.0, debug_img.rows / 10.0); + std::string text_ln1 = image_name; + std::stringstream text_ln2; + text_ln2 << "low = " << low_thresh; + std::stringstream text_ln3; + text_ln3 << "high = " << high_thresh; + int font = cv::FONT_HERSHEY_SIMPLEX; + double font_scale = 0.0015 * debug_img.rows; + cv::Point vert_offset = cv::Point(0, debug_img.rows / 15.0); + cv::Scalar text_color(255, 255, 0); + cv::putText(debug_img, text_ln1, text_upper_left, font, font_scale, + text_color); + cv::putText(debug_img, text_ln2.str(), text_upper_left + vert_offset, + font, font_scale, text_color); + cv::putText(debug_img, text_ln3.str(), + text_upper_left + vert_offset + vert_offset, font, font_scale, + text_color); + } catch (std::exception &e) { + ROS_INFO(e.what()); + } + } +} + +cv::Mat triangulate_Linear_LS(cv::Mat mat_P_l, cv::Mat mat_P_r, + cv::Mat undistorted_l, cv::Mat undistorted_r) { + cv::Mat A(4, 3, CV_64FC1), b(4, 1, CV_64FC1), X(3, 1, CV_64FC1), + X_homogeneous(4, 1, CV_64FC1), W(1, 1, CV_64FC1); + W.at(0, 0) = 1.0; + A.at(0, 0) = + (undistorted_l.at(0, 0) / undistorted_l.at(2, 0)) * + mat_P_l.at(2, 0) - + mat_P_l.at(0, 0); + A.at(0, 1) = + (undistorted_l.at(0, 0) / undistorted_l.at(2, 0)) * + mat_P_l.at(2, 1) - + mat_P_l.at(0, 1); + A.at(0, 2) = + (undistorted_l.at(0, 0) / undistorted_l.at(2, 0)) * + mat_P_l.at(2, 2) - + mat_P_l.at(0, 2); + A.at(1, 0) = + (undistorted_l.at(1, 0) / undistorted_l.at(2, 0)) * + mat_P_l.at(2, 0) - + mat_P_l.at(1, 0); + A.at(1, 1) = + (undistorted_l.at(1, 0) / undistorted_l.at(2, 0)) * + mat_P_l.at(2, 1) - + mat_P_l.at(1, 1); + A.at(1, 2) = + (undistorted_l.at(1, 0) / undistorted_l.at(2, 0)) * + mat_P_l.at(2, 2) - + mat_P_l.at(1, 2); + A.at(2, 0) = + (undistorted_r.at(0, 0) / undistorted_r.at(2, 0)) * + mat_P_r.at(2, 0) - + mat_P_r.at(0, 0); + A.at(2, 1) = + (undistorted_r.at(0, 0) / undistorted_r.at(2, 0)) * + mat_P_r.at(2, 1) - + mat_P_r.at(0, 1); + A.at(2, 2) = + (undistorted_r.at(0, 0) / undistorted_r.at(2, 0)) * + mat_P_r.at(2, 2) - + mat_P_r.at(0, 2); + A.at(3, 0) = + (undistorted_r.at(1, 0) / undistorted_r.at(2, 0)) * + mat_P_r.at(2, 0) - + mat_P_r.at(1, 0); + A.at(3, 1) = + (undistorted_r.at(1, 0) / undistorted_r.at(2, 0)) * + mat_P_r.at(2, 1) - + mat_P_r.at(1, 1); + A.at(3, 2) = + (undistorted_r.at(1, 0) / undistorted_r.at(2, 0)) * + mat_P_r.at(2, 2) - + mat_P_r.at(1, 2); + b.at(0, 0) = + 
-((undistorted_l.at(0, 0) / undistorted_l.at(2, 0)) * + mat_P_l.at(2, 3) - + mat_P_l.at(0, 3)); + b.at(1, 0) = + -((undistorted_l.at(1, 0) / undistorted_l.at(2, 0)) * + mat_P_l.at(2, 3) - + mat_P_l.at(1, 3)); + b.at(2, 0) = + -((undistorted_r.at(0, 0) / undistorted_r.at(2, 0)) * + mat_P_r.at(2, 3) - + mat_P_r.at(0, 3)); + b.at(3, 0) = + -((undistorted_r.at(1, 0) / undistorted_r.at(2, 0)) * + mat_P_r.at(2, 3) - + mat_P_r.at(1, 3)); + solve(A, b, X, cv::DECOMP_SVD); + vconcat(X, W, X_homogeneous); + return X_homogeneous; +} + +Eigen::Vector3d kanatani_triangulation(const cv::Point2d &pt1, + const cv::Point2d &pt2, + const Eigen::Matrix3d &essential, + const Eigen::Matrix3d &R) { + /* + K. Kanatani, Y. Sugaya, and H. Niitsuma. Triangulation from two views + revisited: Hartley-Sturm vs. optimal + correction. In British Machine Vision Conference, page 55, 2008. + */ + // std::cout << "ptL_noisy: " << pt1 << " ptR_noisy: " << pt2 << std::endl; + const unsigned int max_iterations = 7; + Eigen::Vector3d p1_old(pt1.x, pt1.y, 1.0); + Eigen::Vector3d p2_old(pt2.x, pt2.y, 1.0); + const Eigen::Vector3d p1_0(pt1.x, pt1.y, 1.0); + const Eigen::Vector3d p2_0(pt2.x, pt2.y, 1.0); + Eigen::Vector3d p1, p2; + Eigen::Vector2d n1, n2, delta_p1, delta_p2, delta_p1_old, delta_p2_old; + delta_p1_old << 0.0, 0.0; + delta_p2_old << 0.0, 0.0; + Eigen::Matrix S; + S << 1, 0, 0, 0, 1, 0; + Eigen::Matrix2d essential_bar = essential.topLeftCorner(2, 2); + double lambda; + for (unsigned int i = 0; i < max_iterations; i++) { + n1 = S * (essential * p2_old); + n2 = S * (essential.transpose() * p1_old); + lambda = ((p1_0.transpose() * essential * p2_0)(0) - + (delta_p1_old.transpose() * essential_bar * delta_p2_old)(0)) / + (n1.transpose() * n1 + n2.transpose() * n2)(0); + delta_p1 = lambda * n1; + delta_p2 = lambda * n2; + p1 = p1_0 - (S.transpose() * delta_p1); + p2 = p2_0 - (S.transpose() * delta_p2); + p1_old = p1; + p2_old = p2; + delta_p1_old = delta_p1; + delta_p2_old = delta_p2; + // std::cout << "ptL_est: [" << p1.transpose() << "] ptR_est: [" << + // p2.transpose() << "]" << std::endl; + } + Eigen::Vector3d z = p1.cross(R * p2); + Eigen::Vector3d X = + ((z.transpose() * (essential * p2))(0) / (z.transpose() * z)(0)) * p1; + return X; +} + +Eigen::Vector3d lindstrom_triangulation(const cv::Point2d &pt1, + const cv::Point2d &pt2, + const Eigen::Matrix3d &essential, + const Eigen::Matrix3d &R) { + /* + Optimal triangulation method for two cameras with parallel principal + axes + Based of off this paper by Peter Lindstrom: + https://e-reports-ext.llnl.gov/pdf/384387.pdf **Listing 2** + */ + // std::cout << "ptL_noisy: " << pt1 << " ptR_noisy: " << pt2 << std::endl; + const unsigned int max_iterations = 7; + Eigen::Vector3d p1_old(pt1.x, pt1.y, 1.0); + Eigen::Vector3d p2_old(pt2.x, pt2.y, 1.0); + const Eigen::Vector3d p1_0(pt1.x, pt1.y, 1.0); + const Eigen::Vector3d p2_0(pt2.x, pt2.y, 1.0); + Eigen::Vector3d p1, p2; + Eigen::Vector2d n1, n2, delta_p1, delta_p2; + Eigen::Matrix S; + S << 1, 0, 0, 0, 1, 0; + Eigen::Matrix2d essential_bar = essential.topLeftCorner(2, 2); + double a, b, c, d, lambda; + c = p1_0.transpose() * (essential * p2_0); + for (unsigned int i = 0; i < max_iterations; i++) { + n1 = S * (essential * p2_old); + n2 = S * (essential.transpose() * p1_old); + a = n1.transpose() * (essential_bar * n2); + b = (0.5 * ((n1.transpose() * n1) + (n2.transpose() * n2)))(0); + d = sqrt(b * b - a * c); + double signum_b = (b > 0) ? 1 : ((b < 0) ? 
-1 : 0); + lambda = c / (b + signum_b * d); + delta_p1 = lambda * n1; + delta_p2 = lambda * n2; + p1 = p1_0 - (S.transpose() * delta_p1); + p2 = p2_0 - (S.transpose() * delta_p2); + p1_old = p1; + p2_old = p2; + // std::cout << "ptL_est: [" << p1.transpose() << "] ptR_est: [" << + // p2.transpose() << "]" << std::endl; + } + Eigen::Vector3d z = p1.cross(R * p2); + Eigen::Vector3d X = + ((z.transpose() * (essential * p2))(0) / (z.transpose() * z)(0)) * p1; + return X; +} + +ImageWithCameraInfo::ImageWithCameraInfo( + sensor_msgs::ImageConstPtr _image_msg_ptr, + sensor_msgs::CameraInfoConstPtr _info_msg_ptr) + : image_msg_ptr(_image_msg_ptr), info_msg_ptr(_info_msg_ptr), + image_time(_image_msg_ptr->header.stamp) {} + +FrameHistory::FrameHistory(std::string img_topic, unsigned int hist_size) + : topic_name(img_topic), history_size(hist_size), _image_transport(nh), + frame_count(0) { + std::stringstream console_msg; + console_msg << "[FrameHistory] size set to " << history_size << std::endl + << "\tSubscribing to image topic: " << topic_name; + ROS_INFO(console_msg.str().c_str()); + _image_sub = _image_transport.subscribeCamera( + img_topic, 1, &FrameHistory::image_callback, this); + if (_image_sub.getNumPublishers() == 0) { + std::stringstream error_msg; + error_msg << "[FrameHistory] no publishers currently publishing to " + << topic_name; + ROS_WARN(error_msg.str().c_str()); + } +} + +FrameHistory::~FrameHistory() { + std::stringstream console_msg; + console_msg << "[FrameHistory] Unsubscribed from image topic: " << topic_name + << std::endl + << "[FrameHistory] Deleting FrameHistory object" << std::endl; + ROS_INFO(console_msg.str().c_str()); +} + +void FrameHistory::image_callback( + const sensor_msgs::ImageConstPtr &image_msg, + const sensor_msgs::CameraInfoConstPtr &info_msg) { + /** + Adds an ImageWithCameraInfo object to the frame history ring buffer + */ + ImageWithCameraInfo current_frame(image_msg, info_msg); + bool full = _frame_history_ring_buffer.size() >= history_size; + std::stringstream debug_msg; + debug_msg << "Adding frame to ring buffer " + << "[frame=" << frame_count << "," + << "full=" << (full ? 
"true" : "false") + << ",frames_available=" << _frame_history_ring_buffer.size() << "]" + << std::endl; + ROS_DEBUG(debug_msg.str().c_str()); + if (!full) { + _frame_history_ring_buffer.push_back(current_frame); + } else { + _frame_history_ring_buffer[frame_count % history_size] = current_frame; + } + frame_count++; +} + +std::vector +FrameHistory::get_frame_history(unsigned int frames_requested) { + /** + Returns a vector with the last ImageWithCameraInfo + objects + */ + std::vector frame_history; + std::vector sorted_frame_history = + _frame_history_ring_buffer; + if (_frame_history_ring_buffer.size() < frames_requested) { + ROS_WARN("get_frame_history(%d): %d frames were requested, but there are " + "%d frames available", + frames_requested, frames_requested, + _frame_history_ring_buffer.size()); + } else { + std::sort(sorted_frame_history.begin(), sorted_frame_history.end()); + for (size_t i = 0; i < frames_requested; i++) { + frame_history.push_back(sorted_frame_history[i]); + if (i == frames_requested - 1) + break; + } + } + return frame_history; +} + +} // namespace nav diff --git a/perception/navigator_vision/src/navigator_vision_lib/point_cloud_algorithms.cc b/perception/navigator_vision/src/navigator_vision_lib/point_cloud_algorithms.cc new file mode 100644 index 00000000..6f3263de --- /dev/null +++ b/perception/navigator_vision/src/navigator_vision_lib/point_cloud_algorithms.cc @@ -0,0 +1,142 @@ +#include + +using namespace std; +using namespace cv; + +namespace nav{ + +Mat g_color_sequence; + +PcdColorizer::PcdColorizer(ros::NodeHandle nh, string input_pcd_topic, string output_pcd_topic, string rgb_cam_topic, string rgb_cam_frame) +{ + this->nh = nh; + this->input_pcd_topic = input_pcd_topic; + this->output_pcd_topic = output_pcd_topic; + this->rgb_cam_topic = rgb_cam_topic; + this->rgb_cam_frame = rgb_cam_frame; + + cloud_sub = nh.subscribe(input_pcd_topic, 10, &PcdColorizer::cloud_cb, this); + cloud_pub = nh.advertise(output_pcd_topic, 10, false); + rgb_cam_sub = img_transport.subscribeCamera(rgb_cam_topic, 10, + [this](const sensor_msgs::ImageConstPtr &image_msg_ptr, const sensor_msgs::CameraInfoConstPtr &info_msg_ptr) + { + this->latest_frame_img_msg = image_msg_ptr; + this->latest_frame_info_msg = info_msg_ptr; + if(!_intrinsics_set){ + auto K = info_msg_ptr->K; + cam_intrinsics << K[0], K[1], K[2], + K[3], K[4], K[5], + K[6], K[7], K[8]; + _intrinsics_set = true; + } + } + ); +} + +void PcdColorizer::cloud_cb(const PCD &cloud_msg){ + input_pcd = cloud_msg; + _transform_to_cam(); + _color_pcd(); + cloud_pub.publish(output_pcd); + seq++; +} + +void PcdColorizer::_transform_to_cam(){ + tf::StampedTransform vel_to_cam_tf; + ros::Time vel_cloud_stamp = input_pcd.header.stamp; + if(ros::ok()){ + try{ + // tf_listener.lookupTransform(input_pcd.header.frame_id, rgb_cam_frame, vel_cloud_stamp, vel_to_cam_tf); + tf_listener.lookupTransform(rgb_cam_frame, input_pcd.header.frame_id, vel_cloud_stamp, vel_to_cam_tf); + } + catch(std::exception &e){ + cout << "Unable to look up tf between pcd and rgb camera. 
Error: " << e.what() << endl;
+    }
+  }
+  pcl_ros::transformPointCloud(rgb_cam_frame, vel_to_cam_tf, input_pcd, transformed_pcd);
+  // pcl_ros::transformPointCloud(rgb_cam_frame, input_pcd, pcd_cam, tf_listener);
+
+}
+
+void PcdColorizer::_color_pcd(){
+  // configure output_pcd header
+  output_pcd = PCD();
+  output_pcd.header.stamp = input_pcd.header.stamp;
+  output_pcd.header.seq = seq;
+  output_pcd.header.frame_id = rgb_cam_frame;
+  // cout << "OUT CLOUD frame:" << rgb_cam_frame << endl;
+  output_pcd.height = 1;
+  output_pcd.width = input_pcd.width;
+  output_pcd.is_bigendian = false;
+  output_pcd.is_dense = false;
+
+  // Create classes to iterate over and modify clouds
+  sensor_msgs::PointCloud2Iterator<float> iter_x_input{transformed_pcd, "x"};
+  sensor_msgs::PointCloud2Modifier output_pcd_modifier{output_pcd};
+  output_pcd_modifier.setPointCloud2FieldsByString(2, "xyz", "rgb");
+  sensor_msgs::PointCloud2Iterator<float> iter_x{output_pcd, "x"};
+  sensor_msgs::PointCloud2Iterator<float> iter_y{output_pcd, "y"};
+  sensor_msgs::PointCloud2Iterator<float> iter_z{output_pcd, "z"};
+  sensor_msgs::PointCloud2Iterator<uint8_t> iter_b(output_pcd, "b");
+  sensor_msgs::PointCloud2Iterator<uint8_t> iter_g(output_pcd, "g");
+  sensor_msgs::PointCloud2Iterator<uint8_t> iter_r(output_pcd, "r");
+  // cout << "iter_x_output: " << iter_x_output.data_ << endl;
+  // cout << "iter_rgb_output: " << iter_rgb_output.data_ << endl;
+
+  // Latest frame
+  Mat latest_frame_mat;
+  cout << "latest_frame_ptr:" << latest_frame_img_msg << endl;
+  if(latest_frame_img_msg == nullptr)
+    return;
+  // cout << "Getting frame:" << latest_frame_img_msg << endl;
+  cv_bridge::CvImagePtr input_bridge = cv_bridge::toCvCopy(latest_frame_img_msg, sensor_msgs::image_encodings::BGR8);
+  // cout << "extracting frame" << endl;
+  latest_frame_mat = input_bridge->image;
+  // cout << "There is no god!" 
<< endl; + cout << "K: " << cam_intrinsics << endl; + + + + // lambda for checking if point is visible from camera + auto in_view = [=](Eigen::Vector3f v) + {return v[0] >= 0 && v[0] < latest_frame_mat.cols && v[1] >= 0 && v[1] < latest_frame_mat.rows;}; + + while(iter_x_input != iter_x_input.end()) + { + Eigen::Vector3f xyz; + xyz << iter_x_input[0], iter_x_input[1], iter_x_input[2]; + Eigen::Vector3f hom_img_coordinates; + hom_img_coordinates = cam_intrinsics * xyz; // Project point in cam frame into image plane + hom_img_coordinates = hom_img_coordinates / hom_img_coordinates[2]; // normalize homogenous vector + Vec3b rgb; + if(in_view(hom_img_coordinates)){ + // cout << "hom_coords: " << hom_img_coordinates << " cols: " << latest_frame_mat.cols << " rows: " + // << latest_frame_mat.rows << endl; + rgb = latest_frame_mat.at(hom_img_coordinates[1], hom_img_coordinates[0]); + } + else + { + rgb[0] = 122; + rgb[1] = 122; + rgb[2] = 122; + } + + // Set the values of the fields of the colored output cloud + *iter_x = xyz[0]; + *iter_y = xyz[1]; + *iter_z = xyz[2]; + *iter_b = rgb[0]; + *iter_g = rgb[1]; + *iter_r = rgb[2]; + + ++iter_x_input; + ++iter_x; + ++iter_y; + ++iter_z; + ++iter_r; + ++iter_g; + ++iter_b; + } +} + +} //namespace nav \ No newline at end of file diff --git a/perception/navigator_vision/src/navigator_vision_lib/visualization.cc b/perception/navigator_vision/src/navigator_vision_lib/visualization.cc new file mode 100644 index 00000000..f0903073 --- /dev/null +++ b/perception/navigator_vision/src/navigator_vision_lib/visualization.cc @@ -0,0 +1,151 @@ +#include +#include + +namespace nav { + +RvizVisualizer::RvizVisualizer(std::string rviz_topic) { + rviz_pub = nh.advertise(rviz_topic, 1); + ros::spinOnce(); + // Give these guys some time to get ready + ros::Duration(0.5).sleep(); +} + +void RvizVisualizer::visualize_buoy(geometry_msgs::Pose& pose, std::string& frame_id) { + visualization_msgs::Marker marker; + + // Generate sphere marker for the buoy + marker.header.frame_id = frame_id; + marker.header.stamp = ros::Time(); + marker.ns = "sub"; + marker.id = 0; + marker.type = visualization_msgs::Marker::SPHERE; + marker.action = visualization_msgs::Marker::ADD; + marker.pose = pose; + marker.scale.x = 0.2; + marker.scale.y = 0.2; + marker.scale.z = 0.2; + marker.color.a = 1.0; + marker.color.r = 0.0; + marker.color.g = 1.0; + marker.color.b = 0.0; + marker.lifetime = ros::Duration(); + rviz_pub.publish(marker); + + // Generate text to overlay on the buoy (TODO: Put the text offset from the buoy) + marker.header.stamp = ros::Time(); + marker.ns = "sub"; + marker.id = 1; + marker.type = visualization_msgs::Marker::TEXT_VIEW_FACING; + marker.action = visualization_msgs::Marker::ADD; + marker.pose.position.x = pose.position.x; + // y is down in computer vision world + marker.pose.position.y = pose.position.y - 0.25; + marker.pose.position.z = pose.position.z; + // Orientation doesn't matter for this guy + marker.scale.x = 0.2; + marker.scale.y = 0.2; + marker.scale.z = 0.2; + + marker.color.a = 1.0; + marker.color.r = 1.0; + marker.color.g = 0.0; + marker.color.b = 0.0; + marker.lifetime = ros::Duration(); + marker.text = "Buoy Bump Target"; + rviz_pub.publish(marker); +} + +void RvizVisualizer::visualize_torpedo_board(geometry_msgs::Pose& pose, Eigen::Quaterniond orientation, + std::vector& targets, std::vector& corners3d, + std::string& frame_id) { + visualization_msgs::Marker centroid, target_markers, text, borders; + centroid.header.frame_id = 
target_markers.header.frame_id = text.header.frame_id = borders.header.frame_id = frame_id; + centroid.header.stamp = target_markers.header.stamp = text.header.stamp = borders.header.stamp = ros::Time::now(); + centroid.ns = target_markers.ns = text.ns = borders.ns = "torpedo_board"; + centroid.lifetime = target_markers.lifetime = text.lifetime = borders.lifetime = ros::Duration(); + + // Generate sphere marker for the torpedo_board centroid + centroid.id = 0; + centroid.type = visualization_msgs::Marker::SPHERE; + centroid.action = visualization_msgs::Marker::ADD; + centroid.pose = pose; + centroid.scale.x = 0.1; + centroid.scale.y = 0.1; + centroid.scale.z = 0.1; + centroid.color.a = 1.0; + centroid.color.r = 0.0; + centroid.color.g = 1.0; + centroid.color.b = 0.0; + // centroid.pose.orientation.x = orientation.x(); + // centroid.pose.orientation.y = orientation.y(); + // centroid.pose.orientation.z = orientation.z(); + // centroid.pose.orientation.w = orientation.w(); + rviz_pub.publish(centroid); + + // Generate markers denoting the centers of individual targets + target_markers.id = 1; + target_markers.type = visualization_msgs::Marker::SPHERE_LIST; + target_markers.action = visualization_msgs::Marker::ADD; + geometry_msgs::Point p; + for(size_t i = 0; i < targets.size(); i++){ + p.x = targets[i](0); + p.y = targets[i](1); + p.z = targets[i](2); + target_markers.points.push_back(p); + } + target_markers.scale.x = 0.1; + target_markers.scale.y = 0.1; + target_markers.scale.z = 0.1; + target_markers.color.a = 1.0; + target_markers.color.r = 0.0; + target_markers.color.g = 1.0; + target_markers.color.b = 0.0; + // rviz_pub.publish(target_markers); + + // Generate text to overlay on the torpedo_board centroid + text.id = 2; + text.type = visualization_msgs::Marker::TEXT_VIEW_FACING; + text.action = visualization_msgs::Marker::ADD; + text.pose.position.x = pose.position.x; + text.pose.position.y = pose.position.y - 0.25; + text.pose.position.z = pose.position.z; + // Orientation doesn't matter for this guy + text.scale.x = 0.1; + text.scale.y = 0.1; + text.scale.z = 0.1; + text.color.a = 1.0; + text.color.r = 1.0; + text.color.g = 1.0; + text.color.b = 0.0; + text.text = "Torpedo Board Centroid"; + rviz_pub.publish(text); + + // Generate border line markers + borders.id = 3; + borders.type = visualization_msgs::Marker::LINE_STRIP; + borders.action = visualization_msgs::Marker::ADD; + borders.pose.orientation.w = 1.0; + for(size_t i = 0; i <= corners3d.size(); i++){ + if(i == corners3d.size()){ + p.x = corners3d[0](0); + p.y = corners3d[0](1); + p.z = corners3d[0](2); + } + else{ + p.x = corners3d[i](0); + p.y = corners3d[i](1); + p.z = corners3d[i](2); + } + borders.points.push_back(p); + } + borders.scale.x = 0.05; + borders.scale.y = 0.05; + borders.scale.z = 0.05; + borders.color.a = 1.0; + borders.color.r = 1.0; + borders.color.g = 1.0; + borders.color.b = 0.0; + rviz_pub.publish(borders); +} + +} // End namespace "sub" From 5dcf14024446c254b0c31c8e4e1e6915a116ef33 Mon Sep 17 00:00:00 2001 From: mattlangford Date: Thu, 27 Oct 2016 00:32:23 -0400 Subject: [PATCH 104/267] TOOLS: Add 'title' param to print_t --- .../navigator_tools/general_helpers.py | 30 +++++++++++++++---- 1 file changed, 25 insertions(+), 5 deletions(-) diff --git a/utils/navigator_tools/navigator_tools/general_helpers.py b/utils/navigator_tools/navigator_tools/general_helpers.py index 792f34c9..0da897f3 100644 --- a/utils/navigator_tools/navigator_tools/general_helpers.py +++ 
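For readers who want to experiment with these markers without building the C++ library, here is a minimal rospy sketch that publishes the same sphere-plus-label pair the buoy visualizer produces. The topic name and the 0.5 s settle delay mirror the constructor above; everything else (node name, pose values) is illustrative, not part of the patch series.

```python
#!/usr/bin/env python
import rospy
from visualization_msgs.msg import Marker

rospy.init_node("marker_demo")  # hypothetical node name
pub = rospy.Publisher("visualization", Marker, queue_size=2)  # placeholder topic
rospy.sleep(0.5)  # give the publisher time to register, as the C++ class does

def visualize_buoy(pose, frame_id):
    sphere = Marker()
    sphere.header.frame_id = frame_id
    sphere.ns, sphere.id = "sub", 0
    sphere.type, sphere.action = Marker.SPHERE, Marker.ADD
    sphere.pose = pose
    sphere.scale.x = sphere.scale.y = sphere.scale.z = 0.2
    sphere.color.g = sphere.color.a = 1.0  # opaque green
    pub.publish(sphere)

    label = Marker()
    label.header.frame_id = frame_id
    label.ns, label.id = "sub", 1
    label.type, label.action = Marker.TEXT_VIEW_FACING, Marker.ADD
    label.pose.position.x = pose.position.x
    label.pose.position.y = pose.position.y - 0.25  # y is down in the camera frame
    label.pose.position.z = pose.position.z
    label.scale.z = 0.2  # text height; the only scale TEXT_VIEW_FACING uses
    label.color.r = label.color.a = 1.0  # opaque red
    label.text = "Buoy Bump Target"
    pub.publish(label)
```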
From 5dcf14024446c254b0c31c8e4e1e6915a116ef33 Mon Sep 17 00:00:00 2001
From: mattlangford
Date: Thu, 27 Oct 2016 00:32:23 -0400
Subject: [PATCH 104/267] TOOLS: Add 'title' param to print_t

---
 .../navigator_tools/general_helpers.py        | 30 +++++++++++++++----
 1 file changed, 25 insertions(+), 5 deletions(-)

diff --git a/utils/navigator_tools/navigator_tools/general_helpers.py b/utils/navigator_tools/navigator_tools/general_helpers.py
index 792f34c9..0da897f3 100644
--- a/utils/navigator_tools/navigator_tools/general_helpers.py
+++ b/utils/navigator_tools/navigator_tools/general_helpers.py
@@ -1,12 +1,32 @@
 import rospy
 
-def print_t(_str, time=None):
-    # Needs a rospy init for ros time
+def print_t(_str, time=None, title=None):
+    time_header = None
+    title_header = False
+
+    if title is not None:
+        title_header = "\033[94m\033[1m{}\033[0m".format(title)
+
     if time is None:
         try:
             time = rospy.Time.now().to_sec()
-            print "\033[1m{}:\033[0m {}".format(time, _str)
+            time_header = "\033[1m{}\033[0m".format(time)
         except rospy.exceptions.ROSInitException:
-            print _str
+            pass
     else:
-        print "\033[1m{}:\033[0m {}".format(time, _str)
+        time_header = "\033[1m{}\033[0m".format(time)
+
+    if title_header and time_header:
+        # (TIME) HEADER: message
+        to_print = "{time} {title}: {msg}"
+    elif time_header:
+        # (TIME): message
+        to_print = "{time}: {msg}"
+    elif title_header:
+        # HEADER: message
+        to_print = "{title}: {msg}"
+    else:
+        # message
+        to_print = "{msg}"
+
+    print to_print.format(time=time_header, title=title_header, msg=_str)
\ No newline at end of file
From d6ad3b4dc26a98afc6f8bae5279595c88ed0069e Mon Sep 17 00:00:00 2001
From: mattlangford
Date: Thu, 27 Oct 2016 15:05:08 -0400
Subject: [PATCH 105/267] TOOLS: Add color printing features to fprint,
 transition mission components over to fprint

---
 .../navigator_tools/general_helpers.py        | 38 ++++++++++++++++---
 1 file changed, 32 insertions(+), 6 deletions(-)

diff --git a/utils/navigator_tools/navigator_tools/general_helpers.py b/utils/navigator_tools/navigator_tools/general_helpers.py
index 0da897f3..12021761 100644
--- a/utils/navigator_tools/navigator_tools/general_helpers.py
+++ b/utils/navigator_tools/navigator_tools/general_helpers.py
@@ -1,20 +1,40 @@
 import rospy
 
-def print_t(_str, time=None, title=None):
-    time_header = None
+class Colors():
+    # Some cool stuff could happen here
+    red = '\033[91m'
+    blue = '\033[94m'
+    green = '\033[92m'
+    bold = '\033[1m'
+
+    reset = '\033[0m'
+
+    def __getattr__(self, arg):
+        # If we get a non existent color, return the reset color
+        # (look the name up in the class dict so a miss can't recurse back here)
+        if arg.lower() in vars(Colors):
+            return vars(Colors)[arg.lower()]
+
+        return self.reset
+
+
+def fprint(msg, time=None, title=None, newline=True, msg_color=None):
+    time_header = False
     title_header = False
 
     if title is not None:
-        title_header = "\033[94m\033[1m{}\033[0m".format(title)
+        title_header = "{C.blue}{C.bold}{title}{C.reset}".format(C=Colors, title=title)
+
+    if msg_color is not None:
+        msg = "{color}{msg}{C.reset}".format(color=getattr(Colors(), msg_color), C=Colors, msg=msg)
 
     if time is None:
         try:
             time = rospy.Time.now().to_sec()
-            time_header = "\033[1m{}\033[0m".format(time)
+            time_header = "{C.bold}{time}{C.reset}".format(C=Colors, time=time)
         except rospy.exceptions.ROSInitException:
             pass
     else:
-        time_header = "\033[1m{}\033[0m".format(time)
+        time_header = "{C.bold}{time}{C.reset}".format(C=Colors, time=time)
 
     if title_header and time_header:
         # (TIME) HEADER: message
@@ -29,4 +49,10 @@ def fprint(msg, time=None, title=None, newline=True, msg_color=None):
         # message
         to_print = "{msg}"
 
-    print to_print.format(time=time_header, title=title_header, msg=_str)
\ No newline at end of file
+    if newline:
+        print to_print.format(time=time_header, title=title_header, msg=msg)
+    else:
+        # Note, this adds a space at the end.
+        print to_print.format(time=time_header, title=title_header, msg=msg),
+
+print_t = fprint  # For legacy
\ No newline at end of file
From 8dca2f8a733a2de3cd8b9149806f9820a969b83a Mon Sep 17 00:00:00 2001
From: mattlangford
Date: Sat, 29 Oct 2016 14:59:25 -0400
Subject: [PATCH 106/267] FPRINT: Add features to fprint and incorporate in
 mission runner

---
 .../navigator_tools/general_helpers.py        | 14 ++++++++++++++
 1 file changed, 14 insertions(+)

diff --git a/utils/navigator_tools/navigator_tools/general_helpers.py b/utils/navigator_tools/navigator_tools/general_helpers.py
index 12021761..5abf827c 100644
--- a/utils/navigator_tools/navigator_tools/general_helpers.py
+++ b/utils/navigator_tools/navigator_tools/general_helpers.py
@@ -16,6 +16,18 @@ def __getattr__(self, arg):
         return self.reset
 
+class Seperator():
+    # TODO
+    def __getattr__(self, *args, **kwargs):
+        return
+
+    def equals(self, _len, blank_space=False):
+        text = "{}".format("=" * _len)
+        if blank_space:
+            return "\n{}\n".format(text)
+
+        return text
+
 
 def fprint(msg, time=None, title=None, newline=True, msg_color=None):
     time_header = False
@@ -50,6 +62,8 @@ def fprint(msg, time=None, title=None, newline=True, msg_color=None):
         to_print = "{msg}"
 
     if newline:
+        if isinstance(newline, int):
+            msg += "\n" * (newline - 1)  # Since printing implicitly adds a new line
         print to_print.format(time=time_header, title=title_header, msg=msg)
     else:
         # Note, this adds a space at the end.
From fa24cbf3b4d29e95e870eebd303dd61d46e94047 Mon Sep 17 00:00:00 2001
From: mattlangford
Date: Sat, 29 Oct 2016 17:48:26 -0400
Subject: [PATCH 107/267] MISSIONS: Add failure check to move.go()

---
 utils/navigator_tools/navigator_tools/general_helpers.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/utils/navigator_tools/navigator_tools/general_helpers.py b/utils/navigator_tools/navigator_tools/general_helpers.py
index 5abf827c..93ad057b 100644
--- a/utils/navigator_tools/navigator_tools/general_helpers.py
+++ b/utils/navigator_tools/navigator_tools/general_helpers.py
@@ -62,7 +62,7 @@ def fprint(msg, time=None, title=None, newline=True, msg_color=None):
         to_print = "{msg}"
 
     if newline:
-        if isinstance(newline, int):
+        if not isinstance(newline, bool):
             msg += "\n" * (newline - 1)  # Since printing implicitly adds a new line
         print to_print.format(time=time_header, title=title_header, msg=msg)
     else:
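A short usage sketch of the helper as it stands after PATCH 107. This assumes a rospy node has been initialized so the timestamp resolves; the escape codes render as bold/colored text in a terminal, and the call sites shown are illustrative.

```python
from navigator_tools.general_helpers import fprint, print_t

fprint("Heading to the next waypoint", title="MISSION", msg_color="green")
fprint("Station hold failed", title="ALARM", msg_color="red", newline=3)  # pads two blank lines
fprint("no trailing newline here...", newline=False)
print_t("Old call sites keep working")  # legacy alias
```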
From 49d9d287d7947a0dae024e5ea9cc796eae0f0f3a Mon Sep 17 00:00:00 2001
From: Kevin Allen
Date: Sun, 30 Oct 2016 10:48:43 -0400
Subject: [PATCH 108/267] DETECT DELIVER: perception services/topics are
 relative, roi fixed for new cam

---
 .../nodes/camera_to_lidar/CameraLidarTransformer.cpp | 10 ++++++----
 .../GrayscaleContour/GrayscaleContour.cpp            |  4 ++--
 .../nodes/shape_identification/ShapeTracker.cpp      |  6 ++++--
 .../nodes/shape_identification/main.cpp              |  6 +++---
 4 files changed, 15 insertions(+), 11 deletions(-)

diff --git a/perception/navigator_vision/nodes/camera_to_lidar/CameraLidarTransformer.cpp b/perception/navigator_vision/nodes/camera_to_lidar/CameraLidarTransformer.cpp
index 7d7ed915..d1166ef4 100644
--- a/perception/navigator_vision/nodes/camera_to_lidar/CameraLidarTransformer.cpp
+++ b/perception/navigator_vision/nodes/camera_to_lidar/CameraLidarTransformer.cpp
@@ -1,7 +1,7 @@
 #include "CameraLidarTransformer.hpp"
 
 CameraLidarTransformer::CameraLidarTransformer()
-    : nh("camera_lidar_transformer"),
+    : nh(ros::this_node::getName()),
       tfBuffer(),
       tfListener(tfBuffer, nh),
       lidarSub(nh, "/velodyne_points", 10),
@@ -13,9 +13,9 @@ CameraLidarTransformer::CameraLidarTransformer()
 {
 #ifdef DO_ROS_DEBUG
   points_debug_publisher =
-      image_transport.advertise("/camera_lidar_transformer/points_debug", 1);
+      image_transport.advertise("points_debug", 1);
   pubMarkers = nh.advertise<visualization_msgs::MarkerArray>(
-      "/dock_shapes/raylider/markers", 10);
+      "markers_debug", 10);
 #endif
   nh.param<std::string>("camera_info_topic",camera_info_topic,"/right/right/camera_info");
   // ~nh.param("MIN_Z",MIN_Z,1);
   // ~nh.param("MAX_Z_ERROR",MAX_Z_ERROR,0.2);
   std::cout << "Constructed" << std::endl;
@@ -24,8 +24,10 @@ CameraLidarTransformer::CameraLidarTransformer()
   cameraInfoSub = nh.subscribe(camera_info_topic, 1,
                                &CameraLidarTransformer::cameraInfoCallback, this);
+  std::string camera_to_lidar_transform_topic;
+  nh.param<std::string>("camera_to_lidar_transform_topic",camera_to_lidar_transform_topic,"transform_camera");
   transformServiceServer = nh.advertiseService(
-      "transform_camera", &CameraLidarTransformer::transformServiceCallback,
+      camera_to_lidar_transform_topic, &CameraLidarTransformer::transformServiceCallback,
       this);
 }
 void CameraLidarTransformer::cameraInfoCallback(sensor_msgs::CameraInfo info)
diff --git a/perception/navigator_vision/nodes/shape_identification/GrayscaleContour/GrayscaleContour.cpp b/perception/navigator_vision/nodes/shape_identification/GrayscaleContour/GrayscaleContour.cpp
index c39a583b..3b1e23b0 100644
--- a/perception/navigator_vision/nodes/shape_identification/GrayscaleContour/GrayscaleContour.cpp
+++ b/perception/navigator_vision/nodes/shape_identification/GrayscaleContour/GrayscaleContour.cpp
@@ -42,9 +42,9 @@ GrayscaleContour::GrayscaleContour(ros::NodeHandle& nh) : DockShapeVision(nh) {
 #ifdef DO_ROS_DEBUG
   image_transport.reset(new image_transport::ImageTransport(nh));
   color_debug_publisher =
-      image_transport->advertise("/dock_shapes/finder/debug_color", 1);
+      image_transport->advertise("debug_color", 1);
   contour_debug_publisher =
-      image_transport->advertise("/dock_shapes/finder/debug_contours", 1);
+      image_transport->advertise("debug_contours", 1);
 #endif
 }
 void GrayscaleContour::init() {
diff --git a/perception/navigator_vision/nodes/shape_identification/ShapeTracker.cpp b/perception/navigator_vision/nodes/shape_identification/ShapeTracker.cpp
index 8536e017..e5622573 100644
--- a/perception/navigator_vision/nodes/shape_identification/ShapeTracker.cpp
+++ b/perception/navigator_vision/nodes/shape_identification/ShapeTracker.cpp
@@ -9,8 +9,10 @@ void ShapeTracker::setActive(bool a)
 }
 void ShapeTracker::init(ros::NodeHandle& nh)
 {
-  getShapesService = nh.advertiseService("/vision/get_shapes", &ShapeTracker::getShapesCallback, this);
-  allFoundShapesPublish = nh.advertise<navigator_msgs::DockShapes>("/dock_shapes/filtered_shapes", 10);
+  std::string get_shapes_topic;
+  nh.param<std::string>("get_shapes_topic",get_shapes_topic,"get_shapes");
+  getShapesService = nh.advertiseService(get_shapes_topic, &ShapeTracker::getShapesCallback, this);
+  allFoundShapesPublish = nh.advertise<navigator_msgs::DockShapes>("filtered_shapes", 10);
 }
 void ShapeTracker::addShape(navigator_msgs::DockShape& s)
 {
diff --git a/perception/navigator_vision/nodes/shape_identification/main.cpp b/perception/navigator_vision/nodes/shape_identification/main.cpp
index 10af17c4..5553ad05 100644
--- a/perception/navigator_vision/nodes/shape_identification/main.cpp
+++ b/perception/navigator_vision/nodes/shape_identification/main.cpp
@@ -37,7 +37,7 @@ class ShooterVision {
   int height;
 
  public:
-  ShooterVision() : nh_("dock_shape_finder"), it_(nh_) {
+  ShooterVision() : nh_(ros::this_node::getName()), it_(nh_) {
     nh_.param("auto_start", active, false);
     vision.reset(new GrayscaleContour(nh_));
     vision->init();
@@ -50,7 +50,7 @@ class ShooterVision {
     // DebugWindow::init();
     //#endif
     foundShapesPublisher = nh_.advertise<navigator_msgs::DockShapes>(
-        "/dock_shapes/found_shapes", 1000);
+        "found_shapes", 1000);
     image_sub_ = it_.subscribe(camera_topic, 1, &ShooterVision::run, this);
 
     int x_offset, y_offset, width, height;
@@ -108,7 +108,7 @@ class ShooterVision {
 };
 
 int main(int argc, char **argv) {
-  ros::init(argc, argv, "dock_shape_finder");
+  ros::init(argc, argv, "shape_identification");
   ShooterVision sv = ShooterVision();
   ros::spin();
   return 0;
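Why this patch matters is easiest to see with name resolution itself. The sketch below, a hypothetical rospy node (names made up), shows that a relative topic name follows whatever namespace the node is launched into, while a hard-coded global name ignores it; that is what lets launch files group these perception nodes under one namespace.

```python
#!/usr/bin/env python
import rospy

rospy.init_node("shape_identification")

# Pushed into a namespace (e.g. <group ns="dock_shapes"> in a launch file),
# a relative name lands under that namespace...
print rospy.resolve_name("found_shapes")  # /dock_shapes/found_shapes when ns=/dock_shapes

# ...while a hard-coded global name resolves the same way everywhere.
print rospy.resolve_name("/dock_shapes/found_shapes")  # always /dock_shapes/found_shapes
```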
From ed849f49502b5856498dedb45a1f4e431282ab52 Mon Sep 17 00:00:00 2001
From: mike
Date: Mon, 31 Oct 2016 15:43:09 -0400
Subject: [PATCH 109/267] DETECT DELIVER: Switch to using front camera, resize
 for performance

---
 .../CameraLidarTransformer.cpp                |  9 ++---
 .../shape_identification/DockShapeVision.h    |  3 +-
 .../GrayscaleContour/GrayscaleContour.cpp     | 32 ++++++++---------
 .../GrayscaleContour/GrayscaleContour.h       |  6 ++--
 .../nodes/shape_identification/main.cpp       | 35 +++++++++++++++----
 5 files changed, 51 insertions(+), 34 deletions(-)

diff --git a/perception/navigator_vision/nodes/camera_to_lidar/CameraLidarTransformer.cpp b/perception/navigator_vision/nodes/camera_to_lidar/CameraLidarTransformer.cpp
index d1166ef4..846aab02 100644
--- a/perception/navigator_vision/nodes/camera_to_lidar/CameraLidarTransformer.cpp
+++ b/perception/navigator_vision/nodes/camera_to_lidar/CameraLidarTransformer.cpp
@@ -18,6 +18,7 @@ CameraLidarTransformer::CameraLidarTransformer()
       "markers_debug", 10);
 #endif
   nh.param<std::string>("camera_info_topic",camera_info_topic,"/right/right/camera_info");
+  printf("Topic %s\n",camera_info_topic.c_str());
   // ~nh.param("MIN_Z",MIN_Z,1);
   // ~nh.param("MAX_Z_ERROR",MAX_Z_ERROR,0.2);
   std::cout << "Constructed" << std::endl;
@@ -57,8 +58,8 @@ bool CameraLidarTransformer::transformServiceCallback(
   matrixFindPlaneA << 0, 0, 0, 0, 0, 0, 0, 0, 0;
   Eigen::Vector3d matrixFindPlaneB(0, 0, 0);
 
-  cv::circle(debug_image, cv::Point(req.point.x, req.point.y), 3,
-             cv::Scalar(255, 0, 0), 5);
+  cv::circle(debug_image, cv::Point(req.point.x, req.point.y), 8,
+             cv::Scalar(255, 0, 0), -1);
   //Tracks the closest lidar point to the requested camera point
   double minDistance = std::numeric_limits<double>::max();
   for (auto ii = 0, jj = 0; ii < cloud_transformed.width;
@@ -167,7 +168,7 @@ bool CameraLidarTransformer::transformServiceCallback(
   visualization_msgs::Marker marker_normal;
   marker_normal.header = req.header;
   marker_normal.header.seq = 0;
-  marker_normal.header.frame_id = "right_right_cam";
+  marker_normal.header.frame_id = req.header.frame_id;
   marker_normal.id = 3000;
   marker_normal.type = visualization_msgs::Marker::ARROW;
 
@@ -175,7 +176,7 @@ bool CameraLidarTransformer::transformServiceCallback(
   sdp_normalvec_ros.x = normalvec_ros.x + res.transformed[0].x;
   sdp_normalvec_ros.y = normalvec_ros.y + res.transformed[0].y;
   sdp_normalvec_ros.z = normalvec_ros.z + res.transformed[0].z;
-  marker_normal.points.push_back(res.transformed[0]);
+  marker_normal.points.push_back(res.closest);
   marker_normal.points.push_back(sdp_normalvec_ros);
 
   marker_normal.scale.x = 0.1;
diff --git a/perception/navigator_vision/nodes/shape_identification/DockShapeVision.h b/perception/navigator_vision/nodes/shape_identification/DockShapeVision.h
index e9299753..473f1cd5 100644
--- a/perception/navigator_vision/nodes/shape_identification/DockShapeVision.h
+++ b/perception/navigator_vision/nodes/shape_identification/DockShapeVision.h
@@ -8,7 +8,6 @@ class DockShapeVision {
   DockShapeVision(ros::NodeHandle& nh);
 
  public:
-  virtual void GetShapes(cv::Mat& frame, cv::Rect roi,
-                         navigator_msgs::DockShapes& symbols) = 0;
+  virtual void GetShapes(cv::Mat& frame, navigator_msgs::DockShapes& symbols) = 0;
   virtual void init() = 0;
 };
diff --git a/perception/navigator_vision/nodes/shape_identification/GrayscaleContour/GrayscaleContour.cpp b/perception/navigator_vision/nodes/shape_identification/GrayscaleContour/GrayscaleContour.cpp
index 3b1e23b0..fbe88d87 100644
--- a/perception/navigator_vision/nodes/shape_identification/GrayscaleContour/GrayscaleContour.cpp
+++ b/perception/navigator_vision/nodes/shape_identification/GrayscaleContour/GrayscaleContour.cpp
@@ -53,29 +53,26 @@ void GrayscaleContour::init() {
   createTrackbar("thresh2", "Menu", &cannyParams.thresh2, 255);
 #endif
 }
-void GrayscaleContour::GetShapes(cv::Mat& frame, cv::Rect roi,
-                                 navigator_msgs::DockShapes& symbols) {
+void GrayscaleContour::GetShapes(cv::Mat& frame, navigator_msgs::DockShapes& symbols) {
   if (frame.empty()) return;
-  this->roi = roi;
   frame_width = frame.cols;
   frame_height = frame.rows;
   colorFrame = frame;
-  CropFrame();
-  Mat temp = croppedFrame.clone();
-  bilateralFilter(temp, croppedFrame, blur_size, blur_size * 2, blur_size / 2);
+  Mat temp = colorFrame.clone();
+  bilateralFilter(temp, colorFrame, blur_size, blur_size * 2, blur_size / 2);
   ConvertToGrayscale();
   DetectEdges();
   FindContours();
 #ifdef DO_DEBUG
-  Mat result = croppedFrame.clone();
+  Mat result = colorFrame.clone();
 #endif
 #ifdef DO_ROS_DEBUG
   cv_bridge::CvImage ros_color_debug;
   ros_color_debug.encoding = "bgr8";
-  ros_color_debug.image = croppedFrame.clone();
+  ros_color_debug.image = colorFrame.clone();
 #endif
 
   for (size_t i = 0; i < contours.size(); i++) {
@@ -113,7 +110,7 @@ void GrayscaleContour::GetShapes(cv::Mat& frame, navigator_msgs::DockShapes& sym
 #ifdef DO_DEBUG
   for (auto symbol : symbols.list) {
     putText(result, symbol.Shape + "\n(" + symbol.Color + ")",
-            Point(symbol.CenterX + 10 - roi.x, symbol.CenterY - roi.y), 1, 1,
+            Point(symbol.CenterX + 10, symbol.CenterY), 1, 1,
             Scalar(0, 0, 0), 3);
   }
   imshow("Result", result);
@@ -135,9 +132,8 @@ void GrayscaleContour::GetShapes(cv::Mat& frame, navigator_msgs::DockShapes& sym
   contour_debug_publisher.publish(ros_color_debug.toImageMsg());
 #endif
 }
-void GrayscaleContour::CropFrame() { croppedFrame = colorFrame(roi); }
 void GrayscaleContour::ConvertToGrayscale() {
-  cvtColor(croppedFrame, grayscaleFrame, CV_BGR2GRAY);
+  cvtColor(colorFrame, grayscaleFrame, CV_BGR2GRAY);
 }
 void GrayscaleContour::DetectEdges() {
   Canny(grayscaleFrame, edgesFrame, cannyParams.thresh1, cannyParams.thresh2);
@@ -170,10 +166,10 @@ void GrayscaleContour::FindContours() {
 }
 bool GrayscaleContour::GetColor(int Index, std::string& color,
                                 float& confidence) {
-  Mat mask = Mat::zeros(croppedFrame.rows, croppedFrame.cols, CV_8UC1);
+  Mat mask = Mat::zeros(colorFrame.rows, colorFrame.cols, CV_8UC1);
   drawContours(mask, contours, Index, Scalar(255), CV_FILLED);
-  Mat meanBGR(1,1,croppedFrame.type(), mean(croppedFrame, mask));
-  Mat mean_hsv_mat(1,1,croppedFrame.type(), Scalar(0,0,0));
+  Mat meanBGR(1,1,colorFrame.type(), mean(colorFrame, mask));
+  Mat mean_hsv_mat(1,1,colorFrame.type(), Scalar(0,0,0));
   cvtColor(meanBGR,mean_hsv_mat,CV_BGR2HSV,3);
   Vec3b meanColor = mean_hsv_mat.at<Vec3b>(0,0);
   double hue = meanColor[0];
@@ -341,13 +337,13 @@ void GrayscaleContour::findAngles(std::vector<Point>& points,
 void GrayscaleContour::setShapePoints(navigator_msgs::DockShape& dockShape,
                                       std::vector<Point>& points) {
   Point center = findCenter(points);
-  dockShape.CenterX = center.x + roi.x;
-  dockShape.CenterY = center.y + roi.y;
+  dockShape.CenterX = center.x;
+  dockShape.CenterY = center.y;
   dockShape.img_width = colorFrame.cols;
 
   for (size_t j = 0; j < points.size(); j++) {
     geometry_msgs::Point p;
-    p.x = points[j].x + roi.x;
-    p.y = points[j].y + roi.y;
+    p.x = points[j].x;
+    p.y = points[j].y;
     p.z = 0;
     dockShape.points.push_back(p);
   }
diff --git a/perception/navigator_vision/nodes/shape_identification/GrayscaleContour/GrayscaleContour.h b/perception/navigator_vision/nodes/shape_identification/GrayscaleContour/GrayscaleContour.h
index 3e15de05..3fcef197 100644
--- a/perception/navigator_vision/nodes/shape_identification/GrayscaleContour/GrayscaleContour.h
+++ b/perception/navigator_vision/nodes/shape_identification/GrayscaleContour/GrayscaleContour.h
@@ -16,13 +16,12 @@ using namespace cv;
 #endif
 class GrayscaleContour : public DockShapeVision {
  private:
-  Mat colorFrame, croppedFrame, grayscaleFrame, edgesFrame;
+  Mat colorFrame, grayscaleFrame, edgesFrame;
   std::vector<std::vector<Point> > contours;
   std::vector<Vec4i> hierarchy;
   std::vector<std::vector<Point> > shapes;
   Rect roi;
 
-  void CropFrame();
   void ConvertToGrayscale();
   void DetectEdges();
   void FindContours();
@@ -76,7 +75,6 @@ class GrayscaleContour : public DockShapeVision {
   double greenHue;
  public:
   GrayscaleContour(ros::NodeHandle& nh);
-  void GetShapes(cv::Mat& frame, cv::Rect roi,
-                 navigator_msgs::DockShapes& symbols);
+  void GetShapes(cv::Mat& frame, navigator_msgs::DockShapes& symbols);
   void init();
 };
diff --git a/perception/navigator_vision/nodes/shape_identification/main.cpp b/perception/navigator_vision/nodes/shape_identification/main.cpp
index 5553ad05..27e1590d 100644
--- a/perception/navigator_vision/nodes/shape_identification/main.cpp
+++ b/perception/navigator_vision/nodes/shape_identification/main.cpp
@@ -13,6 +13,7 @@
 #include
 #include "DockShapeVision.h"
 #include
+#include
 #include "ShapeTracker.h"
 
@@ -33,8 +34,9 @@ class ShooterVision {
   std::string camera_topic;
   bool active;
   cv::Rect roi;
-  int width;
-  int height;
+  cv::Size size;
+  unsigned int width;
+  unsigned int height;
 
  public:
   ShooterVision() : nh_(ros::this_node::getName()), it_(nh_) {
@@ -58,11 +60,19 @@ class ShooterVision {
     nh_.param("roi/y_offset", y_offset, 103);
     nh_.param("roi/width", width, 499);
     nh_.param("roi/height", height, 243);
+    nh_.param("size/height", size.height, 243);
+    nh_.param("size/width", size.width, 243);
     roi = Rect(x_offset, y_offset, width, height);
     TrackedShape::init(nh_);
     tracker.init(nh_);
   }
-
+  void fixPoint(geometry_msgs::Point& p)
+  {
+    p.x += roi.x;
+    p.y += roi.y;
+    p.x *= width/double(size.width);
+    p.y *= height/double(size.height);
+  }
   bool runCallback(std_srvs::SetBool::Request &req, std_srvs::SetBool::Response &res)
   {
     active = req.data;
@@ -99,9 +109,22 @@ class ShooterVision {
     height = frame.rows;
 
     symbols.list.clear();
-    vision->GetShapes(frame, roi, symbols);
-    for (size_t i = 0; i < symbols.list.size(); i++)
-      symbols.list[i].header = msg->header;
+    cv::Mat resized;
+    cv::resize(frame,resized,size);
+    cv::Mat resized_roied = resized(roi);
+    vision->GetShapes(resized_roied,symbols);
+    for (navigator_msgs::DockShape& shape : symbols.list)
+    {
+      geometry_msgs::Point center;
+      center.x = shape.CenterX;
+      center.y = shape.CenterY;
+      fixPoint(center);
+      shape.CenterX = center.x;
+      shape.CenterY = center.y;
+      for (geometry_msgs::Point& p: shape.points)
+        fixPoint(p);
+      shape.header = msg->header;
+    }
     foundShapesPublisher.publish(symbols);
     tracker.addShapes(symbols);
  }
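A quick numeric check of the `fixPoint()` mapping this patch introduces: a detection inside the ROI of the downsized frame is mapped back to full-frame pixels by undoing the crop first, then undoing the resize. All numbers below are made up for illustration.

```python
full_w, full_h = 1280, 720   # original camera frame (assumed)
size_w, size_h = 243, 243    # resized frame fed to the detector
roi_x, roi_y = 73, 103       # ROI offset inside the resized frame

def fix_point(x, y):
    x, y = x + roi_x, y + roi_y          # undo the crop
    return (x * full_w / float(size_w),  # undo the resize
            y * full_h / float(size_h))

print fix_point(10, 20)  # -> (437.20..., 364.44...)
```

The order matters: the ROI offset is expressed in resized-image pixels, so it must be added before scaling back up, exactly as the C++ does.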
From 70e92f1ea919f7dd11477587c3168c72e20cbb56 Mon Sep 17 00:00:00 2001
From: mike
Date: Tue, 1 Nov 2016 15:47:56 -0400
Subject: [PATCH 110/267] DETECT DELIVER: Cleaner mission code and fix bug
 while using front camera

---
 perception/navigator_vision/nodes/shape_identification/main.cpp | 1 +
 1 file changed, 1 insertion(+)

diff --git a/perception/navigator_vision/nodes/shape_identification/main.cpp b/perception/navigator_vision/nodes/shape_identification/main.cpp
index 27e1590d..fb8d68e0 100644
--- a/perception/navigator_vision/nodes/shape_identification/main.cpp
+++ b/perception/navigator_vision/nodes/shape_identification/main.cpp
@@ -115,6 +115,7 @@ class ShooterVision {
     vision->GetShapes(resized_roied,symbols);
     for (navigator_msgs::DockShape& shape : symbols.list)
     {
+      shape.img_width = frame.cols;
       geometry_msgs::Point center;
      center.x = shape.CenterX;
       center.y = shape.CenterY;
From 7b88ce3580dafbac105ea06fdab2dfe4d5f5b418 Mon Sep 17 00:00:00 2001
From: Matt Langford
Date: Wed, 2 Nov 2016 19:50:28 -0400
Subject: [PATCH 111/267] TOOLS: Update tf_fudger to accept '/' in name.

---
 utils/navigator_tools/nodes/tf_fudger.py | 9 ++++++---
 1 file changed, 6 insertions(+), 3 deletions(-)

diff --git a/utils/navigator_tools/nodes/tf_fudger.py b/utils/navigator_tools/nodes/tf_fudger.py
index 53b2dc61..316049d6 100755
--- a/utils/navigator_tools/nodes/tf_fudger.py
+++ b/utils/navigator_tools/nodes/tf_fudger.py
@@ -30,7 +30,7 @@
 
 # slider max values
 x_max = 1000
-ang_max = 1000
+ang_max = 3000
 
 # tf ranges (symmetric about zero)
 x_range = args.range  # meters
@@ -60,6 +60,9 @@
 tf_line = '<node pkg="tf" type="static_transform_publisher" name="{child}_tf" args="{p[0]} {p[1]} {p[2]} {q[0]} {q[1]} {q[2]} {q[3]} {parent} {child} {prd}"/>\n'
 prd = 100  # This will get replaced if it needs to
 
+args.tf_child = args.tf_child[1:] if args.tf_child[0] == '/' else args.tf_child
+args.tf_parent = args.tf_parent[1:] if args.tf_parent[0] == '/' else args.tf_parent
+
 while not rospy.is_shutdown():
     x, y, z = toTfLin(cv2.getTrackbarPos("x", "tf")), toTfLin(cv2.getTrackbarPos("y", "tf")), toTfLin(cv2.getTrackbarPos("z", "tf"))
     p = (x, y, z)
@@ -70,7 +73,7 @@
     q_feedback = "xyz: {}, q: {}".format([round(x, 5) for x in p], [round(x, 5) for x in q])
     print q_feedback if q_mode else rpy_feedback
 
-    k = cv2.waitKey(10) & 0xFF
+    k = cv2.waitKey(100) & 0xFF
 
     if k == ord('q'):
         q_mode = not q_mode
@@ -118,4 +121,4 @@
 br.sendTransform(p, q, rospy.Time.now(), args.tf_child, args.tf_parent)
 
 # Print out the tf static transform line with the fudged tf
-print '\n',tf_line.format(child=args.tf_child, p=p, q=np.round(q, 5), parent=args.tf_parent, prd=prd)
\ No newline at end of file
+print '\n',tf_line.format(child=args.tf_child, p=p, q=np.round(q, 5), parent=args.tf_parent, prd=prd)
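The static-transform launch line the fudger prints can be reproduced by hand. A sketch under stated assumptions: the exact `tf_line` template varies across revisions of the tool, and the frame names and numbers below are placeholders; `static_transform_publisher` expects `x y z qx qy qz qw parent child period_ms`.

```python
from tf import transformations

p = (1.25, -0.4, 0.8)                                  # meters, made up
q = transformations.quaternion_from_euler(0, 0, 1.57)  # roll, pitch, yaw -> (qx, qy, qz, qw)
args = ' '.join(str(round(v, 5)) for v in list(p) + list(q))
print '<node pkg="tf" type="static_transform_publisher" ' \
      'name="velodyne_tf" args="{} base_link velodyne 100"/>'.format(args)
```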
From e5c501b165ae0497e858778d16e2b9d6cf23d040 Mon Sep 17 00:00:00 2001
From: mattlangford
Date: Thu, 3 Nov 2016 01:27:11 -0400
Subject: [PATCH 112/267] BOUNDS: Add sim mode to bounds server, remove need
 for convert server to be running for bounds requests

---
 .../navigator_tools/msg_helpers.py            |  6 ++-
 utils/navigator_tools/nodes/bounds.py         | 52 +++++++++++++++----
 .../nodes/coordinate_conversion_server.py     | 23 ++++----
 3 files changed, 60 insertions(+), 21 deletions(-)

diff --git a/utils/navigator_tools/navigator_tools/msg_helpers.py b/utils/navigator_tools/navigator_tools/msg_helpers.py
index f4a5e6aa..cd440e38 100644
--- a/utils/navigator_tools/navigator_tools/msg_helpers.py
+++ b/utils/navigator_tools/navigator_tools/msg_helpers.py
@@ -96,7 +96,11 @@ def wrench_to_numpy(wrench):
     return force, torque
 
-def numpy_to_point(np_vector):
+def numpy_to_point(vector):
+    np_vector = np.array(vector)
+    if np_vector.size == 2:
+        np_vector = np.append(np_vector, 0)  # Assume user is giving a 2d point
+
     return geometry_msgs.Point(*np_vector)
 
diff --git a/utils/navigator_tools/nodes/bounds.py b/utils/navigator_tools/nodes/bounds.py
index 5f3ab1c5..6cbd56a5 100755
--- a/utils/navigator_tools/nodes/bounds.py
+++ b/utils/navigator_tools/nodes/bounds.py
@@ -3,22 +3,53 @@
 import txros
 import navigator_tools
 from twisted.internet import defer
+from coordinate_conversion_server import Converter
 from navigator_msgs.srv import Bounds, BoundsResponse, \
     CoordinateConversion, CoordinateConversionRequest
 
 
+class Param():
+    @classmethod
+    @txros.util.cancellableInlineCallbacks
+    def get(cls, nh, param):
+        try:
+            p = yield nh.get_param(param)
+            print param, p
+            defer.returnValue(cls(p))
+
+        except txros.rosxmlrpc.Error:
+            # Param not found
+            defer.returnValue(cls(None))
+
+    def __init__(self, value):
+        self.value = value
+        self.found = value is not None
+        self.found_and_true = self.value and self.found
+
+
 @txros.util.cancellableInlineCallbacks
 def got_request(nh, req, convert):
     to_frame = "enu" if req.to_frame == '' else req.to_frame
 
     resp = BoundsResponse(enforce=False)
-    if (yield nh.has_param("/bounds/enforce")) and (yield nh.get_param("/bounds/enforce")):
-        # We want to enforce the bounds that were set
-        lla_bounds = yield nh.get_param("/bounds/lla")
-        bounds = []
-        for lla_bound in lla_bounds:
-            bound = yield convert(CoordinateConversionRequest(frame="lla", point=np.append(lla_bound, 0)))
-            bounds.append(navigator_tools.numpy_to_point(getattr(bound, to_frame)))
+
+    # Lets get all these defers started up
+    enforce = yield Param.get(nh, "/bounds/enforce")
+    is_sim = yield Param.get(nh, "/is_simulation")
+    sim_bounds = yield Param.get(nh, "/bounds/sim")
+    lla_bounds = yield Param.get(nh, "/bounds/lla")
+
+    if enforce.found_and_true:
+        if is_sim.found_and_true:
+            default_bounds = [[100, 100], [-100, 100], [-100, -100], [100, -100]]  # default if param not found
+            bounds = sim_bounds.value if sim_bounds.found else default_bounds
+            bounds = map(navigator_tools.numpy_to_point, bounds)
+        else:
+            # We want to enforce the bounds that were set
+            bounds = []
+            for lla_bound in lla_bounds.value:
+                bound = yield convert.request(CoordinateConversionRequest(frame="lla", point=np.append(lla_bound, 0)))
+                bounds.append(navigator_tools.numpy_to_point(getattr(bound, to_frame)))
 
         resp = BoundsResponse(enforce=True, bounds=bounds)
 
@@ -29,10 +60,11 @@ def got_request(nh, req, convert):
 def main():
     nh = yield txros.NodeHandle.from_argv("bounds_server")
-    convert = nh.get_service_client("/convert", CoordinateConversion)
+    convert = Converter()
+    yield convert.init(nh)
 
     nh.advertise_service('/get_bounds', Bounds, lambda req: got_request(nh, req, convert))
 
     yield defer.Deferred()  # never exit
-
-txros.util.launch_main(main)
\ No newline at end of file
+if __name__ == "__main__":
+    txros.util.launch_main(main)
\ No newline at end of file
diff --git a/utils/navigator_tools/nodes/coordinate_conversion_server.py b/utils/navigator_tools/nodes/coordinate_conversion_server.py
index 7cfccb96..fdd52f61 100755
--- a/utils/navigator_tools/nodes/coordinate_conversion_server.py
+++ b/utils/navigator_tools/nodes/coordinate_conversion_server.py
@@ -12,12 +12,13 @@
 
 class Converter(object):
     @txros.util.cancellableInlineCallbacks
-    def init(self):
-        self.nh = yield txros.NodeHandle.from_argv("coordinate_conversion_server")
-        self.odom_sub = self.nh.subscribe('odom', Odometry)
-        self.abs_odom_sub = self.nh.subscribe('absodom', Odometry)
+    def init(self, nh, advertise_service=False):
+        self.nh = nh
+        self.odom_sub = yield self.nh.subscribe('odom', Odometry)
+        self.abs_odom_sub = yield self.nh.subscribe('absodom', Odometry)
 
-        self.nh.advertise_service('/convert', CoordinateConversion, self.got_request)
+        if advertise_service:
+            yield self.nh.advertise_service('/convert', CoordinateConversion, self.request)
 
     def __getattr__(self, attr):
         print "Frame '{}' not found!".format(attr)
@@ -27,11 +28,11 @@ def not_found(self):
         return [[np.nan, np.nan, np.nan]] * 3
 
     @txros.util.cancellableInlineCallbacks
-    def got_request(self, srv):
+    def request(self, srv):
         self.point = np.array(srv.point)
 
-        self.enu_pos = navigator_tools.odometry_to_numpy((yield self.odom_sub.get_last_message()))[0][0]
-        self.ecef_pos = navigator_tools.odometry_to_numpy((yield self.abs_odom_sub.get_last_message()))[0][0]
+        self.enu_pos = navigator_tools.odometry_to_numpy((yield self.odom_sub.get_next_message()))[0][0]
+        self.ecef_pos = navigator_tools.odometry_to_numpy((yield self.abs_odom_sub.get_next_message()))[0][0]
 
         enu, ecef, lla = getattr(self, srv.frame)()
 
@@ -80,9 +81,11 @@ def enu(self):
 
 @txros.util.cancellableInlineCallbacks
 def main():
+    nh = yield txros.NodeHandle.from_argv("coordinate_conversion_server")
     c = Converter()
-    yield c.init()
+    yield c.init(nh, advertise_service=True)
 
     yield defer.Deferred()  # never exit
 
-txros.util.launch_main(main)
\ No newline at end of file
+if __name__ == "__main__":
    txros.util.launch_main(main)
\ No newline at end of file
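The point of the `Param` wrapper above is that a missing rosparam comes back as a falsy object instead of a raised exception, so call sites can branch without try/except. A usage sketch, assuming the `Param` class from the bounds.py patch is in scope and a txros NodeHandle `nh` exists:

```python
import txros
from twisted.internet import defer

@txros.util.cancellableInlineCallbacks
def check_bounds_flag(nh):
    enforce = yield Param.get(nh, "/bounds/enforce")
    if enforce.found_and_true:
        print "enforcing bounds"
    elif enforce.found:
        print "param set, but false:", enforce.value
    else:
        print "param not set at all"
    defer.returnValue(enforce.value)
```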
From e413c4167aeb9807470b09bc69e2ad2743d44ce2 Mon Sep 17 00:00:00 2001
From: mattlangford
Date: Thu, 3 Nov 2016 01:48:31 -0400
Subject: [PATCH 113/267] ALARMS: Move station holding to its own node - hopes
 to fix the station holding issue

---
 utils/navigator_tools/navigator_tools/general_helpers.py | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/utils/navigator_tools/navigator_tools/general_helpers.py b/utils/navigator_tools/navigator_tools/general_helpers.py
index 93ad057b..925d7041 100644
--- a/utils/navigator_tools/navigator_tools/general_helpers.py
+++ b/utils/navigator_tools/navigator_tools/general_helpers.py
@@ -39,7 +39,10 @@ def fprint(msg, time=None, title=None, newline=True, msg_color=None):
     if msg_color is not None:
         msg = "{color}{msg}{C.reset}".format(color=getattr(Colors(), msg_color), C=Colors, msg=msg)
 
-    if time is None:
+    if time == "":
+        time_header = False
+
+    elif time is None:
         try:
             time = rospy.Time.now().to_sec()
             time_header = "{C.bold}{time}{C.reset}".format(C=Colors, time=time)
From 73ad146558676147306b5b0445951b58e6bc0309 Mon Sep 17 00:00:00 2001
From: mattlangford
Date: Thu, 3 Nov 2016 02:09:07 -0400
Subject: [PATCH 114/267] OGRID: Add ogrid draw node back, add ogrid fuse
 script to launch

---
 utils/navigator_tools/nodes/ogrid_draw.py | 115 ++++++++++++++++++++++
 1 file changed, 115 insertions(+)
 create mode 100755 utils/navigator_tools/nodes/ogrid_draw.py

diff --git a/utils/navigator_tools/nodes/ogrid_draw.py b/utils/navigator_tools/nodes/ogrid_draw.py
new file mode 100755
index 00000000..bd664701
--- /dev/null
+++ b/utils/navigator_tools/nodes/ogrid_draw.py
@@ -0,0 +1,115 @@
+#!/usr/bin/env python
+"""
+Opens a neat window for drawing occupancy grids!
+
+"""
+import rospy
+import cv2
+import numpy as np
+import argparse
+import sys
+
+from geometry_msgs.msg import Pose
+from nav_msgs.msg import OccupancyGrid, MapMetaData
+
+rospy.init_node("ogrid_draw_node", anonymous=True)
+
+class DrawGrid(object):
+    def __init__(self, height, width, image_path):
+        self.height, self.width = height, width
+
+        self.clear_screen()
+
+        if image_path:
+            self.make_image(image_path)
+
+        cv2.namedWindow('Draw OccupancyGrid')
+        cv2.setMouseCallback('Draw OccupancyGrid', self.do_draw)
+        self.drawing = 0
+
+    def make_image(self, image_path):
+        img = cv2.imread(image_path, 0)
+        if img is None:
+            print "Image not found at '{}'".format(image_path)
+            return
+
+        img = cv2.resize(img, (self.width, self.height), interpolation=cv2.INTER_CUBIC)
+        _, img = cv2.threshold(img, 127, 255, cv2.THRESH_BINARY_INV)
+        self.img = np.clip(img, -1, 100)
+
+    def clear_screen(self):
+        self.img = np.zeros((self.height, self.width), np.uint8)
+
+    def do_draw(self, event, x, y, flags, param):
+        draw_vals = {1: 100, 2: 0}
+
+        if event == cv2.EVENT_LBUTTONUP or event == cv2.EVENT_RBUTTONUP:
+            self.drawing = 0
+        elif event == cv2.EVENT_LBUTTONDOWN:
+            self.drawing = 1
+        elif event == cv2.EVENT_RBUTTONDOWN:
+            self.drawing = 2
+        elif self.drawing != 0:
+            cv2.circle(self.img, (x, y), 5, draw_vals[self.drawing], -1)
+
+class OGridPub(object):
+    def __init__(self, image_path=None):
+        height = int(rospy.get_param("~grid_height", 800))
+        width = int(rospy.get_param("~grid_width", 800))
+        resolution = rospy.get_param("~grid_resolution", .3)
+        ogrid_topic = rospy.get_param("~grid_topic", "/ogrid")
+
+        self.grid_drawer = DrawGrid(height, width, image_path)
+        self.ogrid_pub = rospy.Publisher(ogrid_topic, OccupancyGrid, queue_size=1)
+
+        m = MapMetaData()
+        m.resolution = resolution
+        m.width = width
+        m.height = height
+        pos = np.array([-width * resolution / 2, -height * resolution / 2, 0])
+        quat = np.array([0, 0, 0, 1])
+        m.origin = Pose()
+        m.origin.position.x, m.origin.position.y = pos[:2]
+
+        self.map_meta_data = m
+
+        rospy.Timer(rospy.Duration(1), self.pub_grid)
+
+    def pub_grid(self, *args):
+        grid = self.grid_drawer.img
+
+        ogrid = OccupancyGrid()
+        ogrid.header.frame_id = '/enu'
+        ogrid.header.stamp = rospy.Time.now()
+        ogrid.info = self.map_meta_data
+        ogrid.data = np.subtract(np.flipud(grid).flatten(), 1).astype(np.int8).tolist()
+
+        self.ogrid_pub.publish(ogrid)
+
+if __name__ == "__main__":
+    usage_msg = "Lets you draw an occupancy grid!"
+    desc_msg = "If you pass -i <image_path>, the ogrid will be created based on the thresholded black and white image."
+
+    parser = argparse.ArgumentParser(usage=usage_msg, description=desc_msg)
+    parser.add_argument('-i', '--image', action='store', type=str,
+                        help="Path to an image to use as an ogrid.")
+
+    # Roslaunch passes in some args we don't want to deal with (there may be a better way than this)
+    if '-i' in sys.argv:
+        args = parser.parse_args(sys.argv[1:])
+        im_path = args.image
+    else:
+        im_path = None
+
+    o = OGridPub(image_path=im_path)
+
+    while not rospy.is_shutdown():
+        cv2.imshow("Draw OccupancyGrid", o.grid_drawer.img)
+        k = cv2.waitKey(100) & 0xFF
+
+        if k == 27:
+            break
+        elif k == 113:
+            o.grid_drawer.clear_screen()
+
+    cv2.destroyAllWindows()
From 52bde499b9d8034ba1723a8562283b049b4a1456 Mon Sep 17 00:00:00 2001
From: mattlangford
Date: Fri, 4 Nov 2016 16:21:58 -0400
Subject: [PATCH 115/267] GAZEBO: Make absodom correct, meaning bounds can be
 real now

---
 utils/navigator_tools/nodes/bounds.py | 19 ++++++-------------
 1 file changed, 6 insertions(+), 13 deletions(-)

diff --git a/utils/navigator_tools/nodes/bounds.py b/utils/navigator_tools/nodes/bounds.py
index 6cbd56a5..e750256b 100755
--- a/utils/navigator_tools/nodes/bounds.py
+++ b/utils/navigator_tools/nodes/bounds.py
@@ -1,11 +1,12 @@
 #!/usr/bin/env python
 import numpy as np
+
 import txros
 import navigator_tools
 from twisted.internet import defer
 from coordinate_conversion_server import Converter
 from navigator_msgs.srv import Bounds, BoundsResponse, \
-    CoordinateConversion, CoordinateConversionRequest
+    CoordinateConversionRequest
 
 
 class Param():
@@ -35,21 +36,13 @@ def got_request(nh, req, convert):
 
     # Lets get all these defers started up
     enforce = yield Param.get(nh, "/bounds/enforce")
-    is_sim = yield Param.get(nh, "/is_simulation")
-    sim_bounds = yield Param.get(nh, "/bounds/sim")
     lla_bounds = yield Param.get(nh, "/bounds/lla")
 
     if enforce.found_and_true:
-        if is_sim.found_and_true:
-            default_bounds = [[100, 100], [-100, 100], [-100, -100], [100, -100]]  # default if param not found
-            bounds = sim_bounds.value if sim_bounds.found else default_bounds
-            bounds = map(navigator_tools.numpy_to_point, bounds)
-        else:
-            # We want to enforce the bounds that were set
-            bounds = []
-            for lla_bound in lla_bounds.value:
-                bound = yield convert.request(CoordinateConversionRequest(frame="lla", point=np.append(lla_bound, 0)))
-                bounds.append(navigator_tools.numpy_to_point(getattr(bound, to_frame)))
+        bounds = []
+        for lla_bound in lla_bounds.value:
+            bound = yield convert.request(CoordinateConversionRequest(frame="lla", point=np.append(lla_bound, 0)))
+            bounds.append(navigator_tools.numpy_to_point(getattr(bound, to_frame)))
 
         resp = BoundsResponse(enforce=True, bounds=bounds)
 
From eba4c1d1f5395b7d7cd863bd63d88a48ee8321c1 Mon Sep 17 00:00:00 2001
From: mattlangford
Date: Fri, 4 Nov 2016 22:08:45 -0400
Subject: [PATCH 116/267] SIM: Adjust launch files to launch lidar oa in
 correct order with bounds server

---
 utils/navigator_tools/nodes/bounds.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/utils/navigator_tools/nodes/bounds.py b/utils/navigator_tools/nodes/bounds.py
index e750256b..1226c702 100755
--- a/utils/navigator_tools/nodes/bounds.py
+++ b/utils/navigator_tools/nodes/bounds.py
@@ -51,7 +51,7 @@ def got_request(nh, req, convert):
 
 @txros.util.cancellableInlineCallbacks
 def main():
-    nh = yield txros.NodeHandle.from_argv("bounds_server")
+    nh = yield txros.NodeHandle.from_argv("bounds_server", anonymous=True)
     convert = Converter()
     yield convert.init(nh)
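Why the `flipud` / subtract-one dance in `pub_grid()` above: the drawing canvas stores free space as 0 and drawn walls as 100, while `OccupancyGrid.data` is row-major with row 0 at the bottom and uses -1 for "unknown". A tiny worked example (values made up):

```python
import numpy as np

grid = np.array([[0,   0],    # top row of the drawing window
                 [100, 0]])   # bottom row of the drawing window
data = np.subtract(np.flipud(grid).flatten(), 1).astype(np.int8)
print data.tolist()  # [99, -1, -1, -1]
```

So a drawn cell (100) publishes as 99 (treated as occupied) and an untouched cell publishes as -1 (unknown), with the drawn bottom row coming first in the flattened message.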
From 3af41ae177b2b53ebf070c083e46987b02415634 Mon Sep 17 00:00:00 2001
From: Tess
Date: Sat, 5 Nov 2016 19:18:16 -0400
Subject: [PATCH 117/267] SCAN THE CODE: finishes scan the code mission,
 updates TF from the boat, updates the mission planner so that it accounts
 for a base mission, uses a YAML file and a Queue object

---
 .../navigator_tools/__init__.py               |  4 +-
 .../navigator_tools/object_database_helper.py | 63 +++++++++++++++++++
 2 files changed, 66 insertions(+), 1 deletion(-)
 create mode 100644 utils/navigator_tools/navigator_tools/object_database_helper.py

diff --git a/utils/navigator_tools/navigator_tools/__init__.py b/utils/navigator_tools/navigator_tools/__init__.py
index 8b91231e..b2fae0f2 100644
--- a/utils/navigator_tools/navigator_tools/__init__.py
+++ b/utils/navigator_tools/navigator_tools/__init__.py
@@ -5,6 +5,7 @@
 import geometry_helpers
 import rviz_helpers
 import general_helpers
+import object_database_helper
 
 from init_helpers import *
 from image_helpers import *
@@ -12,4 +13,5 @@
 from threading_helpers import *
 from geometry_helpers import *
 from rviz_helpers import *
-from general_helpers import *
\ No newline at end of file
+from general_helpers import *
+from object_database_helper import *
diff --git a/utils/navigator_tools/navigator_tools/object_database_helper.py b/utils/navigator_tools/navigator_tools/object_database_helper.py
new file mode 100644
index 00000000..77f503cc
--- /dev/null
+++ b/utils/navigator_tools/navigator_tools/object_database_helper.py
@@ -0,0 +1,63 @@
+"""Use the DBHelper class to interface with the Database without having to deal with ROS things."""
+from navigator_msgs.msg import PerceptionObjects
+from navigator_msgs.srv import ObjectDBQuery, ObjectDBQueryRequest
+from twisted.internet import defer
+from txros import util
+from sets import Set
+__author__ = "Tess Bianchi"
+
+
+class DBHelper(object):
+    """DBHelper class."""
+
+    def __init__(self, nh):
+        """Initialize the DB helper class."""
+        self.found = Set()
+        self.nh = nh
+        self.new_object_subscriber = None
+
+    @util.cancellableInlineCallbacks
+    def init_(self):
+        """Initialize the txros parts of the DBHelper."""
+        self._sub_database = yield self.nh.subscribe('/database/objects', PerceptionObjects, self.object_cb)
+        self._database = yield self.nh.get_service_client("/database/requests", ObjectDBQuery)
+
+        defer.returnValue(self)
+
+    @util.cancellableInlineCallbacks
+    def begin_observing(self, cb):
+        """
+        Get notified when a new object is added to the database.
+
+        cb : The callback that is called when a new object is added to the database
+        """
+        self.new_object_subscriber = cb
+        req = ObjectDBQueryRequest()
+        req.name = 'all'
+        resp = yield self._database(req)
+
+        for o in resp.objects:
+            # The isupper call is because the first letter is upper case if it is a 'fake' object
+            if o.name not in self.found and not o.name[0].isupper():
+                self.found.add(o.name)
+                self.new_object_subscriber(o)
+
+    def object_cb(self, perception_objects):
+        """Callback for the object database."""
+        for o in perception_objects:
+            if o.name not in self.found and not o.name[0].isupper():
+                self.found.add(o.name)
+                if self.new_object_subscriber is not None:
+                    self.new_object_subscriber(o)
+
+    def set_color(self, color, name):
+        """Set the color of an object in the database."""
+        raise NotImplementedError()
+
+    def get_object(self, name):
+        """Get an object from the database."""
+        raise NotImplementedError()
+
+    def set_fake_position(self, pos):
+        """Set the position of a fake perception object."""
+        raise NotImplementedError()
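A sketch of the intended call pattern for the helper above, assuming a txros NodeHandle `nh` already exists (the callback body is illustrative):

```python
from txros import util
from navigator_tools.object_database_helper import DBHelper

@util.cancellableInlineCallbacks
def watch_database(nh):
    def on_new_object(obj):
        print "new database object:", obj.name

    # init_ returns the helper itself once its subscriber and client are up
    helper = yield DBHelper(nh).init_()
    yield helper.begin_observing(on_new_object)
```

`begin_observing` first replays every object already in the database, then `object_cb` keeps firing the callback for objects that appear later.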
From 3d391d3333793e5bd4789dfa5d0c183c8ca95c9b Mon Sep 17 00:00:00 2001
From: mattlangford
Date: Sun, 6 Nov 2016 01:36:33 -0500
Subject: [PATCH 118/267] TOOLS: Add script to move fake database objects
 based on lla coordinates

---
 .../nodes/estimated_object_setter.py          | 51 +++++++++++++++++++
 1 file changed, 51 insertions(+)
 create mode 100755 utils/navigator_tools/nodes/estimated_object_setter.py

diff --git a/utils/navigator_tools/nodes/estimated_object_setter.py b/utils/navigator_tools/nodes/estimated_object_setter.py
new file mode 100755
index 00000000..22854053
--- /dev/null
+++ b/utils/navigator_tools/nodes/estimated_object_setter.py
@@ -0,0 +1,51 @@
+#!/usr/bin/env python
+import numpy as np
+
+import ast
+import sys
+from argparse import RawTextHelpFormatter, ArgumentParser
+
+import txros
+import navigator_tools
+from twisted.internet import defer
+from coordinate_conversion_server import Converter
+from navigator_msgs.srv import CoordinateConversionRequest, ObjectDBQuery, ObjectDBQueryRequest
+
+
+@txros.util.cancellableInlineCallbacks
+def main(name, lla):
+    nh = yield txros.NodeHandle.from_argv("estimated_object_setter")
+    db = yield nh.get_service_client("/database/requests", ObjectDBQuery)
+
+    convert = Converter()
+    yield convert.init(nh)
+
+    # Convert the name to Be_Like_This
+    name = '_'.join(map(lambda txt: txt.title(), name.split("_")))
+
+    point = yield convert.request(CoordinateConversionRequest(frame="lla", point=lla))
+    yield db(ObjectDBQueryRequest(cmd="{}={p[0]}, {p[1]}".format(name, p=point.enu)))
+
+
+if __name__ == "__main__":
+    usage_msg = "Used to set the estimated position of objects in the database with lla positions."
+    desc_msg = "Pass the name of the database object and the lla position and this will set its estimated position in the database. \n\
+    ex. rosrun navigator_tools estimated_object_setter.py scan_the_code 85.3 -25.6 \n\
+    ex. rosrun navigator_tools estimated_object_setter.py Shooter 82.32 -26.87 --alt 2"
+
+    parser = ArgumentParser(usage=usage_msg, description=desc_msg, formatter_class=RawTextHelpFormatter)
+    parser.add_argument(dest='name',
+                        help="Name of the object.")
+    parser.add_argument(dest='lat', type=float,
+                        help="Latitude in degrees of the object of interest.")
+    parser.add_argument(dest='long', type=float,
+                        help="Longitude in degrees of the object of interest.")
+    parser.add_argument('--alt', '-a', action='store', type=float, default=0.0,
+                        help="Altitude in meters of the object of interest (usually can be omitted).")
+
+    args = parser.parse_args(sys.argv[1:])
+    name = args.name
+    lat = args.lat
+    lon = args.long  # `long` is python reserved :(
+    alt = args.alt
+
+    txros.util.launch_main(lambda: main(name, [lat, lon, alt]))
From e2442ec13e536a63859d5aab59af5c65bc92d9db Mon Sep 17 00:00:00 2001
From: Tess
Date: Thu, 10 Nov 2016 11:37:02 -0500
Subject: [PATCH 119/267] MISSION PLANNER: object database uses incorrect
 message type

---
 .../navigator_tools/object_database_helper.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/utils/navigator_tools/navigator_tools/object_database_helper.py b/utils/navigator_tools/navigator_tools/object_database_helper.py
index 77f503cc..91903374 100644
--- a/utils/navigator_tools/navigator_tools/object_database_helper.py
+++ b/utils/navigator_tools/navigator_tools/object_database_helper.py
@@ -1,5 +1,5 @@
 """Use the DBHelper class to interface with the Database without having to deal with ROS things."""
-from navigator_msgs.msg import PerceptionObjects
+from navigator_msgs.msg import PerceptionObjectArray
 from navigator_msgs.srv import ObjectDBQuery, ObjectDBQueryRequest
 from twisted.internet import defer
 from txros import util
@@ -19,7 +19,7 @@ def __init__(self, nh):
     @util.cancellableInlineCallbacks
     def init_(self):
         """Initialize the txros parts of the DBHelper."""
-        self._sub_database = yield self.nh.subscribe('/database/objects', PerceptionObjects, self.object_cb)
+        self._sub_database = yield self.nh.subscribe('/database/objects', PerceptionObjectArray, self.object_cb)
         self._database = yield self.nh.get_service_client("/database/requests", ObjectDBQuery)
 
         defer.returnValue(self)
@@ -44,7 +44,7 @@ def begin_observing(self, cb):
 
     def object_cb(self, perception_objects):
         """Callback for the object database."""
-        for o in perception_objects:
+        for o in perception_objects.objects:
             if o.name not in self.found and not o.name[0].isupper():
                 self.found.add(o.name)
                 if self.new_object_subscriber is not None:
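The name normalization in the setter above is easy to get wrong by hand; this is exactly what the `title()` join produces:

```python
name = "scan_the_code"
print '_'.join(txt.title() for txt in name.split("_"))  # -> Scan_The_Code
print '_'.join(txt.title() for txt in "shooter".split("_"))  # -> Shooter
```

So users can pass the lowercase topic-style name on the command line and still hit the capitalized 'fake' object entry in the database.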
From 067773a12279ec768fa8ef38911f2c04df7d9bae Mon Sep 17 00:00:00 2001
From: mike
Date: Thu, 10 Nov 2016 23:10:31 -0500
Subject: [PATCH 120/267] DETECT DELIVER: Clearer text in debug image

---
 .../nodes/shape_identification/DockShapeVision.cpp | 14 ++++++++++++++
 .../nodes/shape_identification/DockShapeVision.h   |  4 ++++
 .../GrayscaleContour/GrayscaleContour.cpp          | 12 ++----------
 3 files changed, 20 insertions(+), 10 deletions(-)

diff --git a/perception/navigator_vision/nodes/shape_identification/DockShapeVision.cpp b/perception/navigator_vision/nodes/shape_identification/DockShapeVision.cpp
index cdf9c150..ded453a2 100644
--- a/perception/navigator_vision/nodes/shape_identification/DockShapeVision.cpp
+++ b/perception/navigator_vision/nodes/shape_identification/DockShapeVision.cpp
@@ -1,2 +1,16 @@
 #include "DockShapeVision.h"
+using namespace cv;
+int DockShapeVision::fontFace = CV_FONT_HERSHEY_SIMPLEX;
+double DockShapeVision::fontScale = .6;
 DockShapeVision::DockShapeVision(ros::NodeHandle& nh) : nh(nh) {}
+void DockShapeVision::DrawShapes(cv::Mat& frame, navigator_msgs::DockShapes& symbols)
+{
+  for (auto symbol : symbols.list) {
+    Scalar color(0,0,255);
+    if (symbol.Color == navigator_msgs::DockShape::RED)
+      color = Scalar(0,255,0);
+    putText(frame, symbol.Shape + " (" + symbol.Color + ")",
+            Point(symbol.CenterX, symbol.CenterY), fontFace, fontScale,
+            Scalar(0,0,255), 1,CV_AA);
+  }
+}
diff --git a/perception/navigator_vision/nodes/shape_identification/DockShapeVision.h b/perception/navigator_vision/nodes/shape_identification/DockShapeVision.h
index 473f1cd5..af82d294 100644
--- a/perception/navigator_vision/nodes/shape_identification/DockShapeVision.h
+++ b/perception/navigator_vision/nodes/shape_identification/DockShapeVision.h
@@ -1,5 +1,6 @@
 #pragma once
 #include
+#include
 #include
 #include "opencv2/opencv.hpp"
 class DockShapeVision {
@@ -9,5 +10,8 @@ class DockShapeVision {
  public:
   virtual void GetShapes(cv::Mat& frame, navigator_msgs::DockShapes& symbols) = 0;
+  static void DrawShapes(cv::Mat& frame, navigator_msgs::DockShapes& symbols);
+  static int fontFace;
+  static double fontScale;
   virtual void init() = 0;
 };
diff --git a/perception/navigator_vision/nodes/shape_identification/GrayscaleContour/GrayscaleContour.cpp b/perception/navigator_vision/nodes/shape_identification/GrayscaleContour/GrayscaleContour.cpp
index fbe88d87..bdaee629 100644
--- a/perception/navigator_vision/nodes/shape_identification/GrayscaleContour/GrayscaleContour.cpp
+++ b/perception/navigator_vision/nodes/shape_identification/GrayscaleContour/GrayscaleContour.cpp
@@ -108,11 +108,7 @@ void GrayscaleContour::GetShapes(cv::Mat& frame, navigator_msgs::DockShapes& sym
   }
 
 #ifdef DO_DEBUG
-  for (auto symbol : symbols.list) {
-    putText(result, symbol.Shape + "\n(" + symbol.Color + ")",
-            Point(symbol.CenterX + 10, symbol.CenterY), 1, 1,
-            Scalar(0, 0, 0), 3);
-  }
+  DrawShapes(result,symbols);
   imshow("Result", result);
   imshow("Grayscale", grayscaleFrame);
   imshow("Edges", edgesFrame);
   waitKey(5);
 #endif
#ifdef DO_ROS_DEBUG
-  for (auto symbol : symbols.list) {
-    putText(ros_color_debug.image, symbol.Shape + "\n(" + symbol.Color + ")",
-            Point(symbol.CenterX + 10 - roi.x, symbol.CenterY - roi.y), 1, 1,
-            Scalar(0, 0, 0), 3);
-  }
+  DrawShapes(ros_color_debug.image,symbols);
   color_debug_publisher.publish(ros_color_debug.toImageMsg());
   cv_bridge::CvImage ros_contours_debug;
   ros_color_debug.encoding = "mono8";
From 22b4e4edfb8f4b2f1d29f93148e64c2b3137e469 Mon Sep 17 00:00:00 2001
From: mike
Date: Fri, 11 Nov 2016 00:29:47 -0500
Subject: [PATCH 121/267] SIMULATION: Fix sensor pose + tool for getting
 gazebo pose from tf

Stereo cameras switched to two individual cams in SDF so frame ids are correct
Velodyne points reproject into the camera frame correctly in sim
Use ```rosrun navigator_tools tf_to_gazebo.py _cam_fix:=true``` to print out
gazebo pose tags for sensors based on the current tf
---
 utils/navigator_tools/nodes/tf_to_gazebo.py | 34 +++++++++++++++++++++
 1 file changed, 34 insertions(+)
 create mode 100755 utils/navigator_tools/nodes/tf_to_gazebo.py

diff --git a/utils/navigator_tools/nodes/tf_to_gazebo.py b/utils/navigator_tools/nodes/tf_to_gazebo.py
new file mode 100755
index 00000000..437f8995
--- /dev/null
+++ b/utils/navigator_tools/nodes/tf_to_gazebo.py
@@ -0,0 +1,34 @@
+#!/usr/bin/env python
+import roslib
+import rospy
+import math
+import tf
+import geometry_msgs.msg
+import numpy as np
+def main():
+    rospy.init_node('tf_to_gazebo')
+    do_cam_fix = rospy.get_param("~cam_fix",False)
+    listener = tf.TransformListener()
+    rate = rospy.Rate(10.0)
+    cam_fix_quat = (-0.5 , 0.5 ,-0.5 ,-0.5)
+
+    while not rospy.is_shutdown():
+        try:
+            print "============= TRANSFORMS ================"
+            for frame_id in listener.getFrameStrings():
+                (trans,rot) = listener.lookupTransform('/measurement',frame_id, rospy.Time(0))
+                print "--"
+                print "Tranform {} {}".format('/measurement',frame_id)
+                if do_cam_fix:
+                    rot = tf.transformations.quaternion_multiply(rot,cam_fix_quat)
+                print "(qx={}, qy={} , qz={}, qw={})".format(rot[0],rot[1],rot[2],rot[3])
+                print "(x={}, y={}, z={})".format(trans[0],trans[1],trans[2])
+                euler = tf.transformations.euler_from_quaternion( rot)
+                print "(Roll={}, Pitch={}, Yaw={})".format(euler[0], euler[1], euler[2])
+                print "<pose> {} {} {} {} {} {} </pose>".format(trans[0],trans[1],trans[2],euler[0], euler[1], euler[2])
+        except (tf.LookupException, tf.ConnectivityException, tf.ExtrapolationException):
+            continue
+        rate.sleep()
+
+if __name__ == '__main__':
+    main()
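To see what the hard-coded `cam_fix_quat` above actually does, it helps to turn it into a rotation matrix; the columns show where the camera frame's x, y, z axes land after the fix is composed onto a transform. A small inspection sketch:

```python
import numpy as np
from tf import transformations

cam_fix_quat = (-0.5, 0.5, -0.5, -0.5)  # (qx, qy, qz, qw), as in tf_to_gazebo.py
R = transformations.quaternion_matrix(cam_fix_quat)[:3, :3]
print np.round(R)
# [[ 0. -1.  0.]
#  [ 0.  0. -1.]
#  [ 1.  0.  0.]]
```

This is the usual optical-frame versus body-frame axis shuffle, which is why it is only applied when `_cam_fix:=true` is set for camera sensors.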
From f6be9f85411df62ea8fe865bcd222c97430e939b Mon Sep 17 00:00:00 2001
From: mike
Date: Fri, 11 Nov 2016 02:29:40 -0500
Subject: [PATCH 122/267] DETECT DELIVER: Better normal approx and debug

Pixel tolerance for normal approx based on shape size
Points included in normal are red in debug image
---
 .../camera_to_lidar/CameraLidarTransformer.cpp | 14 +++++++++-----
 1 file changed, 9 insertions(+), 5 deletions(-)

diff --git a/perception/navigator_vision/nodes/camera_to_lidar/CameraLidarTransformer.cpp b/perception/navigator_vision/nodes/camera_to_lidar/CameraLidarTransformer.cpp
index 846aab02..ec98cc94 100644
--- a/perception/navigator_vision/nodes/camera_to_lidar/CameraLidarTransformer.cpp
+++ b/perception/navigator_vision/nodes/camera_to_lidar/CameraLidarTransformer.cpp
@@ -87,7 +87,7 @@ bool CameraLidarTransformer::transformServiceCallback(
       marker_point.pose.position.x = x.f;
       marker_point.pose.position.y = y.f;
       marker_point.pose.position.z = z.f;
-      marker_point.scale.x = 1;
+      marker_point.scale.x = 0.1;
       marker_point.scale.y = 0.1;
       marker_point.scale.z = 0.1;
       marker_point.color.a = 1.0;
@@ -95,7 +95,6 @@ bool CameraLidarTransformer::transformServiceCallback(
       marker_point.color.g = 1.0;
       marker_point.color.b = 0.0;
       //marker_point.lifetime = ros::Duration(0.5);
-      markers.markers.push_back(marker_point);
 
       // Draw
       if (int(point.x - 20) > 0 && int(point.x + 20) < camera_info.width &&
@@ -135,7 +134,11 @@ bool CameraLidarTransformer::transformServiceCallback(
           minDistance = distance;
         }
         res.transformed.push_back(geo_point);
+        marker_point.color.r = 1.0;
+        marker_point.color.g = 0.0;
+        marker_point.color.b = 0.0;
       }
+      markers.markers.push_back(marker_point);
     }
   }
   if (res.transformed.size() < 1) {
@@ -161,6 +164,7 @@ bool CameraLidarTransformer::transformServiceCallback(
   normalvec_ros.y = normalvec(1, 0);
   normalvec_ros.z = normalvec(2, 0);
   res.normal = normalvec_ros;
+  std::cout << "Points: " << res.transformed.size() << std::endl;
   std::cout << "Plane solution: " << normalvec << std::endl;
 
 #ifdef DO_ROS_DEBUG
@@ -173,9 +177,9 @@ bool CameraLidarTransformer::transformServiceCallback(
   marker_normal.type = visualization_msgs::Marker::ARROW;
 
   geometry_msgs::Point sdp_normalvec_ros;
-  sdp_normalvec_ros.x = normalvec_ros.x + res.transformed[0].x;
-  sdp_normalvec_ros.y = normalvec_ros.y + res.transformed[0].y;
-  sdp_normalvec_ros.z = normalvec_ros.z + res.transformed[0].z;
+  sdp_normalvec_ros.x = normalvec_ros.x + res.closest.x;
+  sdp_normalvec_ros.y = normalvec_ros.y + res.closest.y;
+  sdp_normalvec_ros.z = normalvec_ros.z + res.closest.z;
   marker_normal.points.push_back(res.closest);
   marker_normal.points.push_back(sdp_normalvec_ros);
From 081b7cc3ae47f13c55880b2ffc0213bd26a59f3a Mon Sep 17 00:00:00 2001
From: mike
Date: Fri, 11 Nov 2016 15:32:34 -0500
Subject: [PATCH 123/267] SHOOTER: Move joystick control to joystick control
 node

Also minor change in tf gazebo tool to address PR comment
---
 utils/navigator_tools/nodes/tf_to_gazebo.py | 7 +++++--
 1 file changed, 5 insertions(+), 2 deletions(-)

diff --git a/utils/navigator_tools/nodes/tf_to_gazebo.py b/utils/navigator_tools/nodes/tf_to_gazebo.py
index 437f8995..30eefd9d 100755
--- a/utils/navigator_tools/nodes/tf_to_gazebo.py
+++ b/utils/navigator_tools/nodes/tf_to_gazebo.py
@@ -5,9 +5,12 @@
 import tf
 import geometry_msgs.msg
 import numpy as np
+
+
 def main():
     rospy.init_node('tf_to_gazebo')
     do_cam_fix = rospy.get_param("~cam_fix",False)
+    tf_parent = rospy.get_param("~tf_parent","/measurement")
     listener = tf.TransformListener()
     rate = rospy.Rate(10.0)
     cam_fix_quat = (-0.5 , 0.5 ,-0.5 ,-0.5)
@@ -16,9 +19,9 @@ def main():
         try:
             print "============= TRANSFORMS ================"
             for frame_id in listener.getFrameStrings():
-                (trans,rot) = listener.lookupTransform('/measurement',frame_id, rospy.Time(0))
+                (trans,rot) = listener.lookupTransform(tf_parent,frame_id, rospy.Time(0))
                 print "--"
-                print "Tranform {} {}".format('/measurement',frame_id)
+                print "Transform {} {}".format(tf_parent,frame_id)
                 if do_cam_fix:
                     rot = tf.transformations.quaternion_multiply(rot,cam_fix_quat)
From 45777c211d5794a0123c28ec87dc7b16f6609c91 Mon Sep 17 00:00:00 2001
From: mike
Date: Sat, 12 Nov 2016 01:05:43 -0500
Subject: [PATCH 124/267] DETECT DELIVER: Use PCL for better normal
 approximation

---
 .../CameraLidarTransformer.cpp                | 69 +++++++------------
 .../CameraLidarTransformer.hpp                |  4 ++
 2 files changed, 27 insertions(+), 46 deletions(-)

diff --git a/perception/navigator_vision/nodes/camera_to_lidar/CameraLidarTransformer.cpp b/perception/navigator_vision/nodes/camera_to_lidar/CameraLidarTransformer.cpp
index ec98cc94..2381ae5e 100644
--- a/perception/navigator_vision/nodes/camera_to_lidar/CameraLidarTransformer.cpp
+++ b/perception/navigator_vision/nodes/camera_to_lidar/CameraLidarTransformer.cpp
@@ -54,14 +54,13 @@ bool CameraLidarTransformer::transformServiceCallback(
   cv::Mat debug_image(camera_info.height, camera_info.width, CV_8UC3, cv::Scalar(0));
 #endif
 
-  Eigen::Matrix3d matrixFindPlaneA;
-  matrixFindPlaneA << 0, 0, 0, 0, 0, 0, 0, 0, 0;
-  Eigen::Vector3d matrixFindPlaneB(0, 0, 0);
-
   cv::circle(debug_image, cv::Point(req.point.x, req.point.y), 8,
             cv::Scalar(255, 0, 0), -1);
   //Tracks the closest lidar point to the requested camera point
   double minDistance = std::numeric_limits<double>::max();
+  pcl::PointCloud<pcl::PointXYZ> cloud2;
+  pcl::fromROSMsg(cloud_transformed, cloud2);
+  std::vector<int> indices;
   for (auto ii = 0, jj = 0; ii < cloud_transformed.width;
        ++ii, jj += cloud_transformed.point_step) {
     floatConverter x, y, z, i;
@@ -111,19 +110,7 @@ bool CameraLidarTransformer::transformServiceCallback(
         cv::circle(debug_image, point, 3, cv::Scalar(0, 255, 0), -1);
       }
 #endif
-      matrixFindPlaneA(0, 0) += x.f * x.f;
-      matrixFindPlaneA(1, 0) += y.f * x.f;
-      matrixFindPlaneA(2, 0) += x.f;
-      matrixFindPlaneA(0, 1) += x.f * y.f;
-      matrixFindPlaneA(1, 1) += y.f * y.f;
-      matrixFindPlaneA(2, 1) += y.f;
matrixFindPlaneA(0, 2) += x.f; - matrixFindPlaneA(1, 2) += y.f; - matrixFindPlaneA(2, 2) += 1; - matrixFindPlaneB(0, 0) -= x.f * z.f; - matrixFindPlaneB(1, 0) -= y.f * z.f; - matrixFindPlaneB(2, 0) -= z.f; - + geometry_msgs::Point geo_point; geo_point.x = x.f; geo_point.y = y.f; @@ -133,12 +120,14 @@ bool CameraLidarTransformer::transformServiceCallback( res.closest = geo_point; minDistance = distance; } - res.transformed.push_back(geo_point); + indices.push_back(ii); marker_point.color.r = 1.0; marker_point.color.g = 0.0; marker_point.color.b = 0.0; + res.transformed.push_back(geo_point); } - markers.markers.push_back(marker_point); + markers.markers.push_back(marker_point); + } } if (res.transformed.size() < 1) { @@ -146,50 +135,38 @@ bool CameraLidarTransformer::transformServiceCallback( res.error = navigator_msgs::CameraToLidarTransform::Response::NO_POINTS_FOUND; return true; } - - //Filter out lidar points behind the plane (I don't think this will work, no good way to do this) - // ~double min_z = (*std::min_element(res.transformed.begin(),res.transformed.end(), [=] (const geometry_msgs::Point& a,const geometry_msgs::Point& b) { - // ~return a.z < b.z && a.z > MIN_Z; - // ~})).z; - // ~res.transformed.erase(std::remove_if(res.transformed.begin(),res.transformed.end(),[min_z] (const geometry_msgs::Point& p) { - // ~return (p.z - min_z) < MAX_Z_ERROR; - // ~}),res.transformed.end()); + + float x,y,z,n; + pcl::NormalEstimation ne; + ne.computePointNormal (cloud2,indices,x,y,z,n); + Eigen::Vector3d normal_vector = Eigen::Vector3d(x,y,z).normalized(); + res.normal.x = -normal_vector(0, 0); + res.normal.y = -normal_vector(1, 0); + res.normal.z = -normal_vector(2, 0); res.distance = res.closest.z; - Eigen::Vector3d normalvec = matrixFindPlaneA.colPivHouseholderQr() - .solve(matrixFindPlaneB) - .normalized(); - geometry_msgs::Vector3 normalvec_ros; - normalvec_ros.x = normalvec(0, 0); - normalvec_ros.y = normalvec(1, 0); - normalvec_ros.z = normalvec(2, 0); - res.normal = normalvec_ros; - std::cout << "Points: " << res.transformed.size() << std::endl; - std::cout << "Plane solution: " << normalvec << std::endl; #ifdef DO_ROS_DEBUG - //Create marker for normal + //Add a marker for the normal to the plane + geometry_msgs::Point sdp_normalvec_ros; + sdp_normalvec_ros.x = res.closest.x + res.normal.x; + sdp_normalvec_ros.y = res.closest.y + res.normal.y; + sdp_normalvec_ros.z = res.closest.z + res.normal.z; visualization_msgs::Marker marker_normal; marker_normal.header = req.header; marker_normal.header.seq = 0; marker_normal.header.frame_id = req.header.frame_id; marker_normal.id = 3000; marker_normal.type = visualization_msgs::Marker::ARROW; - - geometry_msgs::Point sdp_normalvec_ros; - sdp_normalvec_ros.x = normalvec_ros.x + res.closest.x; - sdp_normalvec_ros.y = normalvec_ros.y + res.closest.y; - sdp_normalvec_ros.z = normalvec_ros.z + res.closest.z; marker_normal.points.push_back(res.closest); marker_normal.points.push_back(sdp_normalvec_ros); - marker_normal.scale.x = 0.1; marker_normal.scale.y = 0.5; marker_normal.scale.z = 0.5; marker_normal.color.a = 1.0; - marker_normal.color.r = 1.0; + marker_normal.color.r = 0.0; marker_normal.color.g = 0.0; - marker_normal.color.b = 0.0; + marker_normal.color.b = 1.0; markers.markers.push_back(marker_normal); //Publish 3D debug market diff --git a/perception/navigator_vision/nodes/camera_to_lidar/CameraLidarTransformer.hpp b/perception/navigator_vision/nodes/camera_to_lidar/CameraLidarTransformer.hpp index 8e49de89..6ac7b1d2 100644 --- 
a/perception/navigator_vision/nodes/camera_to_lidar/CameraLidarTransformer.hpp +++ b/perception/navigator_vision/nodes/camera_to_lidar/CameraLidarTransformer.hpp @@ -15,6 +15,10 @@ #include #include #include +#include +#include +#include +#include #include #include #include From 427a75e55723f22ccb619ad6cbea3fc25f6d1560 Mon Sep 17 00:00:00 2001 From: mike Date: Sat, 12 Nov 2016 12:21:48 -0500 Subject: [PATCH 125/267] TOOLS: Add tool to play video files into ros --- utils/navigator_tools/nodes/video_player.py | 94 +++++++++++++++++++++ 1 file changed, 94 insertions(+) create mode 100755 utils/navigator_tools/nodes/video_player.py diff --git a/utils/navigator_tools/nodes/video_player.py b/utils/navigator_tools/nodes/video_player.py new file mode 100755 index 00000000..7c08ad83 --- /dev/null +++ b/utils/navigator_tools/nodes/video_player.py @@ -0,0 +1,94 @@ +#!/usr/bin/env python +""" +Usage: rosrun navigator_tools video_player.py _filename:=MyVideoFile.mp4 + +Utility to play video files into a ros topic +with some added conveniences like pausing and +scrolling through the video with a slider bar. + +Plays any videofile that OpenCV can open (mp4,etc) +Publishes to video_file/image_raw + +Set the following rosparams for customization +~filename string what file to load +~slider boolean True to open window with slider bar and pause +~start_frames int number of frames into video to start playback at + +If slider is set to True (default), a window will open with a +slider bar for moving through the video. Press space in this window +to pause the video and update the slider position. While paused, +you can press s to go frame by frame. + +""" + +import sys +import rospy +import cv2 +from sensor_msgs.msg import Image +from cv_bridge import CvBridge, CvBridgeError + +class RosVideoPlayer: + + def __init__(self): + self.bridge = CvBridge() + self.image_pub = rospy.Publisher("video_file/image_raw",Image,queue_size=10) + self.filename = rospy.get_param('~filename', 'video.mp4') + self.slider = rospy.get_param('~slider',True) + self.start_frame = rospy.get_param('~start_frames',0) + self.cap = cv2.VideoCapture(self.filename) + self.num_frames = self.cap.get(cv2.cv.CV_CAP_PROP_FRAME_COUNT) + self.fps = rospy.get_param('~fps',self.cap.get(cv2.cv.CV_CAP_PROP_FPS)) + self.cap.set(cv2.cv.CV_CAP_PROP_POS_FRAMES,self.start_frame) + self.num_frames = self.cap.get(cv2.cv.CV_CAP_PROP_FRAME_COUNT) + self.paused = False + self.ended = False + self.last_key = 255 + + if (self.slider): + cv2.namedWindow('Video Control') + cv2.createTrackbar('Frame','Video Control',int(0),int(self.num_frames),self.trackbar_cb) + rospy.loginfo("Playing {} at {}fps starting at frame {} ({} Total Frames)".format(self.filename,self.fps,self.start_frame,self.num_frames)) + + def run(self): + r = rospy.Rate(self.fps) # 10hz + while (not rospy.is_shutdown()) and self.cap.isOpened(): + if self.slider: + k = cv2.waitKey(1) & 0xFF + if not k == self.last_key: + self.last_key = k + if k == 27: + return + elif k == 32: + self.pause() + elif k == 115 and self.paused: + self.one_frame() + if self.paused: + cv2.setTrackbarPos('Frame','Video Control',int(self.cap.get(cv2.cv.CV_CAP_PROP_POS_FRAMES))) + else: + self.one_frame() + r.sleep() + + def one_frame(self): + ret, frame = self.cap.read() + if not ret: + if not self.ended: + rospy.loginfo("File {} ended".format(self.filename)) + self.ended = True + return + else: + self.ended = False + self.image_pub.publish(self.bridge.cv2_to_imgmsg(frame, "bgr8")) + + def trackbar_cb(self,x): + 
self.cap.set(cv2.cv.CV_CAP_PROP_POS_FRAMES,x) + + def pause(self): + self.paused = not self.paused + +def main(): + rospy.init_node('video_player') + player = RosVideoPlayer() + player.run() + +if __name__ == '__main__': + main() From 72ee87f57cf43ce26e2e6b0be02cfa79d8ed7a8f Mon Sep 17 00:00:00 2001 From: mike Date: Sat, 12 Nov 2016 12:33:45 -0500 Subject: [PATCH 126/267] TOOLS: tf fudger starts with current tf - Tries for one second to get current tf, otherwise everything is zero - press r to resetfudge to original - press z to set everything to zero --- utils/navigator_tools/nodes/tf_fudger.py | 51 ++++++++++++++++++++++-- 1 file changed, 48 insertions(+), 3 deletions(-) diff --git a/utils/navigator_tools/nodes/tf_fudger.py b/utils/navigator_tools/nodes/tf_fudger.py index 316049d6..a2a81dc8 100755 --- a/utils/navigator_tools/nodes/tf_fudger.py +++ b/utils/navigator_tools/nodes/tf_fudger.py @@ -3,6 +3,7 @@ import rospy import tf +import tf.transformations as trns import cv2 import argparse import sys @@ -53,6 +54,9 @@ toTfLin = lambda x: x * x_res - 0.5 * x_range toTfAng = lambda x: x * ang_res - 0.5 * ang_range +toCvLin = lambda x: (x+0.5*x_range)/x_res +toCvAng = lambda x: (x+0.5*ang_range)/ang_res + p = q = None q_mode = False @@ -63,20 +67,61 @@ args.tf_child = args.tf_child[1:] if args.tf_child[0] == '/' else args.tf_child args.tf_parent = args.tf_parent[1:] if args.tf_parent[0] == '/' else args.tf_parent +listener = tf.TransformListener() + +p_original = (0,0,0) +rpy_original = (0,0,0) +try: + listener.waitForTransform(args.tf_parent,args.tf_child, rospy.Time(0), rospy.Duration(1)) + (trans,rot) = listener.lookupTransform(args.tf_parent,args.tf_child, rospy.Time(0)) + euler = trns.euler_from_quaternion(rot) + p_original = trans + rpy_original = euler +except tf.Exception: + print "TF not found, setting everything to zero" + +def set_bars(p, rpy): + cv2.setTrackbarPos("x", "tf",int( toCvLin(p[0]) ) ) + cv2.setTrackbarPos("y", "tf",int( toCvLin(p[1]) ) ) + cv2.setTrackbarPos("z", "tf",int( toCvLin(p[2]) ) ) + cv2.setTrackbarPos("roll", "tf",int( toCvAng(rpy[0]) )) + cv2.setTrackbarPos("pitch", "tf",int( toCvAng(rpy[1]) )) + cv2.setTrackbarPos("yaw", "tf",int( toCvAng(rpy[2]) )) + k = cv2.waitKey(100) & 0xFF + if k == ord('q'): + q_mode = not q_mode + +def reset(): + set_bars(p_original,rpy_original) + +reset() + +p_last = p_original +rpy_last = rpy_original + while not rospy.is_shutdown(): x, y, z = toTfLin(cv2.getTrackbarPos("x", "tf")), toTfLin(cv2.getTrackbarPos("y", "tf")), toTfLin(cv2.getTrackbarPos("z", "tf")) p = (x, y, z) rpy = (toTfAng(cv2.getTrackbarPos("roll", "tf")), toTfAng(cv2.getTrackbarPos("pitch", "tf")), toTfAng(cv2.getTrackbarPos("yaw", "tf"))) q = tf.transformations.quaternion_from_euler(*rpy) - rpy_feedback = "xyz: {}, euler: {}".format([round(x, 5) for x in p], [round(np.degrees(x), 5) for x in rpy]) - q_feedback = "xyz: {}, q: {}".format([round(x, 5) for x in p], [round(x, 5) for x in q]) - print q_feedback if q_mode else rpy_feedback + if (not p == p_last) or (not rpy == rpy_last): + rpy_feedback = "xyz: {}, euler: {}".format([round(x, 5) for x in p], [round(np.degrees(x), 5) for x in rpy]) + q_feedback = "xyz: {}, q: {}".format([round(x, 5) for x in p], [round(x, 5) for x in q]) + print q_feedback if q_mode else rpy_feedback + p_last = p + rpy_last = rpy k = cv2.waitKey(100) & 0xFF if k == ord('q'): q_mode = not q_mode + if k == ord('r'): + reset() + + if k == ord('z'): + set_bars((0, 0, 0), (0, 0, 0)) + if k == ord('s'): # Save the transform in 
navigator_launch/launch/tf.launch replacing the line import rospkg From 1d89978880d699da0a742a6ee30a27f8d20e3dd1 Mon Sep 17 00:00:00 2001 From: mike Date: Sun, 13 Nov 2016 11:29:13 -0500 Subject: [PATCH 127/267] TOOLS: add tool to publish voltage and wrench to rviz --- utils/navigator_tools/nodes/boat_info.py | 70 ++++++++++++++++++++++++ 1 file changed, 70 insertions(+) create mode 100755 utils/navigator_tools/nodes/boat_info.py diff --git a/utils/navigator_tools/nodes/boat_info.py b/utils/navigator_tools/nodes/boat_info.py new file mode 100755 index 00000000..bdfcdcb5 --- /dev/null +++ b/utils/navigator_tools/nodes/boat_info.py @@ -0,0 +1,70 @@ +#!/usr/bin/env python +from geometry_msgs.msg import WrenchStamped +from navigator_msgs.srv import WrenchSelect +import rospy +from std_msgs.msg import Bool, String, Float32 +from visualization_msgs.msg import Marker, MarkerArray +from navigator_alarm import AlarmListener +import numpy as np + +class RvizStrings(object): + ID = 1337 + NS = "boat_info" + X_POS = -4.0 + + def __init__(self): + self.station_holding = False + self.kill_alarm = False + self.voltage = None + self.wrench = None + rospy.Subscriber("/wrench/current", String, self.wrench_current_cb) + rospy.Subscriber("/battery_monitor",Float32, self.battery_monitor_cb) + self.markers_pub = rospy.Publisher('/boat_info', Marker, queue_size=10) + self.kill_listener = AlarmListener('kill', self.kill_alarm_cb) + self.station_hold_listner = AlarmListener('station_hold', self.station_alarm_cb) + + def update(self): + marker = Marker() + marker.scale.x = 1 + marker.scale.y = 1 + marker.scale.z = 1 + marker.action = Marker.ADD; + marker.header.frame_id = "base_link" + marker.header.stamp = rospy.Time() + marker.type = Marker.TEXT_VIEW_FACING + marker.ns = self.NS + marker.pose.position.x = self.X_POS + marker.id = self.ID + marker.color.a = 1 + marker.color.b = 1 + if not self.voltage == None: + marker.text += "Voltage {}".format(self.voltage) + if not self.wrench == None: + marker.text += "\nWrench {}".format(self.wrench) + if self.station_holding: + marker.text += "\nStation Holding" + if self.kill_alarm: + marker.text += "\nKILLED" + self.markers_pub.publish(marker) + + def station_alarm_cb(self, alarm): + self.station_holding = not alarm.clear + self.update() + + def kill_alarm_cb(self, alarm): + self.kill_alarm = not alarm.clear + self.update() + + def wrench_current_cb(self,string): + self.wrench = string.data + self.update() + + def battery_monitor_cb(self,voltage): + self.voltage = np.round(voltage.data,2) + self.update() + + +if __name__ == "__main__": + rospy.init_node('info_markers') + arb = RvizStrings() + rospy.spin() From 4a68bcaea926be28e0a9419d7d641bc7e5f3b661 Mon Sep 17 00:00:00 2001 From: Tess Date: Fri, 11 Nov 2016 09:10:19 -0500 Subject: [PATCH 128/267] TESTING FRAMEWORK: adds unit test for the mission planner. This also includes ways to spoof services and publishers so we can test independent of other nodes. 
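For readers unfamiliar with the spoofing technique this patch refers to: a test can register a stand-in handler under a service name so the node under test receives canned responses instead of calling real nodes. The sketch below is illustrative only; the service name and the use of std_srvs/Trigger are assumptions, not the interfaces these tests actually spoof.

#!/usr/bin/env python
# Illustrative sketch (hypothetical names): spoof a ROS service for a unit test.
import rospy
from std_srvs.srv import Trigger, TriggerResponse

def fake_handler(req):
    # Canned response; the node under test never touches real hardware or nodes
    return TriggerResponse(success=True, message="spoofed")

if __name__ == '__main__':
    rospy.init_node('fake_service_node')
    rospy.Service('/spoofed_service', Trigger, fake_handler)
    rospy.spin()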
--- utils/navigator_tools/navigator_tools/__init__.py | 2 ++ .../navigator_tools/missing_perception_object.py | 10 ++++++++++ .../navigator_tools/object_database_helper.py | 5 ++++- 3 files changed, 16 insertions(+), 1 deletion(-) create mode 100644 utils/navigator_tools/navigator_tools/missing_perception_object.py diff --git a/utils/navigator_tools/navigator_tools/__init__.py b/utils/navigator_tools/navigator_tools/__init__.py index b2fae0f2..75f7ddc1 100644 --- a/utils/navigator_tools/navigator_tools/__init__.py +++ b/utils/navigator_tools/navigator_tools/__init__.py @@ -6,6 +6,7 @@ import rviz_helpers import general_helpers import object_database_helper +import missing_perception_object from init_helpers import * from image_helpers import * @@ -15,3 +16,4 @@ from rviz_helpers import * from general_helpers import * from object_database_helper import * +from missing_perception_object import * diff --git a/utils/navigator_tools/navigator_tools/missing_perception_object.py b/utils/navigator_tools/navigator_tools/missing_perception_object.py new file mode 100644 index 00000000..7bb8eeaa --- /dev/null +++ b/utils/navigator_tools/navigator_tools/missing_perception_object.py @@ -0,0 +1,10 @@ +class MissingPerceptionObject(Exception): + + def __init__(self, missing_object, message="An object from the database is missing"): + + # Call the base class constructor with the parameters it needs + super(MissingPerceptionObject, self).__init__(message) + + # Now for your custom code... + self.missing_object = missing_object + self.message = message diff --git a/utils/navigator_tools/navigator_tools/object_database_helper.py b/utils/navigator_tools/navigator_tools/object_database_helper.py index 91903374..360c8af0 100644 --- a/utils/navigator_tools/navigator_tools/object_database_helper.py +++ b/utils/navigator_tools/navigator_tools/object_database_helper.py @@ -45,11 +45,14 @@ def begin_observing(self, cb): def object_cb(self, perception_objects): """Callback for the object database.""" for o in perception_objects.objects: - if o.name not in self.found and not o[0].isupper(): + if o.name not in self.found and not o.name[0].isupper(): self.found.add(o.name) if self.new_object_subscriber is not None: self.new_object_subscriber(o) + def remove_found(self, name): + self.found.remove(name) + def set_color(self, color, name): """Set the color of an object in the database.""" raise NotImplementedError() From e3667420ef3c198adfeb0636894627b8f80b4d6b Mon Sep 17 00:00:00 2001 From: Tess Date: Sat, 12 Nov 2016 21:50:56 -0500 Subject: [PATCH 129/267] MISSION PLANNER: adds timeouts to missions, constantly checks for object dependencies instead of relying on the missions for this. Adds corresponding tests. 
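To make the dependency-watching API introduced here concrete, a mission would use it roughly as in the sketch below, where `helper` is an instance of the object database helper and the object names are made up:

# Illustrative usage of ensure_object_permanence (names are hypothetical).
def on_missing(missing_objects):
    # Called whenever a listed dependency drops out of the database
    print "Lost mission dependencies:", missing_objects

helper.ensure_object_permanence(['totem', 'shooter'], on_missing)
# ... mission runs; object_cb re-checks the dependency list on every database update ...
helper.stop_ensuring_object_permanence()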
---
 .../missing_perception_object.py | 10 ----------
 .../navigator_tools/object_database_helper.py | 23 +++++++++++++++++++
 2 files changed, 23 insertions(+), 10 deletions(-)
 delete mode 100644 utils/navigator_tools/navigator_tools/missing_perception_object.py

diff --git a/utils/navigator_tools/navigator_tools/missing_perception_object.py b/utils/navigator_tools/navigator_tools/missing_perception_object.py
deleted file mode 100644
index 7bb8eeaa..00000000
--- a/utils/navigator_tools/navigator_tools/missing_perception_object.py
+++ /dev/null
@@ -1,10 +0,0 @@
-class MissingPerceptionObject(Exception):
-
-    def __init__(self, missing_object, message="An object from the database is missing"):
-
-        # Call the base class constructor with the parameters it needs
-        super(MissingPerceptionObject, self).__init__(message)
-
-        # Now for your custom code...
-        self.missing_object = missing_object
-        self.message = message
diff --git a/utils/navigator_tools/navigator_tools/object_database_helper.py b/utils/navigator_tools/navigator_tools/object_database_helper.py
index 360c8af0..7c6cb7bd 100644
--- a/utils/navigator_tools/navigator_tools/object_database_helper.py
+++ b/utils/navigator_tools/navigator_tools/object_database_helper.py
@@ -15,6 +15,9 @@ def __init__(self, nh):
         self.found = Set()
         self.nh = nh
         self.new_object_subscriber = None
+        self.ensuring_objects = False
+        self.ensuring_object_dep = None
+        self.ensuring_object_cb = None

     @util.cancellableInlineCallbacks
     def init_(self):
@@ -49,10 +52,30 @@ def object_cb(self, perception_objects):
                 self.found.add(o.name)
                 if self.new_object_subscriber is not None:
                     self.new_object_subscriber(o)
+        if self.ensuring_objects:
+            missing_objs = []
+            names = [o.name for o in perception_objects.objects]
+            for o in self.ensuring_object_dep:
+                if o not in names:
+                    missing_objs.append(o)
+            if len(missing_objs) > 0:
+                self.ensuring_object_cb(missing_objs)

     def remove_found(self, name):
         self.found.remove(name)

+    def ensure_object_permanence(self, object_dep, cb):
+        """Ensure that all the objects in the object_dep list remain in the database.
Call the callback if this isn't true.""" + if object_dep is None or cb is None: + return + self.ensuring_objects = True + self.ensuring_object_cb = cb + self.ensuring_object_dep = object_dep + + def stop_ensuring_object_permanence(self): + """Stop ensuring that objects remain in the database.""" + self.ensuring_objects = False + def set_color(self, color, name): """Set the color of an object in the database.""" raise NotImplementedError() From be18f1fa61781a9f9b3d16a3603f55967a7e4ccc Mon Sep 17 00:00:00 2001 From: Tess Date: Sat, 12 Nov 2016 23:04:57 -0500 Subject: [PATCH 130/267] NAVIGATOR TESTS: removes missing import --- utils/navigator_tools/navigator_tools/__init__.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/utils/navigator_tools/navigator_tools/__init__.py b/utils/navigator_tools/navigator_tools/__init__.py index 75f7ddc1..b2fae0f2 100644 --- a/utils/navigator_tools/navigator_tools/__init__.py +++ b/utils/navigator_tools/navigator_tools/__init__.py @@ -6,7 +6,6 @@ import rviz_helpers import general_helpers import object_database_helper -import missing_perception_object from init_helpers import * from image_helpers import * @@ -16,4 +15,3 @@ from rviz_helpers import * from general_helpers import * from object_database_helper import * -from missing_perception_object import * From 7590075b349a328a391925b17124191ce3f2eb22 Mon Sep 17 00:00:00 2001 From: mattlangford Date: Thu, 10 Nov 2016 16:02:35 -0500 Subject: [PATCH 131/267] BOUNDS: Move bounds to dynamic reconfig --- utils/navigator_tools/CMakeLists.txt | 11 +- utils/navigator_tools/cfg/BoundsConfig.cfg | 14 ++ utils/navigator_tools/nodes/bounds.py | 130 ++++++++++-------- .../nodes/coordinate_conversion_server.py | 12 +- 4 files changed, 108 insertions(+), 59 deletions(-) create mode 100755 utils/navigator_tools/cfg/BoundsConfig.cfg diff --git a/utils/navigator_tools/CMakeLists.txt b/utils/navigator_tools/CMakeLists.txt index 7272f75d..4a806468 100644 --- a/utils/navigator_tools/CMakeLists.txt +++ b/utils/navigator_tools/CMakeLists.txt @@ -1,11 +1,18 @@ cmake_minimum_required(VERSION 2.8.3) project(navigator_tools) -find_package(catkin REQUIRED COMPONENTS rostest std_msgs) +find_package(catkin REQUIRED COMPONENTS rostest std_msgs dynamic_reconfigure) catkin_python_setup() + +generate_dynamic_reconfigure_options( + cfg/BoundsConfig.cfg +) + +# make sure configure headers are built before any node using them +#add_dependencies(lidar ${PROJECT_NAME}_gencfg) + catkin_package() # Add folders to be run by python nosetests if(CATKIN_ENABLE_TESTING) catkin_add_nosetests(tests) endif() - diff --git a/utils/navigator_tools/cfg/BoundsConfig.cfg b/utils/navigator_tools/cfg/BoundsConfig.cfg new file mode 100755 index 00000000..2cdc6e05 --- /dev/null +++ b/utils/navigator_tools/cfg/BoundsConfig.cfg @@ -0,0 +1,14 @@ +#!/usr/bin/env python +PACKAGE = "navigator_tools" + +from dynamic_reconfigure.parameter_generator_catkin import * + +gen = ParameterGenerator() + +gen.add("enforce", bool_t, 0, "Whether or not to remove the ogrid around the boat.", True) +gen.add("lla_1", str_t, 0, "'(Lat, Long)' point of corner 1.", "(29.535011, -82.303323)") +gen.add("lla_2", str_t, 0, "'(Lat, Long)' point of corner 2.", "(29.534647, -82.304280)") +gen.add("lla_3", str_t, 0, "'(Lat, Long)' point of corner 3.", "(29.533440, -82.303577)") +gen.add("lla_4", str_t, 0, "'(Lat, Long)' point of corner 4.", "(29.533803, -82.302639)") + +exit(gen.generate(PACKAGE, "navigator_tools", "Bounds")) diff --git a/utils/navigator_tools/nodes/bounds.py 
b/utils/navigator_tools/nodes/bounds.py index 1226c702..062b9ed0 100755 --- a/utils/navigator_tools/nodes/bounds.py +++ b/utils/navigator_tools/nodes/bounds.py @@ -1,63 +1,85 @@ #!/usr/bin/env python import numpy as np -import txros +import rospy import navigator_tools from twisted.internet import defer -from coordinate_conversion_server import Converter -from navigator_msgs.srv import Bounds, BoundsResponse, \ - CoordinateConversionRequest - - -class Param(): - @classmethod - @txros.util.cancellableInlineCallbacks - def get(cls, nh, param): - try: - p = yield nh.get_param(param) - print param, p - defer.returnValue(cls(p)) - - except txros.rosxmlrpc.Error: - # Param not found - defer.returnValue(cls(None)) - - def __init__(self, value): - self.value = value - self.found = value is not None - self.found_and_true = self.value and self.found - - -@txros.util.cancellableInlineCallbacks -def got_request(nh, req, convert): - to_frame = "enu" if req.to_frame == '' else req.to_frame - - resp = BoundsResponse(enforce=False) - # Lets get all these defers started up - enforce = yield Param.get(nh, "/bounds/enforce") - lla_bounds = yield Param.get(nh, "/bounds/lla") +from navigator_tools.cfg import BoundsConfig +from dynamic_reconfigure.server import Server - if enforce.found_and_true: - bounds = [] - for lla_bound in lla_bounds.value: - bound = yield convert.request(CoordinateConversionRequest(frame="lla", point=np.append(lla_bound, 0))) - bounds.append(navigator_tools.numpy_to_point(getattr(bound, to_frame))) - - resp = BoundsResponse(enforce=True, bounds=bounds) - - defer.returnValue(resp) - - -@txros.util.cancellableInlineCallbacks -def main(): - nh = yield txros.NodeHandle.from_argv("bounds_server", anonymous=True) - - convert = Converter() - yield convert.init(nh) - nh.advertise_service('/get_bounds', Bounds, lambda req: got_request(nh, req, convert)) - - yield defer.Deferred() # never exit +from navigator_msgs.srv import Bounds, BoundsResponse, \ + CoordinateConversion, CoordinateConversionRequest + + +class BoundsServer(object): + def __init__(self): + self.convert = rospy.ServiceProxy('/convert', CoordinateConversion) + + self.enforce = False + self.lla_bounds = [] + + rospy.set_param("/bounds/enu/", [[0,0],[0,0],[0,0],[0,0]]) + rospy.set_param("/bounds/enforce", self.enforce) + + #self.convert = Converter() + Server(BoundsConfig, self.update_config) + rospy.Service('/get_bounds', Bounds, self.got_request) + + def destringify(self, lat_long_string): + if any(c.isalpha() for c in lat_long_string): + return False + + floats = map(float, lat_long_string.replace('(', '').replace(')', '').split(',')) + + if len(floats) != 3: + return False + + return floats + + def update_config(self, config, level): + self.enforce = config['enforce'] + lla_bounds_str = [config['lla_1'] + ", 0", config['lla_2'] + ", 0", + config['lla_3'] + ", 0", config['lla_4'] + ", 0"] + lla_bounds = map(self.destringify, lla_bounds_str) + + # If any returned false, don't update + if all(lla_bounds): + bounds = [self.convert(CoordinateConversionRequest(frame="lla", point=lla)) for lla in lla_bounds] + config['enu_1_lat'] = bounds[0].enu[0] + config['enu_1_long'] = bounds[0].enu[1] + + config['enu_2_lat'] = bounds[1].enu[0] + config['enu_2_long'] = bounds[1].enu[1] + + config['enu_3_lat'] = bounds[2].enu[0] + config['enu_3_long'] = bounds[2].enu[1] + + config['enu_4_lat'] = bounds[3].enu[0] + config['enu_4_long'] = bounds[3].enu[1] + + rospy.set_param("/bounds/enu/", map(lambda bound: bound.enu[:2], bounds)) + else: + 
rospy.set_param("/bounds/enu/", [[0,0],[0,0],[0,0],[0,0]]) + + rospy.set_param("/bounds/enforce", self.enforce) + return config + + def got_request(self, req): + to_frame = "enu" if req.to_frame == '' else req.to_frame + + print self.lla_bounds + + resp = BoundsResponse(enforce=False) + + bounds = [navigator_tools.numpy_to_point(getattr(response, to_frame)) for response in bounds] + + resp.enforce = self.enforce + resp.bounds = bounds + + return resp if __name__ == "__main__": - txros.util.launch_main(main) \ No newline at end of file + rospy.init_node("bounds_server") + bs = BoundsServer() + rospy.spin() diff --git a/utils/navigator_tools/nodes/coordinate_conversion_server.py b/utils/navigator_tools/nodes/coordinate_conversion_server.py index fdd52f61..a811ca53 100755 --- a/utils/navigator_tools/nodes/coordinate_conversion_server.py +++ b/utils/navigator_tools/nodes/coordinate_conversion_server.py @@ -31,8 +31,14 @@ def not_found(self): def request(self, srv): self.point = np.array(srv.point) - self.enu_pos = navigator_tools.odometry_to_numpy((yield self.odom_sub.get_next_message()))[0][0] - self.ecef_pos = navigator_tools.odometry_to_numpy((yield self.abs_odom_sub.get_next_message()))[0][0] + last_odom = yield self.odom_sub.get_last_message() + last_absodom = yield self.abs_odom_sub.get_last_message() + + if last_odom is None or last_absodom is None: + defer.returnValue(CoordinateConversionResponse()) + + self.enu_pos = navigator_tools.odometry_to_numpy(last_odom)[0][0] + self.ecef_pos = navigator_tools.odometry_to_numpy(last_absodom)[0][0] enu, ecef, lla = getattr(self, srv.frame)() @@ -88,4 +94,4 @@ def main(): yield defer.Deferred() # never exit if __name__ == "__main__": - txros.util.launch_main(main) \ No newline at end of file + txros.util.launch_main(main) From b20b30296650247b3f6705f16dfb0bdf8832e808 Mon Sep 17 00:00:00 2001 From: mattlangford Date: Fri, 11 Nov 2016 00:55:03 -0500 Subject: [PATCH 132/267] OGRID: Draw bounds on ogrid --- utils/navigator_tools/nodes/bounds.py | 30 +++++++++++++-------------- 1 file changed, 14 insertions(+), 16 deletions(-) diff --git a/utils/navigator_tools/nodes/bounds.py b/utils/navigator_tools/nodes/bounds.py index 062b9ed0..34679adb 100755 --- a/utils/navigator_tools/nodes/bounds.py +++ b/utils/navigator_tools/nodes/bounds.py @@ -17,11 +17,11 @@ def __init__(self): self.convert = rospy.ServiceProxy('/convert', CoordinateConversion) self.enforce = False - self.lla_bounds = [] + self.bounds = [] rospy.set_param("/bounds/enu/", [[0,0],[0,0],[0,0],[0,0]]) - rospy.set_param("/bounds/enforce", self.enforce) + rospy.set_param("/bounds/enforce", self.enforce) #self.convert = Converter() Server(BoundsConfig, self.update_config) rospy.Service('/get_bounds', Bounds, self.got_request) @@ -45,34 +45,32 @@ def update_config(self, config, level): # If any returned false, don't update if all(lla_bounds): - bounds = [self.convert(CoordinateConversionRequest(frame="lla", point=lla)) for lla in lla_bounds] - config['enu_1_lat'] = bounds[0].enu[0] - config['enu_1_long'] = bounds[0].enu[1] + self.bounds = [self.convert(CoordinateConversionRequest(frame="lla", point=lla)) for lla in lla_bounds] + config['enu_1_lat'] = self.bounds[0].enu[0] + config['enu_1_long'] = self.bounds[0].enu[1] - config['enu_2_lat'] = bounds[1].enu[0] - config['enu_2_long'] = bounds[1].enu[1] + config['enu_2_lat'] = self.bounds[1].enu[0] + config['enu_2_long'] = self.bounds[1].enu[1] - config['enu_3_lat'] = bounds[2].enu[0] - config['enu_3_long'] = bounds[2].enu[1] + config['enu_3_lat'] = 
self.bounds[2].enu[0] + config['enu_3_long'] = self.bounds[2].enu[1] - config['enu_4_lat'] = bounds[3].enu[0] - config['enu_4_long'] = bounds[3].enu[1] + config['enu_4_lat'] = self.bounds[3].enu[0] + config['enu_4_long'] = self.bounds[3].enu[1] - rospy.set_param("/bounds/enu/", map(lambda bound: bound.enu[:2], bounds)) + rospy.set_param("/bounds/enu/", map(lambda bound: bound.enu[:2], self.bounds)) else: rospy.set_param("/bounds/enu/", [[0,0],[0,0],[0,0],[0,0]]) - + rospy.set_param("/bounds/enforce", self.enforce) return config def got_request(self, req): to_frame = "enu" if req.to_frame == '' else req.to_frame - print self.lla_bounds - resp = BoundsResponse(enforce=False) - bounds = [navigator_tools.numpy_to_point(getattr(response, to_frame)) for response in bounds] + bounds = [navigator_tools.numpy_to_point(getattr(response, to_frame)) for response in self.bounds] resp.enforce = self.enforce resp.bounds = bounds From 46354e505be88af40e9c69a427ac98df12a59715 Mon Sep 17 00:00:00 2001 From: Matthew Langford Date: Fri, 11 Nov 2016 07:04:11 -0500 Subject: [PATCH 133/267] MOVE: Add circle to move command --- utils/navigator_tools/nodes/rviz_waypoint.py | 36 ++++++++++++++------ 1 file changed, 26 insertions(+), 10 deletions(-) diff --git a/utils/navigator_tools/nodes/rviz_waypoint.py b/utils/navigator_tools/nodes/rviz_waypoint.py index 37b67376..a38d5035 100755 --- a/utils/navigator_tools/nodes/rviz_waypoint.py +++ b/utils/navigator_tools/nodes/rviz_waypoint.py @@ -5,31 +5,47 @@ import numpy as np import navigator_tools from twisted.internet import defer -from geometry_msgs.msg import PoseStamped +from geometry_msgs.msg import PoseStamped, PointStamped class RvizRepublisher(object): @txros.util.cancellableInlineCallbacks def init(self): self.nh = yield txros.NodeHandle.from_argv("rviz_republisher") - self.rviz_republish = self.nh.advertise("/rviz_goal", PoseStamped) + self.point_republish = self.nh.advertise("/rviz_point", PointStamped) + self.pose_republish = self.nh.advertise("/rviz_goal", PoseStamped) + self.rviz_goal = self.nh.subscribe("/move_base_simple/goal", PoseStamped) + self.clicked_point = self.nh.subscribe("/clicked_point", PointStamped) + + self.delay = .1 # s + self.publish_point() + self.publish_pose() - yield self.rviz_goal.get_next_message() + @txros.util.cancellableInlineCallbacks + def publish_point(self): + yield self.clicked_point.get_next_message() while True: - yield self.nh.sleep(.1) - yield self.do() + yield self.nh.sleep(self.delay) + last_point = yield self.clicked_point.get_last_message() + self.point_republish.publish(last_point) @txros.util.cancellableInlineCallbacks - def do(self): - last = yield self.rviz_goal.get_last_message() - self.rviz_republish.publish(last) + def publish_pose(self): + yield self.rviz_goal.get_next_message() + + while True: + yield self.nh.sleep(self.delay) + last_pose = yield self.rviz_goal.get_last_message() + self.pose_republish.publish(last_pose) + @txros.util.cancellableInlineCallbacks def main(): rr = RvizRepublisher() yield rr.init() - yield defer.Deferred() # never exit + yield defer.Deferred() + -txros.util.launch_main(main) \ No newline at end of file +txros.util.launch_main(main) From 0dc1db4eb1e00de4e0abe93bf50a34e143b5a666 Mon Sep 17 00:00:00 2001 From: mattlangford Date: Fri, 11 Nov 2016 18:00:37 -0500 Subject: [PATCH 134/267] VISION: Add node to estimate color of totem buoys --- .../navigator_vision/nodes/database_color.py | 279 ++++++++++++++++++ 1 file changed, 279 insertions(+) create mode 100755 
perception/navigator_vision/nodes/database_color.py diff --git a/perception/navigator_vision/nodes/database_color.py b/perception/navigator_vision/nodes/database_color.py new file mode 100755 index 00000000..87377baa --- /dev/null +++ b/perception/navigator_vision/nodes/database_color.py @@ -0,0 +1,279 @@ +#!/usr/bin/env python +import cv2 +import numpy as np + +import rospy +import tf +import sensor_msgs.point_cloud2 as pc2 +from navigator_msgs.srv import ObjectDBQuery, ObjectDBQueryRequest +from sensor_msgs.msg import CameraInfo, Image + +from cv_bridge import CvBridge, CvBridgeError +from image_geometry import PinholeCameraModel + +import navigator_tools +from navigator_tools import fprint as _fprint + + +camera_root = "/stereo/right" # /camera_root/root +rospy.init_node("database_colorer") + +ros_t = lambda t: rospy.Duration(t) +fprint = lambda *args, **kwargs: _fprint(title="COLORAMA", time="", *args, **kwargs) + +class ImageHolder(object): + @classmethod + def from_msg(cls, msg): + time = msg.header.stamp + try: + image = CvBridge().imgmsg_to_cv2(msg) + return cls(image, time) + except CvBridgeError, e: + # Intentionally absorb CvBridge Errors + rospy.logerr(e) + + return cls() + + def __init__(self, image=None, time=None): + self.image = image + self.time = time + + def __repr__(self): + if self.contains_valid_image: + return "Image from t={}".format(self.time.to_sec()) + return "Invalid Image" + + @property + def contains_valid_image(self): + return self.image is not None and self.time is not None + + +class ImageHistory(object): + def __init__(self, image_topic, history=30): + assert history > 1 + + self._history_length = history + self._images = [] + rospy.Subscriber(image_topic, Image, self._add_image) + + @property + def newest_image(self): + if len(self._images) == 0: + return ImageHolder() + return max(self._images, key=lambda image: image.time) + + def _add_image(self, msg): + image = ImageHolder.from_msg(msg) + self._push(image) + + def _push(self, data): + ''' + Push to history list and only keep the specified length of history. + This enforces that all images are valid. + ''' + if data.contains_valid_image: + self._images.append(data) + self._images = self._images[-self._history_length:] + + def get_around_time(self, time, margin=.05): + ''' + Returns the image that is closest to the specified time. + Returns `None` if closest image is more than `margin` seconds away. 
+ ''' + if len(self._images) == 0: + return ImageHolder() + closest = min(self._images, key=lambda image: abs(image.time - time)) + if abs(closest.time - time) > ros_t(margin): + return ImageHolder() # Return invalid image + + return closest + + +class DebugImage(object): + def __init__(self, topic, encoding="bgr8", queue_size=1, prd=1): + self.bridge = CvBridge() + self.encoding = encoding + self.im_pub = rospy.Publisher(topic, Image, queue_size=queue_size) + self.image = None + rospy.Timer(ros_t(prd), self.publish) + + + def publish(self, *args): + if self.image is not None: + try: + image_message = self.bridge.cv2_to_imgmsg(self.image, self.encoding) + self.im_pub.publish(image_message) + except CvBridgeError, e: + # Intentionally absorb CvBridge Errors + rospy.logerr(e) + + +class Colorama(object): + def __init__(self): + info_topic = camera_root + "/camera_info" + image_topic = camera_root + "/image_rect_color" # Change this to rect on boat + + self.tf_listener = tf.TransformListener() + + db_request = rospy.ServiceProxy("/database/requests", ObjectDBQuery) + self.make_request = lambda **kwargs: db_request(ObjectDBQueryRequest(**kwargs)) + + self.image_history = ImageHistory(image_topic) + + # Wait for camera info, and exit if not found + try: + fprint("Waiting for camera info on: '{}'".format(info_topic)) + camera_info_msg = rospy.wait_for_message(info_topic, CameraInfo, timeout=3) + except rospy.exceptions.ROSException: + fprint("Camera info not found! Terminating.", msg_color="red") + rospy.signal_shutdown("Camera not found!") + return + + fprint("Camera info found!", msg_color="green") + self.camera_model = PinholeCameraModel() + self.camera_model.fromCameraInfo(camera_info_msg) + + self.debug = DebugImage("/colorama/debug", prd=.5) + + # These are color mappings from Hue values [0, 360] + self.color_map = {'red': np.radians(0), 'yellow': np.radians(60), + 'green': np.radians(120), 'blue': np.radians(240)} + + # Some tunable parameters + self.update_time = .5 # s + self.saturation_reject = 50 + self.value_reject = 50 + self.hue_error_reject = .4 # rads + + rospy.Timer(ros_t(self.update_time), self.do_update) + + def do_update(self, *args): + resp = self.make_request(name='all') + + if resp.found: + # temp + time_of_marker = rospy.Time.now() - ros_t(.2) # header.stamp not filled out + image_holder = self.image_history.get_around_time(time_of_marker) + if not image_holder.contains_valid_image: + return + + header = navigator_tools.make_header(frame="/enu", stamp=image_holder.time) #resp.objects[0].header + image = image_holder.image + self.debug.image = np.copy(image) + + cam_tf = self.camera_model.tfFrame() + fprint("Getting transform between /enu and {}...".format(cam_tf)) + self.tf_listener.waitForTransform("/enu", cam_tf, time_of_marker, ros_t(1)) + t_mat44 = self.tf_listener.asMatrix(cam_tf, header) # homogenous 4x4 transformation matrix + + for obj in resp.objects: + if len(obj.points) > 0 and obj.name == "totem": + fprint("{} {}".format(obj.id, "=" * 50)) + + print obj.position + # Get objects position in camera frame and make sure it's in view + object_cam = t_mat44.dot(np.append(navigator_tools.point_to_numpy(obj.position), 1)) + object_px = map(int, self.camera_model.project3dToPixel(object_cam[:3])) + if not self._object_in_frame(object_cam): + continue + + #print object_px + + points_np = np.array(map(navigator_tools.point_to_numpy, obj.points)) + # We dont want points below a certain level + points_np = points_np[points_np[:, 2] > -2.5] + # Shove ones in there to make 
homogenous points + points_np_homo = np.hstack((points_np, np.ones((points_np.shape[0], 1)))).T + points_cam = t_mat44.dot(points_np_homo).T + points_px = map(self.camera_model.project3dToPixel, points_cam[:, :3]) + #print points_px + + roi = self._get_ROI_from_points(points_px) + hue = self._get_color_from_ROI(roi, image) + c = (0, 0, 0) if hue is None else (int(hue), 255, 255) + + hsv = np.array([[c]])[:, :3].astype(np.uint8) + bgr = cv2.cvtColor(hsv, cv2.COLOR_HSV2BGR)[0, 0] + bgr = tuple(bgr.astype(np.uint8).tolist()) + + [cv2.circle(self.debug.image, tuple(map(int, p)), 2, bgr, -1) for p in points_px] + cv2.circle(self.debug.image, tuple(object_px), 10, bgr, -1) + font = cv2.FONT_HERSHEY_SIMPLEX + cv2.putText(self.debug.image, str(obj.id), tuple(object_px), font, 1, bgr, 2) + + + print '\n' * 2 + + def _get_ROI_from_points(self, image_points): + # Probably will return a set of contours + cnt = np.array(image_points).astype(np.float32) + + rect = cv2.minAreaRect(cnt) + box = cv2.cv.BoxPoints(rect) + box = np.int0(box) + fprint("Drawing rectangle") + #cv2.drawContours(self.debug.image, [box], 0, (0, 0, 255), 2) + return box + + def _get_color_from_ROI(self, roi, img): + mask = np.zeros(img.shape[:2], np.uint8) + cv2.drawContours(mask, [roi], 0, 255, -1) + bgr = cv2.mean(img, mask) + # We have to treat that bgr value as a 3d array to get cvtColor to work + bgr = np.array([[bgr]])[:, :3].astype(np.uint8) + h, s, v = cv2.cvtColor(bgr, cv2.COLOR_BGR2HSV)[0, 0] + # Now check that s and v are in a good range + if s < self.saturation_reject or v < self.value_reject: + fprint("The colors aren't expressive enough. Rejecting.", msg_color='red') + return None + + # Compute hue error in SO2 + hue_angle = np.radians(h * 2) + c = np.cos(hue_angle) + s = np.sin(hue_angle) + error = np.inf + likely_color = 'bazinga' + for color, h_val in self.color_map.iteritems(): + cg = np.cos(h_val) + sg = np.sin(h_val) + this_error = np.abs(np.arctan2(sg*c - cg*s, cg*c + sg*s)) + if this_error < error: + error = this_error + likely_color = color + + if error > self.hue_error_reject: + fprint("Closest color was {} with an error of {} rads (> {}) Rejecting.".format(likely_color, np.round(error, 3), + self.hue_error_reject), msg_color='red') + return None + + fprint("Likely color: {} with an hue error of {} rads.".format(likely_color, np.round(error, 3))) + return np.degrees(self.color_map[likely_color]) / 2.0 + + def _object_in_frame(self, object_point): + """ + Returns if the object is in frame given the centroid of the object. + `object_point` should be in the camera_model's frame. 
+ """ + print object_point + if object_point[2] < 0: + return False + + px = np.array(self.camera_model.project3dToPixel(object_point)) + resoultion = self.camera_model.fullResolution() + if np.any([0,0] > px) or np.any(px > resoultion): + return False + + return True + + +#rospy.Subscriber("/velodyne_points", pc2.PointCloud2, cb) +# req = rospy.ServiceProxy("/database/requests", ObjectDBQuery) +# resp = req(ObjectDBQueryRequest(name="all")) + +# if resp.found: +# for db_object in resp.objects: +# if len(db_object.points) > 0: +# print db_object.points +# print +c = Colorama() +rospy.spin() From 46acb4437b45a8a1b7a6a64ba64ca4575b45ce9b Mon Sep 17 00:00:00 2001 From: mattlangford Date: Fri, 11 Nov 2016 20:11:14 -0500 Subject: [PATCH 135/267] BOUNDS: Fix bug where node would crash on startup --- utils/navigator_tools/cfg/sim.yaml | 24 ++++++++++++++++++++++++ utils/navigator_tools/nodes/bounds.py | 17 ++++++++++++----- 2 files changed, 36 insertions(+), 5 deletions(-) create mode 100644 utils/navigator_tools/cfg/sim.yaml diff --git a/utils/navigator_tools/cfg/sim.yaml b/utils/navigator_tools/cfg/sim.yaml new file mode 100644 index 00000000..6b167309 --- /dev/null +++ b/utils/navigator_tools/cfg/sim.yaml @@ -0,0 +1,24 @@ +!!python/object/new:dynamic_reconfigure.encoding.Config +dictitems: + groups: !!python/object/new:dynamic_reconfigure.encoding.Config + dictitems: + enforce: true + groups: !!python/object/new:dynamic_reconfigure.encoding.Config + state: [] + id: 0 + lla_1: (29.5358153431059, -82.30261036235201) + lla_2: (29.5358153387807, -82.30467366185054) + lla_3: (29.5340110111349, -82.30467363859142) + lla_4: (29.5340110154601, -82.30261037571948) + name: Default + parameters: !!python/object/new:dynamic_reconfigure.encoding.Config + state: [] + parent: 0 + state: true + type: '' + state: [] + lla_1: (29.5358153431059, -82.30261036235201) + lla_2: (29.5358153387807, -82.30467366185054) + lla_3: (29.5340110111349, -82.30467363859142) + lla_4: (29.5340110154601, -82.30261037571948) +state: [] diff --git a/utils/navigator_tools/nodes/bounds.py b/utils/navigator_tools/nodes/bounds.py index 34679adb..671fd246 100755 --- a/utils/navigator_tools/nodes/bounds.py +++ b/utils/navigator_tools/nodes/bounds.py @@ -19,9 +19,9 @@ def __init__(self): self.enforce = False self.bounds = [] - rospy.set_param("/bounds/enu/", [[0,0],[0,0],[0,0],[0,0]]) - + rospy.set_param("/bounds/enu/", [[0,0,0],[0,0,0],[0,0,0],[0,0,0]]) rospy.set_param("/bounds/enforce", self.enforce) + #self.convert = Converter() Server(BoundsConfig, self.update_config) rospy.Service('/get_bounds', Bounds, self.got_request) @@ -42,10 +42,16 @@ def update_config(self, config, level): lla_bounds_str = [config['lla_1'] + ", 0", config['lla_2'] + ", 0", config['lla_3'] + ", 0", config['lla_4'] + ", 0"] lla_bounds = map(self.destringify, lla_bounds_str) - + self.lla_bounds = lla_bounds + # If any returned false, don't update if all(lla_bounds): - self.bounds = [self.convert(CoordinateConversionRequest(frame="lla", point=lla)) for lla in lla_bounds] + try: + self.bounds = [self.convert(CoordinateConversionRequest(frame="lla", point=lla)) for lla in lla_bounds] + except: + print "No service provider found" + return config + config['enu_1_lat'] = self.bounds[0].enu[0] config['enu_1_long'] = self.bounds[0].enu[1] @@ -60,7 +66,7 @@ def update_config(self, config, level): rospy.set_param("/bounds/enu/", map(lambda bound: bound.enu[:2], self.bounds)) else: - rospy.set_param("/bounds/enu/", [[0,0],[0,0],[0,0],[0,0]]) + 
rospy.set_param("/bounds/enu/", [[0,0,00],[0,0,0],[0,0,0],[0,0,0]]) rospy.set_param("/bounds/enforce", self.enforce) return config @@ -70,6 +76,7 @@ def got_request(self, req): resp = BoundsResponse(enforce=False) + self.bounds = [self.convert(CoordinateConversionRequest(frame="lla", point=lla)) for lla in self.lla_bounds] bounds = [navigator_tools.numpy_to_point(getattr(response, to_frame)) for response in self.bounds] resp.enforce = self.enforce From 039f067bffc02504597dda553b969d2267cbe430 Mon Sep 17 00:00:00 2001 From: mattlangford Date: Fri, 11 Nov 2016 20:39:07 -0500 Subject: [PATCH 136/267] BOUNDS: Add config files for sim and wauberg to set bounds --- utils/navigator_tools/cfg/wauberg.yaml | 25 +++++++++++++++++++++++++ 1 file changed, 25 insertions(+) create mode 100644 utils/navigator_tools/cfg/wauberg.yaml diff --git a/utils/navigator_tools/cfg/wauberg.yaml b/utils/navigator_tools/cfg/wauberg.yaml new file mode 100644 index 00000000..99b81fd3 --- /dev/null +++ b/utils/navigator_tools/cfg/wauberg.yaml @@ -0,0 +1,25 @@ +!!python/object/new:dynamic_reconfigure.encoding.Config +dictitems: + enforce: true + groups: !!python/object/new:dynamic_reconfigure.encoding.Config + dictitems: + enforce: true + groups: !!python/object/new:dynamic_reconfigure.encoding.Config + state: [] + id: 0 + lla_1: (29.535011, -82.303323) + lla_2: (29.534647, -82.304280) + lla_3: (29.533440, -82.303577) + lla_4: (29.533803, -82.302639) + name: Default + parameters: !!python/object/new:dynamic_reconfigure.encoding.Config + state: [] + parent: 0 + state: true + type: '' + state: [] + lla_1: (29.535011, -82.303323) + lla_2: (29.534647, -82.304280) + lla_3: (29.533440, -82.303577) + lla_4: (29.533803, -82.302639) +state: [] From 5fa15d21b3a3793568d6709d7021f2a458f44733 Mon Sep 17 00:00:00 2001 From: Matthew Langford Date: Sat, 12 Nov 2016 11:43:11 -0500 Subject: [PATCH 137/267] VISION: Set color in databse --- perception/navigator_vision/nodes/database_color.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/perception/navigator_vision/nodes/database_color.py b/perception/navigator_vision/nodes/database_color.py index 87377baa..961bb0bd 100755 --- a/perception/navigator_vision/nodes/database_color.py +++ b/perception/navigator_vision/nodes/database_color.py @@ -140,6 +140,7 @@ def __init__(self): 'green': np.radians(120), 'blue': np.radians(240)} # Some tunable parameters + self.min_height = -.7 # m self.update_time = .5 # s self.saturation_reject = 50 self.value_reject = 50 @@ -181,7 +182,8 @@ def do_update(self, *args): points_np = np.array(map(navigator_tools.point_to_numpy, obj.points)) # We dont want points below a certain level - points_np = points_np[points_np[:, 2] > -2.5] + print points_np[:, 2] + points_np = points_np[points_np[:, 2] > self.min_height] # Shove ones in there to make homogenous points points_np_homo = np.hstack((points_np, np.ones((points_np.shape[0], 1)))).T points_cam = t_mat44.dot(points_np_homo).T @@ -195,6 +197,10 @@ def do_update(self, *args): hsv = np.array([[c]])[:, :3].astype(np.uint8) bgr = cv2.cvtColor(hsv, cv2.COLOR_HSV2BGR)[0, 0] bgr = tuple(bgr.astype(np.uint8).tolist()) + + if hue is not None: + print bgr + self.make_request(cmd='{name}={bgr[2]},{bgr[1]},{bgr[0]},{_id}'.format(name=obj.name,_id=obj.id, bgr=bgr)) [cv2.circle(self.debug.image, tuple(map(int, p)), 2, bgr, -1) for p in points_px] cv2.circle(self.debug.image, tuple(object_px), 10, bgr, -1) @@ -212,7 +218,7 @@ def _get_ROI_from_points(self, image_points): box = 
cv2.cv.BoxPoints(rect) box = np.int0(box) fprint("Drawing rectangle") - #cv2.drawContours(self.debug.image, [box], 0, (0, 0, 255), 2) + cv2.drawContours(self.debug.image, [box], 0, (0, 0, 255), 2) return box def _get_color_from_ROI(self, roi, img): From db4b50703dd66bb24d3e3554f31164a06eb685ba Mon Sep 17 00:00:00 2001 From: mattlangford Date: Sun, 13 Nov 2016 08:56:14 -0500 Subject: [PATCH 138/267] COLORAMA: Add service to get color directly --- .../navigator_vision/nodes/database_color.py | 100 +++++++++++++++--- 1 file changed, 83 insertions(+), 17 deletions(-) diff --git a/perception/navigator_vision/nodes/database_color.py b/perception/navigator_vision/nodes/database_color.py index 961bb0bd..281f7c39 100755 --- a/perception/navigator_vision/nodes/database_color.py +++ b/perception/navigator_vision/nodes/database_color.py @@ -5,7 +5,7 @@ import rospy import tf import sensor_msgs.point_cloud2 as pc2 -from navigator_msgs.srv import ObjectDBQuery, ObjectDBQueryRequest +from navigator_msgs.srv import ObjectDBQuery, ObjectDBQueryRequest, VisionRequest, VisionRequestResponse from sensor_msgs.msg import CameraInfo, Image from cv_bridge import CvBridge, CvBridgeError @@ -111,13 +111,18 @@ def publish(self, *args): class Colorama(object): def __init__(self): info_topic = camera_root + "/camera_info" - image_topic = camera_root + "/image_rect_color" # Change this to rect on boat + image_topic = camera_root + "/image_raw" # Change this to rect on boat self.tf_listener = tf.TransformListener() + # Map id => [{color, error}, ...] for determining when a color is good + self.colored = {} + db_request = rospy.ServiceProxy("/database/requests", ObjectDBQuery) self.make_request = lambda **kwargs: db_request(ObjectDBQueryRequest(**kwargs)) + rospy.Service("/vision/buoy_colorizer", VisionRequest, self.got_request) + self.image_history = ImageHistory(image_topic) # Wait for camera info, and exit if not found @@ -140,14 +145,68 @@ def __init__(self): 'green': np.radians(120), 'blue': np.radians(240)} # Some tunable parameters - self.min_height = -.7 # m self.update_time = .5 # s self.saturation_reject = 50 self.value_reject = 50 self.hue_error_reject = .4 # rads + # To decide when to accept color observations + self.error_tolerance = .4 # rads + self.spottings_req = 4 + self.similarity_reject = .1 # If two colors are within this amount of error, reject + rospy.Timer(ros_t(self.update_time), self.do_update) + def valid_color(self, _id): + """ + Either returns valid color or None depending if the color is valid or not + """ + if _id not in self.colored: + return None + + object_data = self.colored[_id] + print "object_data", object_data + + # Maps color => [err1, err2, ..] 
+ potential_colors = {} + for data in object_data: + color, err = data['color'], data['err'] + if color not in potential_colors: + potential_colors[color] = [] + + potential_colors[color].append(err) + + potential_colors_avg = {} + for color, errs in potential_colors.iteritems(): + if len(errs) > self.spottings_req: + potential_colors_avg[color] = np.average(errs) + + print "potential_colors_avg", potential_colors_avg + if len(potential_colors_avg) == 1: + # No debate about the color + color = potential_colors_avg.keys()[0] + err = potential_colors_avg[color] + fprint("Only one color found, error: {} (/ {})".format(err, self.error_tolerance)) + if len(potential_colors[color]) > self.spottings_req and err <= self.error_tolerance: + return color + + elif len(potential_colors_avg) > 1: + # More than one color, see if one of them is still valid + fprint("More than one color detected. Removing all.") + self.colored[_id] = [] + return None + + + def got_request(self, req): + # Threading blah blah something unsafe + + color = self.valid_color(req.target_id) + if color is None: + fprint("ID {} is NOT a valid color".format(req.target_id), msg_color='red') + return VisionRequestResponse(found=False) + + return VisionRequestResponse(found=True, color=color) + def do_update(self, *args): resp = self.make_request(name='all') @@ -182,8 +241,7 @@ def do_update(self, *args): points_np = np.array(map(navigator_tools.point_to_numpy, obj.points)) # We dont want points below a certain level - print points_np[:, 2] - points_np = points_np[points_np[:, 2] > self.min_height] + points_np = points_np[points_np[:, 2] > -2.5] # Shove ones in there to make homogenous points points_np_homo = np.hstack((points_np, np.ones((points_np.shape[0], 1)))).T points_cam = t_mat44.dot(points_np_homo).T @@ -191,23 +249,31 @@ def do_update(self, *args): #print points_px roi = self._get_ROI_from_points(points_px) - hue = self._get_color_from_ROI(roi, image) - c = (0, 0, 0) if hue is None else (int(hue), 255, 255) - - hsv = np.array([[c]])[:, :3].astype(np.uint8) - bgr = cv2.cvtColor(hsv, cv2.COLOR_HSV2BGR)[0, 0] - bgr = tuple(bgr.astype(np.uint8).tolist()) - - if hue is not None: - print bgr - self.make_request(cmd='{name}={bgr[2]},{bgr[1]},{bgr[0]},{_id}'.format(name=obj.name,_id=obj.id, bgr=bgr)) + color_info = self._get_color_from_ROI(roi, image) # hue, string_color, error + bgr = (0, 0, 0) + + if color_info is not None: + hue, color, error = color_info + c = (int(hue), 255, 255) + hsv = np.array([[c]])[:, :3].astype(np.uint8) + bgr = cv2.cvtColor(hsv, cv2.COLOR_HSV2BGR)[0, 0] + bgr = tuple(bgr.astype(np.uint8).tolist()) + + if hue is not None: + if obj.id not in self.colored: + self.colored[obj.id] = [] + + self.colored[obj.id].append({'color':color, 'err':error}) + + if self.valid_color(obj.id): + fprint("COLOR IS VALID", msg_color='green') + self.make_request(cmd='{name}={bgr[2]},{bgr[1]},{bgr[0]},{_id}'.format(name=obj.name,_id= obj.id, bgr=bgr)) [cv2.circle(self.debug.image, tuple(map(int, p)), 2, bgr, -1) for p in points_px] cv2.circle(self.debug.image, tuple(object_px), 10, bgr, -1) font = cv2.FONT_HERSHEY_SIMPLEX cv2.putText(self.debug.image, str(obj.id), tuple(object_px), font, 1, bgr, 2) - print '\n' * 2 def _get_ROI_from_points(self, image_points): @@ -253,7 +319,7 @@ def _get_color_from_ROI(self, roi, img): return None fprint("Likely color: {} with an hue error of {} rads.".format(likely_color, np.round(error, 3))) - return np.degrees(self.color_map[likely_color]) / 2.0 + return 
[np.degrees(self.color_map[likely_color]) / 2.0, likely_color, error] def _object_in_frame(self, object_point): """ From 9cbbdaaf8a4aee0e95b6d0000912e12d81d555b0 Mon Sep 17 00:00:00 2001 From: Matt Langford Date: Thu, 17 Nov 2016 02:36:06 -0500 Subject: [PATCH 139/267] NAVIGATOR: Make database_query raise an exception if an object isn't found --- utils/navigator_tools/navigator_tools/general_helpers.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/utils/navigator_tools/navigator_tools/general_helpers.py b/utils/navigator_tools/navigator_tools/general_helpers.py index 925d7041..71012347 100644 --- a/utils/navigator_tools/navigator_tools/general_helpers.py +++ b/utils/navigator_tools/navigator_tools/general_helpers.py @@ -1,5 +1,10 @@ import rospy + +class MissingPerceptionObject(): + def __init__(self, name): + self.name = name + class Colors(): # Some cool stuff could happen here red = '\033[91m' @@ -72,4 +77,4 @@ def fprint(msg, time=None, title=None, newline=True, msg_color=None): # Note, this adds a space at the end. print to_print.format(time=time_header, title=title_header, msg=msg), -print_t = fprint # For legacy \ No newline at end of file +print_t = fprint # For legacy From 7976f72de0b8121acc1f60da893a2dc51286b141 Mon Sep 17 00:00:00 2001 From: Matt Langford Date: Wed, 16 Nov 2016 20:21:04 -0500 Subject: [PATCH 140/267] UTILS: Add check for valid rosmsg->numpy conversion --- utils/navigator_tools/navigator_tools/msg_helpers.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/utils/navigator_tools/navigator_tools/msg_helpers.py b/utils/navigator_tools/navigator_tools/msg_helpers.py index cd440e38..5599659f 100644 --- a/utils/navigator_tools/navigator_tools/msg_helpers.py +++ b/utils/navigator_tools/navigator_tools/msg_helpers.py @@ -39,6 +39,8 @@ def rosmsg_to_numpy(rosmsg, keys=None): else: break + assert len(output_array) is not 0, "Input type {} has none of these attributes {}.".format(type(rosmsg).__name__, keys) + return np.array(output_array).astype(np.float32) else: From a704c2f2423b86cfcfe4d51f95eb9167cfc76096 Mon Sep 17 00:00:00 2001 From: mattlangford Date: Fri, 18 Nov 2016 10:33:28 -0500 Subject: [PATCH 141/267] BAG: Add fake action server and image proc --- .../nodes/fake_action_server.py | 45 +++++++++++++++++++ 1 file changed, 45 insertions(+) create mode 100755 utils/navigator_tools/nodes/fake_action_server.py diff --git a/utils/navigator_tools/nodes/fake_action_server.py b/utils/navigator_tools/nodes/fake_action_server.py new file mode 100755 index 00000000..5a612403 --- /dev/null +++ b/utils/navigator_tools/nodes/fake_action_server.py @@ -0,0 +1,45 @@ +#!/usr/bin/env python +import rospy +import actionlib + +from geometry_msgs.msg import PoseStamped +from lqrrt_ros.msg import MoveAction, MoveFeedback, MoveResult + +import navigator_tools +from navigator_tools import fprint as _fprint + + +fprint = lambda *args, **kwargs: _fprint(title="FAKE_ACTION_SERVER", time="", *args, **kwargs) + +class FakeActionServer(object): + def __init__(self): + self.goal_pose_pub = rospy.Publisher("/lqrrt/goal", PoseStamped, queue_size=3) + + self.move_server = actionlib.SimpleActionServer("/move_to", MoveAction, execute_cb=self.move_cb, auto_start=False) + self.move_server.start() + rospy.sleep(.1) + + fprint("Fake action server ready!", msg_color="green") + + def move_cb(self, msg): + fprint("Move request received!", msg_color="blue") + + if msg.move_type not in ['hold', 'drive', 'skid', 'circle']: + fprint("Move type {} not found", msg_color='red') + 
self.move_server.set_aborted(MoveResult('move_type'))
+
+        p = PoseStamped()
+        p.header = navigator_tools.make_header(frame="enu")
+        p.pose = msg.goal
+        self.goal_pose_pub.publish(p)
+
+        # Sleep before you continue
+        rospy.sleep(2)
+
+        fprint("Finished move!", newline=2)
+        self.move_server.set_succeeded(MoveResult(''))
+
+if __name__ == "__main__":
+    rospy.init_node("fake_action_server")
+    f = FakeActionServer()
+    rospy.spin()

From 73f0d88131d7655fabb11991b6eed8c386d45f14 Mon Sep 17 00:00:00 2001
From: mattlangford
Date: Fri, 18 Nov 2016 11:39:15 -0500
Subject: [PATCH 142/267] BAG: Add feasibility check to fake action server

---
 .../nodes/fake_action_server.py | 60 ++++++++++++++++++-
 1 file changed, 58 insertions(+), 2 deletions(-)

diff --git a/utils/navigator_tools/nodes/fake_action_server.py b/utils/navigator_tools/nodes/fake_action_server.py
index 5a612403..c5090c57 100755
--- a/utils/navigator_tools/nodes/fake_action_server.py
+++ b/utils/navigator_tools/nodes/fake_action_server.py
@@ -1,12 +1,19 @@
 #!/usr/bin/env python
+from __future__ import division
+
+import numpy as np
+
 import rospy
 import actionlib
+import tf.transformations as trns

+from nav_msgs.msg import OccupancyGrid
 from geometry_msgs.msg import PoseStamped
 from lqrrt_ros.msg import MoveAction, MoveFeedback, MoveResult

 import navigator_tools
 from navigator_tools import fprint as _fprint
+from behaviors import params


 fprint = lambda *args, **kwargs: _fprint(title="FAKE_ACTION_SERVER", time="", *args, **kwargs)

 class FakeActionServer(object):
     def __init__(self):
         self.goal_pose_pub = rospy.Publisher("/lqrrt/goal", PoseStamped, queue_size=3)
+
+        # Parameters to simulate lqrrt
+        self.blind = False
+
+        self.ogrid = None
+        set_ogrid = lambda msg: setattr(self, "ogrid", msg)
+        rospy.Subscriber("/ogrid_master", OccupancyGrid, set_ogrid)

         self.move_server = actionlib.SimpleActionServer("/move_to", MoveAction, execute_cb=self.move_cb, auto_start=False)
         self.move_server.start()
@@ -25,20 +39,62 @@ def move_cb(self, msg):
         fprint("Move request received!", msg_color="blue")

         if msg.move_type not in ['hold', 'drive', 'skid', 'circle']:
-            fprint("Move type {} not found", msg_color='red')
+            fprint("Move type '{}' not found".format(msg.move_type), msg_color='red')
             self.move_server.set_aborted(MoveResult('move_type'))
+            return

+        self.blind = msg.blind
+
         p = PoseStamped()
         p.header = navigator_tools.make_header(frame="enu")
         p.pose = msg.goal
         self.goal_pose_pub.publish(p)

         # Sleep before you continue
-        rospy.sleep(2)
+        rospy.sleep(1)
+
+        yaw = trns.euler_from_quaternion(navigator_tools.rosmsg_to_numpy(msg.goal.orientation))[2]
+        if not self.is_feasible(np.array([msg.goal.position.x, msg.goal.position.y, yaw]), np.zeros(3)):
+            fprint("Not feasible", msg_color='red')
+            self.move_server.set_aborted(MoveResult('occupied'))
+            return

         fprint("Finished move!", newline=2)
         self.move_server.set_succeeded(MoveResult(''))

+    def is_feasible(self, x, u):
+        """
+        Given a state x and effort u, returns a bool
+        that is only True if that (x, u) is feasible.
+ + """ + # If there's no ogrid yet or it's a blind move, anywhere is valid + if self.ogrid is None or self.blind: + return True + + # Body to world + c, s = np.cos(x[2]), np.sin(x[2]) + R = np.array([[c, -s], + [s, c]]) + + # Vehicle points in world frame + points = x[:2] + R.dot(params.vps).T + + # Check for collision + cpm = 1 / self.ogrid.info.resolution + origin = navigator_tools.pose_to_numpy(self.ogrid.info.origin)[0][:2] + indicies = (cpm * (points - origin)).astype(np.int64) + + try: + data = np.array(self.ogrid.data).reshape(self.ogrid.info.width, self.ogrid.info.height) + grid_values = data[indicies[:, 1], indicies[:, 0]] + except IndexError: + return False + + # Greater than threshold is a hit + return np.all(grid_values < 90) + + if __name__ == "__main__": rospy.init_node("fake_action_server") f = FakeActionServer() From e8ff405988ac9995c4aef4927cb5a9694e908b01 Mon Sep 17 00:00:00 2001 From: mattlangford Date: Fri, 18 Nov 2016 20:10:03 -0500 Subject: [PATCH 143/267] FPRINT: Fix missing color, add yellow --- utils/navigator_tools/navigator_tools/general_helpers.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/utils/navigator_tools/navigator_tools/general_helpers.py b/utils/navigator_tools/navigator_tools/general_helpers.py index 71012347..fbd87b05 100644 --- a/utils/navigator_tools/navigator_tools/general_helpers.py +++ b/utils/navigator_tools/navigator_tools/general_helpers.py @@ -8,17 +8,15 @@ def __init__(self, name): class Colors(): # Some cool stuff could happen here red = '\033[91m' - blue = '\033[94m' green = '\033[92m' + yellow = '\033[93m' + blue = '\033[94m' bold = '\033[1m' reset = '\033[0m' def __getattr__(self, arg): # If we get a non existent color, return the reset color - if hasattr(self, arg.lower()): - return getattr(self, arg.lower()) - return self.reset class Seperator(): From 8c9a2a0b9bcc2b7db2d10c70f98e1f8bb9e4e132 Mon Sep 17 00:00:00 2001 From: mike Date: Fri, 18 Nov 2016 00:59:46 -0500 Subject: [PATCH 144/267] DETECT DELIVER: Cleaner camera-to-lidar code May perform better, for sure more readable also some sneaky fixes in the mission (no yellow\!) 
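The cleanup below centers on one operation: project each lidar return into the camera with a pinhole model, keep the points that land in frame, and compare them to the requested pixel. A minimal standalone sketch of that projection, with made-up intrinsics standing in for the node's real CameraInfo values (inside the node, cam_model.project3dToPixel performs this math):

    import numpy as np

    # Hypothetical intrinsics; CameraLidarTransformer reads the real ones from a CameraInfo message.
    fx, fy, cx, cy = 600.0, 600.0, 320.0, 240.0
    width, height = 640, 480

    def project_to_pixel(point):
        # Pinhole projection of a camera-frame point (z forward) into pixel coordinates
        x, y, z = point
        return fx * x / z + cx, fy * y / z + cy

    u, v = project_to_pixel((1.0, 0.5, 10.0))       # an example lidar point in the camera frame
    in_frame = 0 < u < width and 0 < v < height     # same bounds test as inCameraFrame()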
--- .../CameraLidarTransformer.cpp | 178 ++++++++---------- .../CameraLidarTransformer.hpp | 22 +-- 2 files changed, 86 insertions(+), 114 deletions(-) diff --git a/perception/navigator_vision/nodes/camera_to_lidar/CameraLidarTransformer.cpp b/perception/navigator_vision/nodes/camera_to_lidar/CameraLidarTransformer.cpp index 2381ae5e..429dc304 100644 --- a/perception/navigator_vision/nodes/camera_to_lidar/CameraLidarTransformer.cpp +++ b/perception/navigator_vision/nodes/camera_to_lidar/CameraLidarTransformer.cpp @@ -6,129 +6,111 @@ CameraLidarTransformer::CameraLidarTransformer() tfListener(tfBuffer, nh), lidarSub(nh, "/velodyne_points", 10), lidarCache(lidarSub, 10) -#ifdef DO_ROS_DEBUG + #ifdef DO_ROS_DEBUG , image_transport(nh) -#endif + #endif { -#ifdef DO_ROS_DEBUG - points_debug_publisher = - image_transport.advertise("points_debug", 1); - pubMarkers = nh.advertise( - "markers_debug", 10); -#endif + #ifdef DO_ROS_DEBUG + points_debug_publisher = image_transport.advertise("points_debug", 1); + pubMarkers = nh.advertise("markers_debug", 10); + #endif nh.param("camera_info_topic",camera_info_topic,"/right/right/camera_info"); - printf("Topic %s\n",camera_info_topic.c_str()); - // ~nh.param("MIN_Z",MIN_Z,1); - // ~nh.param("MAX_Z_ERROR",MAX_Z_ERROR,0.2); - std::cout << "Constructed" << std::endl; - cameraInfoSub = - nh.subscribe(camera_info_topic, 1, - &CameraLidarTransformer::cameraInfoCallback, this); + cameraInfoSub = nh.subscribe(camera_info_topic, 1, &CameraLidarTransformer::cameraInfoCallback, this); std::string camera_to_lidar_transform_topic; nh.param("camera_to_lidar_transform_topic",camera_to_lidar_transform_topic,"transform_camera"); - transformServiceServer = nh.advertiseService( - camera_to_lidar_transform_topic, &CameraLidarTransformer::transformServiceCallback, - this); + transformServiceServer = nh.advertiseService(camera_to_lidar_transform_topic, &CameraLidarTransformer::transformServiceCallback, this); } void CameraLidarTransformer::cameraInfoCallback(sensor_msgs::CameraInfo info) { camera_info = info; } -bool CameraLidarTransformer::transformServiceCallback( - navigator_msgs::CameraToLidarTransform::Request &req, - navigator_msgs::CameraToLidarTransform::Response &res) { +bool CameraLidarTransformer::inCameraFrame(pcl::PointXYZ& point) +{ + return point.x > 0 && point.x < camera_info.width && + point.y > 0 && point.y < camera_info.height; +} +void CameraLidarTransformer::drawPoint(cv::Mat& mat, cv::Point2d& point, cv::Scalar color) +{ + if (point.x - 20 > 0 && point.x + 20 < mat.cols && point.y - 20 > 0 && point.y + 20 < mat.rows) + cv::circle(mat, point, 3, color, -1); +} +bool CameraLidarTransformer::transformServiceCallback(navigator_msgs::CameraToLidarTransform::Request &req, navigator_msgs::CameraToLidarTransform::Response &res) +{ visualization_msgs::MarkerArray markers; sensor_msgs::PointCloud2ConstPtr scloud = - lidarCache.getElemAfterTime(req.header.stamp); + lidarCache.getElemAfterTime(req.header.stamp); if (!scloud) { res.success = false; res.error = navigator_msgs::CameraToLidarTransform::Response::CLOUD_NOT_FOUND; return true; } - geometry_msgs::TransformStamped transform = - tfBuffer.lookupTransform(req.header.frame_id, "velodyne", req.header.stamp); + geometry_msgs::TransformStamped transform = tfBuffer.lookupTransform(req.header.frame_id, "velodyne", req.header.stamp); sensor_msgs::PointCloud2 cloud_transformed; tf2::doTransform(*scloud, cloud_transformed, transform); -#ifdef DO_ROS_DEBUG - cv::Mat debug_image(camera_info.height, camera_info.width, 
CV_8UC3, - cv::Scalar(0)); -#endif - cv::circle(debug_image, cv::Point(req.point.x, req.point.y), 8, - cv::Scalar(255, 0, 0), -1); - //Tracks the closest lidar point to the requested camera point + #ifdef DO_ROS_DEBUG + cv::Mat debug_image(camera_info.height, camera_info.width, CV_8UC3, cv::Scalar(0)); + cv::circle(debug_image, cv::Point(req.point.x, req.point.y), 8, cv::Scalar(255, 0, 0), -1); + #endif double minDistance = std::numeric_limits::max(); - pcl::PointCloud cloud2; - pcl::fromROSMsg(cloud_transformed, cloud2); + pcl::PointCloud cloud; + pcl::fromROSMsg(cloud_transformed, cloud); std::vector indices; - for (auto ii = 0, jj = 0; ii < cloud_transformed.width; - ++ii, jj += cloud_transformed.point_step) { - floatConverter x, y, z, i; - for (int kk = 0; kk < 4; ++kk) { - x.data[kk] = cloud_transformed.data[jj + kk]; - y.data[kk] = cloud_transformed.data[jj + 4 + kk]; - z.data[kk] = cloud_transformed.data[jj + 8 + kk]; - i.data[kk] = cloud_transformed.data[jj + 16 + kk]; - } - cam_model.fromCameraInfo(camera_info); - if (int(z.f) < 0 || int(z.f) > 30) continue; - cv::Point2d point = cam_model.project3dToPixel(cv::Point3d(x.f, y.f, z.f)); + cam_model.fromCameraInfo(camera_info); + for (unsigned index = 0; index < cloud.size(); index++) + { + pcl::PointXYZ& p = cloud[index]; + if (p.z < 0 || p.z > 30) continue; + cv::Point2d point = cam_model.project3dToPixel(cv::Point3d(p.x, p.y, p.z)); + if (inCameraFrame(p)) + { + #ifdef DO_ROS_DEBUG + visualization_msgs::Marker marker_point; + marker_point.header = req.header; + marker_point.header.seq = 0; + marker_point.header.frame_id = req.header.frame_id; + marker_point.id = index; + marker_point.type = visualization_msgs::Marker::CUBE; + marker_point.pose.position.x = p.x; + marker_point.pose.position.y = p.y; + marker_point.pose.position.z = p.z; + marker_point.scale.x = 0.1; + marker_point.scale.y = 0.1; + marker_point.scale.z = 0.1; + marker_point.color.a = 1.0; + marker_point.color.r = 0.0; + marker_point.color.g = 1.0; + marker_point.color.b = 0.0; + drawPoint(debug_image, point); + #endif + double distance = sqrt(pow(point.x - req.point.x ,2) + pow(point.y - req.point.y,2) ); //Distance (2D) from request point to projected lidar point + if (distance < req.tolerance) + { + geometry_msgs::Point geo_point; + geo_point.x = p.x; + geo_point.y = p.y; + geo_point.z = p.z; + if (distance < minDistance) + { + res.closest = geo_point; + minDistance = distance; + } + indices.push_back(index); - if (int(point.x) > 0 && int(point.x) < camera_info.width && - int(point.y) > 0 && int(point.y) < camera_info.height) { -#ifdef DO_ROS_DEBUG - visualization_msgs::Marker marker_point; - marker_point.header = req.header; - marker_point.header.seq = 0; - marker_point.header.frame_id = req.header.frame_id; - marker_point.id = (int)ii; - marker_point.type = visualization_msgs::Marker::CUBE; - marker_point.pose.position.x = x.f; - marker_point.pose.position.y = y.f; - marker_point.pose.position.z = z.f; - marker_point.scale.x = 0.1; - marker_point.scale.y = 0.1; - marker_point.scale.z = 0.1; - marker_point.color.a = 1.0; - marker_point.color.r = 0.0; - marker_point.color.g = 1.0; - marker_point.color.b = 0.0; - //marker_point.lifetime = ros::Duration(0.5); + res.transformed.push_back(geo_point); - // Draw - if (int(point.x - 20) > 0 && int(point.x + 20) < camera_info.width && - int(point.y - 20) > 0 && int(point.y + 20) < camera_info.height) { - cv::circle(debug_image, point, 3, - cv::Scalar(0, 0, 255 * std::min(1.0, z.f / 20.0)), -1); - } -#endif - double 
distance = sqrt(pow(point.x - req.point.x ,2) + pow(point.y - req.point.y,2) ); //Distance (2D) from request point to projected lidar point - if (distance < req.tolerance) { -#ifdef DO_ROS_DEBUG - if (int(point.x - 20) > 0 && int(point.x + 20) < camera_info.width && - int(point.y - 20) > 0 && int(point.y + 20) < camera_info.height) { - cv::circle(debug_image, point, 3, cv::Scalar(0, 255, 0), -1); - } -#endif + #ifdef DO_ROS_DEBUG + drawPoint(debug_image, point, cv::Scalar(0, 255, 0)); + marker_point.color.r = 1.0; + marker_point.color.g = 0.0; + marker_point.color.b = 0.0; + #endif + } + #ifdef DO_ROS_DEBUG + markers.markers.push_back(marker_point); + #endif - geometry_msgs::Point geo_point; - geo_point.x = x.f; - geo_point.y = y.f; - geo_point.z = z.f; - if (distance < minDistance) - { - res.closest = geo_point; - minDistance = distance; - } - indices.push_back(ii); - marker_point.color.r = 1.0; - marker_point.color.g = 0.0; - marker_point.color.b = 0.0; - res.transformed.push_back(geo_point); } - markers.markers.push_back(marker_point); - - } } if (res.transformed.size() < 1) { res.success = false; @@ -138,7 +120,7 @@ bool CameraLidarTransformer::transformServiceCallback( float x,y,z,n; pcl::NormalEstimation ne; - ne.computePointNormal (cloud2,indices,x,y,z,n); + ne.computePointNormal (cloud, indices, x, y, z, n); Eigen::Vector3d normal_vector = Eigen::Vector3d(x,y,z).normalized(); res.normal.x = -normal_vector(0, 0); res.normal.y = -normal_vector(1, 0); diff --git a/perception/navigator_vision/nodes/camera_to_lidar/CameraLidarTransformer.hpp b/perception/navigator_vision/nodes/camera_to_lidar/CameraLidarTransformer.hpp index 6ac7b1d2..fe25e115 100644 --- a/perception/navigator_vision/nodes/camera_to_lidar/CameraLidarTransformer.hpp +++ b/perception/navigator_vision/nodes/camera_to_lidar/CameraLidarTransformer.hpp @@ -1,6 +1,5 @@ #pragma once #include -// PCL specific includes #include #include #include @@ -8,9 +7,6 @@ #include #include #include -#include -#include -#include #include #include #include @@ -32,6 +28,7 @@ #include #define DO_ROS_DEBUG + #ifdef DO_ROS_DEBUG #include "opencv2/opencv.hpp" #include @@ -42,16 +39,7 @@ class CameraLidarTransformer { private: - union floatConverter - { - float f; - struct - { - uint8_t data[4]; - }; - }; std::string camera_info_topic; - // ~double MIN_Z,MAX_Z_ERROR; ros::NodeHandle nh; ros::ServiceServer transformServiceServer; tf2_ros::Buffer tfBuffer; @@ -61,13 +49,15 @@ class CameraLidarTransformer ros::Subscriber cameraInfoSub; image_geometry::PinholeCameraModel cam_model; sensor_msgs::CameraInfo camera_info; - ros::Publisher pubMarkers; + bool inCameraFrame(pcl::PointXYZ& p); void cameraInfoCallback(const sensor_msgs::CameraInfo info); + void drawPoint(cv::Mat& mat, cv::Point2d& p, cv::Scalar color=cv::Scalar(0, 0, 255)); bool transformServiceCallback(navigator_msgs::CameraToLidarTransform::Request &req,navigator_msgs::CameraToLidarTransform::Response &res); -#ifdef DO_ROS_DEBUG + #ifdef DO_ROS_DEBUG + ros::Publisher pubMarkers; image_transport::ImageTransport image_transport; image_transport::Publisher points_debug_publisher; -#endif + #endif public: CameraLidarTransformer(); }; From 9790b59d4afec43218a61fae01e04b304280f528 Mon Sep 17 00:00:00 2001 From: David Soto Date: Sat, 19 Nov 2016 19:16:47 -0500 Subject: [PATCH 145/267] PINGER: add least squares based pinger pose estimator yes I know it's in navigator_vision but just CHILL THE HECK OUT its all good i also know that david isn't done with this, but it needs to be in repo so we can use 
it without me fumbling with another screen session

PINGER: add tested least squares pinger

YES I know it is in navigator_vision package. Don't worry, it will be fine;
we just should have this in the repo and progressively improve upon it
---
 .../navigator_vision/nodes/pinger_finder.py | 170 ++++++++++++++++++
 1 file changed, 170 insertions(+)
 create mode 100755 perception/navigator_vision/nodes/pinger_finder.py

diff --git a/perception/navigator_vision/nodes/pinger_finder.py b/perception/navigator_vision/nodes/pinger_finder.py
new file mode 100755
index 00000000..6c64b046
--- /dev/null
+++ b/perception/navigator_vision/nodes/pinger_finder.py
@@ -0,0 +1,170 @@
+#!/usr/bin/env python
+import rospy
+import numpy as np
+from numpy import linalg as npl
+import tf
+from navigator_msgs.srv import FindPinger, SetFrequency, SetFrequencyResponse
+from hydrophones.msg import ProcessedPing
+from geometry_msgs.msg import Point, PoseStamped, Pose
+from visualization_msgs.msg import Marker
+from std_srvs.srv import SetBool, SetBoolResponse
+import navigator_tools
+
+target_freq = 35000
+freq_tol = 1000
+min_amp = 200
+max_samples = 200  # will delete entries with least amplitude after we get past this count
+line_array = np.empty((0, 4), float)
+sample_amplitudes = np.empty((0, 0), float)
+pinger_position = Point(0, 0, 0)
+viz_pub = rospy.Publisher("/visualization_marker", Marker, queue_size=10)
+point_distance = 10  # p1 is along the direction of the pinger but we do not know the actual
+                     # range; p1 will be adjusted to be 'point_distance' away from p0
+debug_arrow_id = 0
+listen = False
+
+# line_arr represents the equation of a 2d line. Structure: [x1, y1, x2, y2]
+# Calculates the point in the plane with the least cumulative distance to
+# every line in line_arr
+def LS_intersection(line_arr):
+    global pinger_position
+    def line_norm(line_end_pts):
+        assert len(line_end_pts) == 4
+        begin = line_end_pts[:2]
+        end = line_end_pts[2:]
+        diff = end - begin
+        return npl.norm(diff)
+
+    num_lines = line_arr.shape[0]
+    begin_pts = line_arr[:, :2]
+    end_pts = line_arr[:, 2:4]
+    diffs = end_pts - begin_pts
+    norms = np.apply_along_axis(line_norm, 1, line_arr)
+    norms = norms.reshape(norms.shape[0], 1)
+    perp_unit_vecs = np.apply_along_axis(lambda x: np.array([[0, -1], [1, 0]]).dot(x), 1, diffs / norms)
+    A_sum = np.zeros((2, 2))
+    Ap_sum = np.zeros((2, 1))
+
+    for x, y in zip(begin_pts, perp_unit_vecs):
+        begin = x.reshape(2, 1)
+        perp_vec = y.reshape(2, 1)
+        A = perp_vec.dot(perp_vec.T)
+        Ap = A.dot(begin)
+        A_sum += A
+        Ap_sum += Ap
+
+    res = npl.inv(A_sum).dot(Ap_sum)
+    pinger_position = Point(res[0], res[1], 0)
+
+
+def find_pinger(arg):
+    if line_array.shape[0] < 2:
+        return None
+    LS_intersection(line_array)
+    return {'pinger_position' : pinger_position, 'num_samples' : line_array.shape[0]}
+
+
+# Receives pings and adds the representation of a line between the hydrophone array and the pinger
+# to our line_arr linear equation system
+def ping_cb(processed_ping):
+    global line_array, sample_amplitudes
+    if not listen:
+        return
+    print "Got processed ping message:\n{}".format(processed_ping)
+    if abs(processed_ping.freq - target_freq) < freq_tol and processed_ping.amplitude > min_amp:
+        print "\x1b[34mTrustworthy pinger heading\x1b[0m"
+        trans, rot = None, None
+        try:
+            tf_listener.waitForTransform("/enu", "/hydrophones", processed_ping.header.stamp, rospy.Duration(0.25))
+            trans, rot = tf_listener.lookupTransform("/enu", "/hydrophones", processed_ping.header.stamp)
+            p0 = np.array([trans[0], trans[1]])
+            R = 
tf.transformations.quaternion_matrix(rot)[:3, :3] + print "quat: {} rot mat: {}".format(rot, R) + print "delta hyd frame: {}".format(processed_ping.position) + delta = R.dot(navigator_tools.point_to_numpy(processed_ping.position))[:2] + print "delta enu frame: {}".format(delta) + p1 = p0 + point_distance * delta / npl.norm(delta) + line_coeffs = np.array([[p0[0], p0[1], p1[0], p1[1]]]) # p0 and p1 define a line + visualize_line(Point(p0[0], p0[1], 0), Point(p1[0], p1[1], 0)) + line_array = np.append(line_array, line_coeffs, 0) + sample_amplitudes = np.append(sample_amplitudes, processed_ping.amplitude) + except (tf.LookupException, tf.ConnectivityException, tf.ExtrapolationException) as e: + print e + # delete softest samples if we have over max_samples + if len(line_array) >= max_samples: + softest_idx = np.argmin(sample_amplitudes) + line_array = np.delete(line_array, softest_idx, axis=0) + sample_amplitudes = np.delete(sample_amplitudes, softest_idx) + print "Number of good samples: {}".format(line_array.shape) + else: + print "Untrustworthy pinger heading. Freq = {} kHZ".format(processed_ping.freq) + +def visualize_pinger(event): + marker = Marker() + marker.ns = "pinger" + marker.header.stamp = rospy.Time(0) + marker.header.frame_id = "/enu" + marker.type = marker.CUBE + marker.action = marker.ADD + marker.scale.x = 1.0 + marker.scale.y = 1.0 + marker.scale.z = 1.0 + marker.color.g = 1.0 + marker.color.a = 1.0 + marker.pose.orientation.w = 1.0 + marker.pose.position = pinger_position + print "POSE: ", pinger_position + if pinger_position != Point(0, 0, 0): + print "vis pinger" + viz_pub.publish(marker) + else: + print "pinger position: {}".format(pinger_position) + +def visualize_line(p0, p1): + global debug_arrow_id + marker = Marker() + marker.ns = "pinger/debug" + marker.header.stamp = rospy.Time(0) + marker.header.frame_id = "/enu" + marker.id = debug_arrow_id + debug_arrow_id += 1 + marker.type = marker.ARROW + marker.action = marker.ADD + marker.lifetime = rospy.Duration(10, 0) + marker.points.append(p0) + marker.points.append(p1) + marker.scale.x = 0.5 + marker.scale.y = 1.0 + marker.scale.z = 1.0 + marker.color.g = 1.0 + marker.color.b = 1.0 + marker.color.a = 1.0 + print "viz line" + viz_pub.publish(marker) + +def set_listen(listen_status): + global listen + listen = listen_status.data + if listen: + print "PINGERFINDER: setting listening to on" + else: + print "PINGERFINDER: setting listening to off" + return {'success': True, 'message': ""} + +def set_freq(msg): + global target_freq, line_array, sample_amplitudes, pinger_position + target_freq = msg.frequency + line_array = np.empty((0, 4), float) + sample_amplitudes = np.empty((0, 0), float) + pinger_position = Point(0, 0, 0) + return SetFrequencyResponse() + + +rospy.init_node('pinger_finder') +tf_listener = tf.TransformListener() +ping_sub = rospy.Subscriber("/hydrophones/processed", ProcessedPing, callback=ping_cb) +pinger_finder_srv = rospy.Service('hydrophones/find_pinger', FindPinger, find_pinger) +activate_listening = rospy.Service('hydrophones/set_listen', SetBool, set_listen) +set_freq_srv = rospy.Service('hydrophones/set_freq', SetFrequency, set_freq) +pinger_viz = rospy.Timer(rospy.Duration(0.3), visualize_pinger) +rospy.spin() From 88df3d0b78c6b204399a47eee04b33739fe96329 Mon Sep 17 00:00:00 2001 From: Kevin Allen Date: Sun, 20 Nov 2016 00:13:08 -0500 Subject: [PATCH 146/267] PINGER: framework for mission with color identification --- perception/navigator_vision/nodes/pinger_finder.py | 7 ++----- 1 file 
changed, 2 insertions(+), 5 deletions(-) diff --git a/perception/navigator_vision/nodes/pinger_finder.py b/perception/navigator_vision/nodes/pinger_finder.py index 6c64b046..2c3fa313 100755 --- a/perception/navigator_vision/nodes/pinger_finder.py +++ b/perception/navigator_vision/nodes/pinger_finder.py @@ -112,13 +112,10 @@ def visualize_pinger(event): marker.color.g = 1.0 marker.color.a = 1.0 marker.pose.orientation.w = 1.0 - marker.pose.position = pinger_position - print "POSE: ", pinger_position + marker.pose.position = pinger_position if pinger_position != Point(0, 0, 0): - print "vis pinger" + print "POSE: ", pinger_position viz_pub.publish(marker) - else: - print "pinger position: {}".format(pinger_position) def visualize_line(p0, p1): global debug_arrow_id From e7a3f99602404bbf85c46b4d8d5403dd4bfcf0c0 Mon Sep 17 00:00:00 2001 From: Kevin Allen Date: Tue, 22 Nov 2016 15:03:15 -0500 Subject: [PATCH 147/267] DETECT DELIVER: Fix bug in shape perception when approaching at extreme angles --- .../CameraLidarTransformer.cpp | 98 ++++++++++--------- .../CameraLidarTransformer.hpp | 3 +- 2 files changed, 56 insertions(+), 45 deletions(-) diff --git a/perception/navigator_vision/nodes/camera_to_lidar/CameraLidarTransformer.cpp b/perception/navigator_vision/nodes/camera_to_lidar/CameraLidarTransformer.cpp index 429dc304..102fe7ef 100644 --- a/perception/navigator_vision/nodes/camera_to_lidar/CameraLidarTransformer.cpp +++ b/perception/navigator_vision/nodes/camera_to_lidar/CameraLidarTransformer.cpp @@ -5,7 +5,8 @@ CameraLidarTransformer::CameraLidarTransformer() tfBuffer(), tfListener(tfBuffer, nh), lidarSub(nh, "/velodyne_points", 10), - lidarCache(lidarSub, 10) + lidarCache(lidarSub, 10), + camera_info_received(false) #ifdef DO_ROS_DEBUG , image_transport(nh) @@ -24,8 +25,10 @@ CameraLidarTransformer::CameraLidarTransformer() void CameraLidarTransformer::cameraInfoCallback(sensor_msgs::CameraInfo info) { camera_info = info; + cam_model.fromCameraInfo(camera_info); + camera_info_received = true; } -bool CameraLidarTransformer::inCameraFrame(pcl::PointXYZ& point) +bool CameraLidarTransformer::inCameraFrame(cv::Point2d& point) { return point.x > 0 && point.x < camera_info.width && point.y > 0 && point.y < camera_info.height; @@ -37,32 +40,38 @@ void CameraLidarTransformer::drawPoint(cv::Mat& mat, cv::Point2d& point, cv::Sca } bool CameraLidarTransformer::transformServiceCallback(navigator_msgs::CameraToLidarTransform::Request &req, navigator_msgs::CameraToLidarTransform::Response &res) { + if (!camera_info_received) + { + res.success = false; + res.error = "NO CAMERA INFO"; + return true; + } visualization_msgs::MarkerArray markers; - sensor_msgs::PointCloud2ConstPtr scloud = - lidarCache.getElemAfterTime(req.header.stamp); + sensor_msgs::PointCloud2ConstPtr scloud = lidarCache.getElemAfterTime(req.header.stamp); if (!scloud) { res.success = false; res.error = navigator_msgs::CameraToLidarTransform::Response::CLOUD_NOT_FOUND; return true; } - geometry_msgs::TransformStamped transform = tfBuffer.lookupTransform(req.header.frame_id, "velodyne", req.header.stamp); + geometry_msgs::TransformStamped transform = tfBuffer.lookupTransform(req.header.frame_id, "velodyne", ros::Time(0)); sensor_msgs::PointCloud2 cloud_transformed; tf2::doTransform(*scloud, cloud_transformed, transform); + #ifdef DO_ROS_DEBUG cv::Mat debug_image(camera_info.height, camera_info.width, CV_8UC3, cv::Scalar(0)); cv::circle(debug_image, cv::Point(req.point.x, req.point.y), 8, cv::Scalar(255, 0, 0), -1); #endif + double 
minDistance = std::numeric_limits::max(); pcl::PointCloud cloud; pcl::fromROSMsg(cloud_transformed, cloud); std::vector indices; - cam_model.fromCameraInfo(camera_info); for (unsigned index = 0; index < cloud.size(); index++) { pcl::PointXYZ& p = cloud[index]; if (p.z < 0 || p.z > 30) continue; cv::Point2d point = cam_model.project3dToPixel(cv::Point3d(p.x, p.y, p.z)); - if (inCameraFrame(p)) + if (inCameraFrame(point)) { #ifdef DO_ROS_DEBUG visualization_msgs::Marker marker_point; @@ -112,54 +121,55 @@ bool CameraLidarTransformer::transformServiceCallback(navigator_msgs::CameraToLi } } - if (res.transformed.size() < 1) { - res.success = false; - res.error = navigator_msgs::CameraToLidarTransform::Response::NO_POINTS_FOUND; - return true; - } + if (res.transformed.size() > 0) { + float x,y,z,n; + pcl::NormalEstimation ne; + ne.computePointNormal (cloud, indices, x, y, z, n); + Eigen::Vector3d normal_vector = Eigen::Vector3d(x,y,z).normalized(); + res.normal.x = -normal_vector(0, 0); + res.normal.y = -normal_vector(1, 0); + res.normal.z = -normal_vector(2, 0); - float x,y,z,n; - pcl::NormalEstimation ne; - ne.computePointNormal (cloud, indices, x, y, z, n); - Eigen::Vector3d normal_vector = Eigen::Vector3d(x,y,z).normalized(); - res.normal.x = -normal_vector(0, 0); - res.normal.y = -normal_vector(1, 0); - res.normal.z = -normal_vector(2, 0); + #ifdef DO_ROS_DEBUG + //Add a marker for the normal to the plane + geometry_msgs::Point sdp_normalvec_ros; + sdp_normalvec_ros.x = res.closest.x + res.normal.x; + sdp_normalvec_ros.y = res.closest.y + res.normal.y; + sdp_normalvec_ros.z = res.closest.z + res.normal.z; + visualization_msgs::Marker marker_normal; + marker_normal.header = req.header; + marker_normal.header.seq = 0; + marker_normal.header.frame_id = req.header.frame_id; + marker_normal.id = 3000; + marker_normal.type = visualization_msgs::Marker::ARROW; + marker_normal.points.push_back(res.closest); + marker_normal.points.push_back(sdp_normalvec_ros); + marker_normal.scale.x = 0.1; + marker_normal.scale.y = 0.5; + marker_normal.scale.z = 0.5; + marker_normal.color.a = 1.0; + marker_normal.color.r = 0.0; + marker_normal.color.g = 0.0; + marker_normal.color.b = 1.0; + markers.markers.push_back(marker_normal); + #endif - res.distance = res.closest.z; + res.distance = res.closest.z; + res.success = true; + } else { + res.error = navigator_msgs::CameraToLidarTransform::Response::NO_POINTS_FOUND; + res.success = false; + } #ifdef DO_ROS_DEBUG - //Add a marker for the normal to the plane - geometry_msgs::Point sdp_normalvec_ros; - sdp_normalvec_ros.x = res.closest.x + res.normal.x; - sdp_normalvec_ros.y = res.closest.y + res.normal.y; - sdp_normalvec_ros.z = res.closest.z + res.normal.z; - visualization_msgs::Marker marker_normal; - marker_normal.header = req.header; - marker_normal.header.seq = 0; - marker_normal.header.frame_id = req.header.frame_id; - marker_normal.id = 3000; - marker_normal.type = visualization_msgs::Marker::ARROW; - marker_normal.points.push_back(res.closest); - marker_normal.points.push_back(sdp_normalvec_ros); - marker_normal.scale.x = 0.1; - marker_normal.scale.y = 0.5; - marker_normal.scale.z = 0.5; - marker_normal.color.a = 1.0; - marker_normal.color.r = 0.0; - marker_normal.color.g = 0.0; - marker_normal.color.b = 1.0; - markers.markers.push_back(marker_normal); - //Publish 3D debug market pubMarkers.publish(markers); - //Publish debug image cv_bridge::CvImage ros_debug_image; ros_debug_image.encoding = "bgr8"; ros_debug_image.image = debug_image.clone(); 
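       // Publish the annotated debug image so the projected lidar points can be inspected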
points_debug_publisher.publish(ros_debug_image.toImageMsg()); #endif - res.success = true; + return true; } diff --git a/perception/navigator_vision/nodes/camera_to_lidar/CameraLidarTransformer.hpp b/perception/navigator_vision/nodes/camera_to_lidar/CameraLidarTransformer.hpp index fe25e115..97eb8f9e 100644 --- a/perception/navigator_vision/nodes/camera_to_lidar/CameraLidarTransformer.hpp +++ b/perception/navigator_vision/nodes/camera_to_lidar/CameraLidarTransformer.hpp @@ -48,8 +48,9 @@ class CameraLidarTransformer message_filters::Cache lidarCache; ros::Subscriber cameraInfoSub; image_geometry::PinholeCameraModel cam_model; + bool camera_info_received; sensor_msgs::CameraInfo camera_info; - bool inCameraFrame(pcl::PointXYZ& p); + bool inCameraFrame(cv::Point2d& p); void cameraInfoCallback(const sensor_msgs::CameraInfo info); void drawPoint(cv::Mat& mat, cv::Point2d& p, cv::Scalar color=cv::Scalar(0, 0, 255)); bool transformServiceCallback(navigator_msgs::CameraToLidarTransform::Request &req,navigator_msgs::CameraToLidarTransform::Response &res); From 9d61e4d2ebfffc969fe451818080096a2fa349e5 Mon Sep 17 00:00:00 2001 From: David Soto Date: Tue, 22 Nov 2016 15:55:48 -0500 Subject: [PATCH 148/267] PINGER: put pinger perception into class, cleaner code --- .../navigator_vision/nodes/pinger_finder.py | 290 +++++++++--------- 1 file changed, 148 insertions(+), 142 deletions(-) diff --git a/perception/navigator_vision/nodes/pinger_finder.py b/perception/navigator_vision/nodes/pinger_finder.py index 2c3fa313..5279d555 100755 --- a/perception/navigator_vision/nodes/pinger_finder.py +++ b/perception/navigator_vision/nodes/pinger_finder.py @@ -1,5 +1,6 @@ #!/usr/bin/env python import rospy +import rospkg import numpy as np from numpy import linalg as npl import tf @@ -10,158 +11,163 @@ from std_srvs.srv import SetBool, SetBoolResponse import navigator_tools -target_freq = 35000 -freq_tol = 1000 -min_amp = 200 -max_samples = 200 # will delete entries with least amplitude after we get pass this count -line_array = np.empty((0, 4), float) -sample_amplitudes = np.empty((0, 0), float) -pinger_position = Point(0, 0, 0) -viz_pub = rospy.Publisher("/visualization_marker", Marker, queue_size=10) -point_distance = 10 # p1 is aling the direction of the pinger but we do not know the actual - # range p1 will be adjusted to be 'point_distance' away from p0 -debug_arrow_id = 0 -listen = False +class PingerFinder(object): + """ + This class will find the position of a pinger at a given frequency. For it + to find a good solution, it needs to get observations from at least 2 sufficiently + different positions. Observations are considered valid if they fall within + self.freq_tol of self.target_freq, where self.target_freq can be set + using 'rosservice call /hydrophones/set_freq "frequency: ____"'. -# line_arr represents the equation of a 2d line. Structure: [x1, y1, x2, y2] -# Calculates the point in the plane with the least cummulative distance to -# every line in line_arr -def LS_intersection(line_arr): - global pinger_position - def line_norm(line_end_pts): - assert len(line_end_pts) == 4 - begin = line_end_pts[:2] - end = line_end_pts[2:] - diff = end - begin - return npl.norm(diff) + The default behaviour is to not store observations. To start recording valid + observations, run 'rosservice call /hydrophones/set_listen "data: true"'. Remember + to run 'rosservice call /hydrophones/set_listen "data: false"' to stop recording + observations. 
For best results, only record observations while the vehicle is
+    station-keeping.
+
+    When running, this class will publish arrow markers to /visualization_marker and
+    a green cube marker representing the estimated position of the pinger whenever the
+    /hydrophones/find_pinger service is called.
+    """
+
+    def __init__(self):
+        self.map_frame = "/enu"
+        self.hydrophone_frame = "/hydrophones"
+        self.tf_listener = tf.TransformListener()
+        self.target_freq = 35E3
+        self.freq_tol = 1E3
+        self.min_amp = 200  # if too many outliers, increase this
+        self.max_observations = 200
+        self.line_array = np.empty((0, 4), float)
+        self.observation_amplitudes = np.empty((0, 0), float)
+        self.pinger_position = Point(0, 0, 0)
+        self.heading_pseudorange = 10
+        self.debug_arrow_id = 0
+        self.debug_arrow_lifetime = rospy.Duration(10, 0)
+        self.listen = False
+        self.ping_sub = rospy.Subscriber("/hydrophones/processed", ProcessedPing, callback=self.ping_cb)
+        self.marker_pub = rospy.Publisher("/visualization_marker", Marker, queue_size=10)
+        self.pinger_finder_srv = rospy.Service('hydrophones/find_pinger', FindPinger, self.find_pinger)
+        self.activate_listening = rospy.Service('hydrophones/set_listen', SetBool, self.set_listen)
+        self.set_freq_srv = rospy.Service('hydrophones/set_freq', SetFrequency, self.set_freq)
+
+    def ping_cb(self, ping):
+        print rospkg.get_ros_package_path()
+        print "PINGERFINDER: freq={p.freq:.0f} amp={p.amplitude:.0f}".format(p=ping)
+        if abs(ping.freq - self.target_freq) < self.freq_tol and ping.amplitude > self.min_amp and self.listen:
+            trans, rot = None, None
+            try:
+                self.tf_listener.waitForTransform(self.map_frame, self.hydrophone_frame, ping.header.stamp, rospy.Duration(0.25))
+                trans, rot = self.tf_listener.lookupTransform(self.map_frame, self.hydrophone_frame, ping.header.stamp)
+            except (tf.LookupException, tf.ConnectivityException, tf.ExtrapolationException) as e:
+                print e
+                return
+            p0 = np.array([trans[0], trans[1]])
+            R = tf.transformations.quaternion_matrix(rot)[:3, :3]
+            delta = R.dot(navigator_tools.point_to_numpy(ping.position))[:2]
+            p1 = p0 + self.heading_pseudorange * delta / npl.norm(delta)
+            line_coeffs = np.array([[p0[0], p0[1], p1[0], p1[1]]])  # p0 and p1 define a line
+            self.visualize_arrow(Point(p0[0], p0[1], 0), Point(p1[0], p1[1], 0))
+            self.line_array = np.append(self.line_array, line_coeffs, 0)
+            self.observation_amplitudes = np.append(self.observation_amplitudes, ping.amplitude)
+            if len(self.line_array) >= self.max_observations:  # delete softest samples if we have over max_observations
+                softest_idx = np.argmin(self.observation_amplitudes)
+                self.line_array = np.delete(self.line_array, softest_idx, axis=0)
+                self.observation_amplitudes = np.delete(self.observation_amplitudes, softest_idx)
+            print "PINGERFINDER: Observation collected. 
Total: {}".format(self.line_array.shape[0]) + def LS_intersection(self): + """ + self.line_array represents a system of 2d line equations. Each row represents a different + observation of a line in map frame on which the pinger lies. Row structure: [x1, y1, x2, y2] + Calculates the point in the plane with the least cummulative distance to every line + in self.line_array. For more information, see: + https://en.wikipedia.org/wiki/Line-line_intersection#In_two_dimensions_2 + """ -def find_pinger(arg): - if line_array.shape[0] < 2: - return None - LS_intersection(line_array) - return {'pinger_position' : pinger_position, 'num_samples' : line_array.shape[0]} + def line_segment_norm(line_end_pts): + assert len(line_end_pts) == 4 + return npl.norm(line_end_pts[2:] - line_end_pts[:2]) + begin_pts = self.line_array[:, :2] + diffs = self.line_array[:, 2:4] - begin_pts + norms = np.apply_along_axis(line_segment_norm, 1, self.line_array).reshape(diffs.shape[0], 1) + rot_left_90 = np.array([[0, -1], [1, 0]]) + perp_unit_vecs = np.apply_along_axis(lambda unit_diffs: rot_left_90.dot(unit_diffs), 1, diffs / norms) + A_sum = np.zeros((2, 2)) + Ap_sum = np.zeros((2, 1)) -# receives pings and adds the representation of a line between the hydrophone array and the pinger -# to our line_arr linear equation system -def ping_cb(processed_ping): - global line_array, sample_amplitudes - if not listen: - return - print "Got processed ping message:\n{}".format(processed_ping) - if abs(processed_ping.freq - target_freq) < freq_tol and processed_ping.amplitude > min_amp: - print "\x1b[34mTrustworthy pinger heading\x1b[0m" - trans, rot = None, None - try: - tf_listener.waitForTransform("/enu", "/hydrophones", processed_ping.header.stamp, rospy.Duration(0.25)) - trans, rot = tf_listener.lookupTransform("/enu", "/hydrophones", processed_ping.header.stamp) - p0 = np.array([trans[0], trans[1]]) - R = tf.transformations.quaternion_matrix(rot)[:3, :3] - print "quat: {} rot mat: {}".format(rot, R) - print "delta hyd frame: {}".format(processed_ping.position) - delta = R.dot(navigator_tools.point_to_numpy(processed_ping.position))[:2] - print "delta enu frame: {}".format(delta) - p1 = p0 + point_distance * delta / npl.norm(delta) - line_coeffs = np.array([[p0[0], p0[1], p1[0], p1[1]]]) # p0 and p1 define a line - visualize_line(Point(p0[0], p0[1], 0), Point(p1[0], p1[1], 0)) - line_array = np.append(line_array, line_coeffs, 0) - sample_amplitudes = np.append(sample_amplitudes, processed_ping.amplitude) - except (tf.LookupException, tf.ConnectivityException, tf.ExtrapolationException) as e: - print e - # delete softest samples if we have over max_samples - if len(line_array) >= max_samples: - softest_idx = np.argmin(sample_amplitudes) - line_array = np.delete(line_array, softest_idx, axis=0) - sample_amplitudes = np.delete(sample_amplitudes, softest_idx) - print "Number of good samples: {}".format(line_array.shape) - else: - print "Untrustworthy pinger heading. 
Freq = {} kHZ".format(processed_ping.freq) + for x, y in zip(begin_pts, perp_unit_vecs): + begin = x.reshape(2, 1) + perp_vec = y.reshape(2, 1) + A = perp_vec.dot(perp_vec.T) + Ap = A.dot(begin) + A_sum += A + Ap_sum += Ap + + res = npl.inv(A_sum).dot(Ap_sum) + self.pinger_position = Point(res[0], res[1], 0) + return self.pinger_position -def visualize_pinger(event): - marker = Marker() - marker.ns = "pinger" - marker.header.stamp = rospy.Time(0) - marker.header.frame_id = "/enu" - marker.type = marker.CUBE - marker.action = marker.ADD - marker.scale.x = 1.0 - marker.scale.y = 1.0 - marker.scale.z = 1.0 - marker.color.g = 1.0 - marker.color.a = 1.0 - marker.pose.orientation.w = 1.0 - marker.pose.position = pinger_position - if pinger_position != Point(0, 0, 0): - print "POSE: ", pinger_position - viz_pub.publish(marker) + def find_pinger(self, srv_request): + if self.line_array.shape[0] < 2: + print "PINGER_FINDER: Can't locate pinger. Less than two valid observations have been recorded." + return None + res = {'pinger_position' : self.LS_intersection(), 'num_samples' : self.line_array.shape[0]} + self.visualize_pinger() + return res -def visualize_line(p0, p1): - global debug_arrow_id - marker = Marker() - marker.ns = "pinger/debug" - marker.header.stamp = rospy.Time(0) - marker.header.frame_id = "/enu" - marker.id = debug_arrow_id - debug_arrow_id += 1 - marker.type = marker.ARROW - marker.action = marker.ADD - marker.lifetime = rospy.Duration(10, 0) - marker.points.append(p0) - marker.points.append(p1) - marker.scale.x = 0.5 - marker.scale.y = 1.0 - marker.scale.z = 1.0 - marker.color.g = 1.0 - marker.color.b = 1.0 - marker.color.a = 1.0 - print "viz line" - viz_pub.publish(marker) - -def set_listen(listen_status): - global listen - listen = listen_status.data - if listen: - print "PINGERFINDER: setting listening to on" - else: - print "PINGERFINDER: setting listening to off" - return {'success': True, 'message': ""} + def visualize_pinger(self): + marker = Marker() + marker.ns = "pinger{}".format(self.target_freq) + marker.header.stamp = rospy.Time(0) + marker.header.frame_id = self.map_frame + marker.type = marker.CUBE + marker.action = marker.ADD + marker.scale.x = 1.0 + marker.scale.y = 1.0 + marker.scale.z = 1.0 + marker.color.g = 1.0 + marker.color.a = 0.75 + marker.pose.position = self.pinger_position + if self.pinger_position != Point(0, 0, 0): + self.marker_pub.publish(marker) + print "PINGERFINDER: position: ({p.x[0]:.2f}, {p.y[0]:.2f})".format(p=self.pinger_position) + -def set_freq(msg): - global target_freq, line_array, sample_amplitudes, pinger_position - target_freq = msg.frequency - line_array = np.empty((0, 4), float) - sample_amplitudes = np.empty((0, 0), float) - pinger_position = Point(0, 0, 0) - return SetFrequencyResponse() + def visualize_arrow(self, tail, head): + marker = Marker() + marker.ns = "pinger{}/heading".format(self.target_freq) + marker.header.stamp = rospy.Time(0) + marker.header.frame_id = self.map_frame + marker.id = self.debug_arrow_id + self.debug_arrow_id += 1 + marker.type = marker.ARROW + marker.action = marker.ADD + marker.lifetime = self.debug_arrow_lifetime + marker.points.append(tail) + marker.points.append(head) + marker.scale.x = 0.5 + marker.scale.y = 1.0 + marker.scale.z = 1.0 + marker.color.g = 1.0 + marker.color.b = 1.0 + marker.color.a = 0.5 + self.marker_pub.publish(marker) + + def set_listen(self, listen_status): + self.listen = listen_status.data + print "PINGERFINDER: setting listening to on" if self.listen else "PINGERFINDER: 
setting listening to off" + return {'success': True, 'message': ""} + def set_freq(self, msg): + self.target_freq = msg.frequency + self.line_array = np.empty((0, 4), float) + self.sample_amplitudes = np.empty((0, 0), float) + self.pinger_position = Point(0, 0, 0) + return SetFrequencyResponse() -rospy.init_node('pinger_finder') -tf_listener = tf.TransformListener() -ping_sub = rospy.Subscriber("/hydrophones/processed", ProcessedPing, callback=ping_cb) -pinger_finder_srv = rospy.Service('hydrophones/find_pinger', FindPinger, find_pinger) -activate_listening = rospy.Service('hydrophones/set_listen', SetBool, set_listen) -set_freq_srv = rospy.Service('hydrophones/set_freq', SetFrequency, set_freq) -pinger_viz = rospy.Timer(rospy.Duration(0.3), visualize_pinger) -rospy.spin() +if __name__ == '__main__': + rospy.init_node('pinger_finder') + as_per_the_ususal = PingerFinder() + rospy.spin() From 74a24349eb34d93d5c2a9b7d5f1ced7471fe704e Mon Sep 17 00:00:00 2001 From: Kevin Allen Date: Tue, 22 Nov 2016 18:26:47 -0500 Subject: [PATCH 149/267] TOOLS: add tool to record rviz clicked points to csv --- .../nodes/clicked_point_recorder.py | 49 +++++++++++++++++++ 1 file changed, 49 insertions(+) create mode 100755 utils/navigator_tools/nodes/clicked_point_recorder.py diff --git a/utils/navigator_tools/nodes/clicked_point_recorder.py b/utils/navigator_tools/nodes/clicked_point_recorder.py new file mode 100755 index 00000000..f9596fc6 --- /dev/null +++ b/utils/navigator_tools/nodes/clicked_point_recorder.py @@ -0,0 +1,49 @@ +#!/usr/bin/env python + +""" +Listens to RVIZ clicked points, storing points in a csv file + +Usage: rosrun navigator_tools clicked_point_recorder.py +""" + +import rospy +import datetime +import csv +from geometry_msgs.msg import PointStamped +class ClickedPointRecorder: + def __init__(self): + self.point_sub = rospy.Subscriber("/clicked_point", PointStamped, self.point_cb) + self.points = [] + + def point_to_dict(self, point): + return { 'seq': point.header.seq, + 'secs' : point.header.stamp.secs, + 'nsecs' : point.header.stamp.nsecs, + 'frame_id' : point.header.frame_id, + 'x' : point.point.x, + 'y': point.point.y, + 'z': point.point.z} + + def write_file(self): + time = datetime.datetime.now().strftime("%Y-%m-%d-%H-%M-%S") + filename = 'clickedpoints{}.csv'.format(time) + with open(filename ,'wx') as csvfile: + fieldnames = ['seq','secs','nsecs','frame_id','x','y','z'] + writer = csv.DictWriter(csvfile, fieldnames=fieldnames) + writer.writeheader() + for p in self.points: + d = self.point_to_dict(p) + writer.writerow(d) + rospy.loginfo("Writing points to {}".format(filename)) + + def point_cb(self,point): + rospy.loginfo("Received new point: {}".format(point)) + self.points.append(point) + +if __name__ == '__main__': + rospy.init_node('clicked_point_recorder') + recorder = ClickedPointRecorder() + def shutdown_cb(): + recorder.write_file() + rospy.on_shutdown(shutdown_cb) + rospy.spin() From dcefb148e05cb5cb3102fa0bde56817ebc7c9828 Mon Sep 17 00:00:00 2001 From: Kevin Allen Date: Tue, 22 Nov 2016 23:40:40 -0500 Subject: [PATCH 150/267] TOOLS: Add roi controls to video player --- utils/navigator_tools/nodes/video_player.py | 35 +++++++++++++++++++-- 1 file changed, 32 insertions(+), 3 deletions(-) diff --git a/utils/navigator_tools/nodes/video_player.py b/utils/navigator_tools/nodes/video_player.py index 7c08ad83..23fa0123 100755 --- a/utils/navigator_tools/nodes/video_player.py +++ b/utils/navigator_tools/nodes/video_player.py @@ -36,17 +36,33 @@ def __init__(self): self.slider = 
rospy.get_param('~slider',True) self.start_frame = rospy.get_param('~start_frames',0) self.cap = cv2.VideoCapture(self.filename) + + self.width = self.cap.get(cv2.cv.CV_CAP_PROP_FRAME_WIDTH) + self.height = self.cap.get(cv2.cv.CV_CAP_PROP_FRAME_HEIGHT) + + self.roi_y_offset = rospy.get_param('~y_offset', 0) + self.roi_x_offset = rospy.get_param('~x_offset', 0) + self.roi_height = rospy.get_param('~height', self.height) + self.roi_width = rospy.get_param('~width', self.width) + self.num_frames = self.cap.get(cv2.cv.CV_CAP_PROP_FRAME_COUNT) self.fps = rospy.get_param('~fps',self.cap.get(cv2.cv.CV_CAP_PROP_FPS)) self.cap.set(cv2.cv.CV_CAP_PROP_POS_FRAMES,self.start_frame) self.num_frames = self.cap.get(cv2.cv.CV_CAP_PROP_FRAME_COUNT) + if (self.num_frames < 1): + raise Exception("Cannot read video {}".format(self.filename)) + self.paused = False self.ended = False self.last_key = 255 - + if (self.slider): cv2.namedWindow('Video Control') - cv2.createTrackbar('Frame','Video Control',int(0),int(self.num_frames),self.trackbar_cb) + cv2.createTrackbar('Frame','Video Control',int(self.start_frame),int(self.num_frames),self.trackbar_cb) + cv2.createTrackbar('ROI x_offset', 'Video Control', int(self.roi_x_offset), int(self.width)-1, self.roi_x_cb) + cv2.createTrackbar('ROI y_offset', 'Video Control', int(self.roi_y_offset), int(self.height)-1, self.roi_y_cb) + cv2.createTrackbar('ROI height', 'Video Control', int(self.roi_height), int(self.height), self.roi_height_cb) + cv2.createTrackbar('ROI width', 'Video Control', int(self.roi_width), int(self.width), self.roi_width_cb) rospy.loginfo("Playing {} at {}fps starting at frame {} ({} Total Frames)".format(self.filename,self.fps,self.start_frame,self.num_frames)) def run(self): @@ -77,11 +93,24 @@ def one_frame(self): return else: self.ended = False - self.image_pub.publish(self.bridge.cv2_to_imgmsg(frame, "bgr8")) + frame_roied = frame[self.roi_y_offset:self.roi_height, self.roi_x_offset:self.roi_width] + self.image_pub.publish(self.bridge.cv2_to_imgmsg(frame_roied, "bgr8")) def trackbar_cb(self,x): self.cap.set(cv2.cv.CV_CAP_PROP_POS_FRAMES,x) + def roi_x_cb(self, d): + self.roi_x_offset = d + + def roi_y_cb(self, d): + self.roi_y_offset = d + + def roi_height_cb(self, d): + self.roi_height = d + + def roi_width_cb(self, d): + self.roi_width = d + def pause(self): self.paused = not self.paused From ce0f2110c50cc0aff0af8aebf1db4beab802b536 Mon Sep 17 00:00:00 2001 From: Kevin Allen Date: Thu, 24 Nov 2016 02:39:43 -0500 Subject: [PATCH 151/267] DETECT DELIVER, PINGER, TOOLS: address pr comments --- perception/navigator_vision/nodes/pinger_finder.py | 2 +- utils/navigator_tools/nodes/clicked_point_recorder.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/perception/navigator_vision/nodes/pinger_finder.py b/perception/navigator_vision/nodes/pinger_finder.py index 5279d555..c2ad59ce 100755 --- a/perception/navigator_vision/nodes/pinger_finder.py +++ b/perception/navigator_vision/nodes/pinger_finder.py @@ -1,4 +1,5 @@ #!/usr/bin/env python +from __future__ import division import rospy import rospkg import numpy as np @@ -52,7 +53,6 @@ def __init__(self): self.set_freq_srv = rospy.Service('hydrophones/set_freq', SetFrequency, self.set_freq) def ping_cb(self, ping): - print rospkg.get_ros_package_path() print "PINGERFINDER: freq={p.freq:.0f} amp={p.amplitude:.0f}".format(p=ping) if abs(ping.freq - self.target_freq) < self.freq_tol and ping.amplitude > self.min_amp and self.listen: trans, rot = None, None diff --git 
a/utils/navigator_tools/nodes/clicked_point_recorder.py b/utils/navigator_tools/nodes/clicked_point_recorder.py index f9596fc6..fc01a03e 100755 --- a/utils/navigator_tools/nodes/clicked_point_recorder.py +++ b/utils/navigator_tools/nodes/clicked_point_recorder.py @@ -12,8 +12,8 @@ from geometry_msgs.msg import PointStamped class ClickedPointRecorder: def __init__(self): - self.point_sub = rospy.Subscriber("/clicked_point", PointStamped, self.point_cb) self.points = [] + self.point_sub = rospy.Subscriber("/clicked_point", PointStamped, self.point_cb) def point_to_dict(self, point): return { 'seq': point.header.seq, @@ -44,6 +44,6 @@ def point_cb(self,point): rospy.init_node('clicked_point_recorder') recorder = ClickedPointRecorder() def shutdown_cb(): - recorder.write_file() + recorder.write_file() rospy.on_shutdown(shutdown_cb) rospy.spin() From 8d56d420b5dc22d059fa08e60b6d7c0ed211a20f Mon Sep 17 00:00:00 2001 From: Navigator Date: Sun, 13 Nov 2016 14:38:36 -0500 Subject: [PATCH 152/267] TESS CHANGES Conflicts: mission_control/navigator_missions/nav_missions/scan_the_code.py --- .../navigator_tools/navigator_tools/object_database_helper.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/utils/navigator_tools/navigator_tools/object_database_helper.py b/utils/navigator_tools/navigator_tools/object_database_helper.py index 7c6cb7bd..3d32bb82 100644 --- a/utils/navigator_tools/navigator_tools/object_database_helper.py +++ b/utils/navigator_tools/navigator_tools/object_database_helper.py @@ -41,14 +41,14 @@ def begin_observing(self, cb): for o in resp.objects: # The is upper call is because the first case is upper case if it is a 'fake' object... WHYYYYY - if o.name not in self.found and not o.name[0].isupper(): + if o.name not in self.found: self.found.add(o.name) self.new_object_subscriber(o) def object_cb(self, perception_objects): """Callback for the object database.""" for o in perception_objects.objects: - if o.name not in self.found and not o.name[0].isupper(): + if o.name not in self.found: self.found.add(o.name) if self.new_object_subscriber is not None: self.new_object_subscriber(o) From 7c0b846ce320e8e6c1be422008c4cdac4815eb79 Mon Sep 17 00:00:00 2001 From: Navigator Date: Sun, 13 Nov 2016 16:31:36 -0500 Subject: [PATCH 153/267] TESS CHANGES --- .../navigator_tools/object_database_helper.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/utils/navigator_tools/navigator_tools/object_database_helper.py b/utils/navigator_tools/navigator_tools/object_database_helper.py index 3d32bb82..a8098212 100644 --- a/utils/navigator_tools/navigator_tools/object_database_helper.py +++ b/utils/navigator_tools/navigator_tools/object_database_helper.py @@ -38,13 +38,19 @@ def begin_observing(self, cb): req = ObjectDBQueryRequest() req.name = 'all' resp = yield self._database(req) - + req.name = 'All' + resp1 = yield self._database(req) for o in resp.objects: # The is upper call is because the first case is upper case if it is a 'fake' object... WHYYYYY if o.name not in self.found: self.found.add(o.name) self.new_object_subscriber(o) + for o in resp1.objects: + # The is upper call is because the first case is upper case if it is a 'fake' object... 
WHYYYYY + if o.name not in self.found: + self.found.add(o.name) + self.new_object_subscriber(o) def object_cb(self, perception_objects): """Callback for the object database.""" for o in perception_objects.objects: From cf4e6352c25a687c6dc575f2f049150570a73176 Mon Sep 17 00:00:00 2001 From: Tess Date: Thu, 17 Nov 2016 04:44:18 -0500 Subject: [PATCH 154/267] MISSION PLANNER: Published when a new missions completes or starts. Fixes some aspects of scan the code, Accepts MissingPerceptionObject as an exception in mission planner Conflicts: mission_control/navigator_missions/nav_missions/scan_the_code.py mission_control/navigator_missions/navigator_singleton/navigator.py --- utils/navigator_tools/navigator_tools/__init__.py | 2 ++ .../navigator_tools/missing_perception_object.py | 10 ++++++++++ 2 files changed, 12 insertions(+) create mode 100644 utils/navigator_tools/navigator_tools/missing_perception_object.py diff --git a/utils/navigator_tools/navigator_tools/__init__.py b/utils/navigator_tools/navigator_tools/__init__.py index b2fae0f2..0fa0c06c 100644 --- a/utils/navigator_tools/navigator_tools/__init__.py +++ b/utils/navigator_tools/navigator_tools/__init__.py @@ -6,6 +6,7 @@ import rviz_helpers import general_helpers import object_database_helper +import missing_perception_object from init_helpers import * from image_helpers import * @@ -15,3 +16,4 @@ from rviz_helpers import * from general_helpers import * from object_database_helper import * +from missing_perception_object import * \ No newline at end of file diff --git a/utils/navigator_tools/navigator_tools/missing_perception_object.py b/utils/navigator_tools/navigator_tools/missing_perception_object.py new file mode 100644 index 00000000..63d4054c --- /dev/null +++ b/utils/navigator_tools/navigator_tools/missing_perception_object.py @@ -0,0 +1,10 @@ +class MissingPerceptionObject(Exception): + + def __init__(self, missing_object, message="An object from the database is missing"): + + # Call the base class constructor with the parameters it needs + super(MissingPerceptionObject, self).__init__(message) + + # Now for your custom code... 
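+        # Keep the missing object's name on the exception so callers know what to search for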
+ self.missing_object = missing_object + self.message = message \ No newline at end of file From 1e2c1c9e80c39adbb3947802563875bc0dd6ed0c Mon Sep 17 00:00:00 2001 From: Tess Date: Fri, 18 Nov 2016 23:46:08 -0500 Subject: [PATCH 155/267] MISSION PLANNER: adds requested changes to the mission planner Conflicts: perception/navigator_lidar_oa/src/lidar.cpp --- .../navigator_tools/object_database_helper.py | 114 ++++++++++++++++-- 1 file changed, 107 insertions(+), 7 deletions(-) diff --git a/utils/navigator_tools/navigator_tools/object_database_helper.py b/utils/navigator_tools/navigator_tools/object_database_helper.py index a8098212..a9881b3f 100644 --- a/utils/navigator_tools/navigator_tools/object_database_helper.py +++ b/utils/navigator_tools/navigator_tools/object_database_helper.py @@ -1,9 +1,15 @@ """Use the DBHelper class to interface with the Database without having to deal with ROS things.""" from navigator_msgs.msg import PerceptionObjectArray from navigator_msgs.srv import ObjectDBQuery, ObjectDBQueryRequest -from twisted.internet import defer +from nav_msgs.msg import Odometry +from twisted.internet import defer, threads from txros import util +import time +import sys from sets import Set +from missing_perception_object import MissingPerceptionObject +import navigator_tools as nt +import numpy as np __author__ = "Tess Bianchi" @@ -14,19 +20,28 @@ def __init__(self, nh): """Initialize the DB helper class.""" self.found = Set() self.nh = nh + self.position = None + self.rot = None self.new_object_subscriber = None self.ensuring_objects = False self.ensuring_object_dep = None self.ensuring_object_cb = None @util.cancellableInlineCallbacks - def init_(self): + def init_(self, navigator=None): """Initialize the txros parts of the DBHelper.""" self._sub_database = yield self.nh.subscribe('/database/objects', PerceptionObjectArray, self.object_cb) self._database = yield self.nh.get_service_client("/database/requests", ObjectDBQuery) - + self.navigator = navigator + if navigator is None: + self._odom_sub = yield self.nh.subscribe('/odom', Odometry, self._odom_cb) + else: + self.position = navigator.pose[0] defer.returnValue(self) + def _odom_cb(self, odom): + self.position, self.rot = nt.odometry_to_numpy(odom)[0] + @util.cancellableInlineCallbacks def begin_observing(self, cb): """ @@ -51,8 +66,10 @@ def begin_observing(self, cb): if o.name not in self.found: self.found.add(o.name) self.new_object_subscriber(o) + def object_cb(self, perception_objects): """Callback for the object database.""" + self.total_num = len(perception_objects.objects) for o in perception_objects.objects: if o.name not in self.found: self.found.add(o.name) @@ -68,6 +85,7 @@ def object_cb(self, perception_objects): self.ensuring_object_cb(missings_objs) def remove_found(self, name): + """Remove an object that has been listed as found.""" self.found.remove(name) def ensure_object_permanence(self, object_dep, cb): @@ -82,12 +100,94 @@ def stop_ensuring_object_permanence(self): """Stop ensuring that objects remain in the database.""" self.ensuring_objects = False - def set_color(self, color, name): - """Set the color of an object in the database.""" - raise NotImplementedError() + def _wait_for_position(self, timeout=10): + count = 0 + while self.position is None: + if self.navigator is not None: + self.position = self.navigator.pose[0] + if count > timeout: + return False + count += 1 + time.sleep(1) + return True + + @util.cancellableInlineCallbacks + def get_closest_object(self, objects): + """Get the closest 
object."""
+        pobjs = []
+        for obj in objects:
+            req = ObjectDBQueryRequest()
+            req.name = obj.name
+            resp = yield self._database(req)
+            if len(resp.objects) != 0:
+                pobjs.extend(resp.objects)
+
+        if len(pobjs) == 0:
+            raise MissingPerceptionObject("All")
+
+        min_dist = sys.maxint
+        min_obj = None
+        for o in pobjs:
+            dist = yield self._dist(o)
+            if dist < min_dist:
+                min_dist = dist
+                min_obj = o
+
+        defer.returnValue(min_obj)

-    def get_object(self, name):
+    @util.cancellableInlineCallbacks
+    def _dist(self, x):
+        if self.position is None:
+            success = yield threads.deferToThread(self._wait_for_position)
+            if not success:
+                raise Exception("There is a problem with odom.")
+        defer.returnValue(np.linalg.norm(nt.rosmsg_to_numpy(x.position) - self.position))
+
+    @util.cancellableInlineCallbacks
+    def get_object(self, object_name, volume_only=False, thresh=30, thresh_strict=10):
         """Get an object from the database."""
+        if volume_only:
+            req = ObjectDBQueryRequest()
+            req.name = object_name
+            resp = yield self._database(req)
+            if not resp.found:
+                raise MissingPerceptionObject(object_name)
+            defer.returnValue(resp.objects)
+        else:
+            req = ObjectDBQueryRequest()
+            req.name = "full"
+            resp = yield self._database(req)
+            closest_potential_object = None
+            min_dist = sys.maxint
+            actual_objects = []
+            for o in resp.objects:
+                distance = self._dist(o)
+                if o.name == object_name and distance < thresh_strict:
+                    actual_objects.append(o)
+                if distance < thresh and distance < min_dist:
+                    min_dist = distance
+                    closest_potential_object = o
+            if len(actual_objects) == 0 and min_dist == sys.maxint:
+                raise MissingPerceptionObject(object_name)
+
+            if len(actual_objects) > 1:
+                defer.returnValue(min(self.actual_objects, key=lambda x: self._dist(x)))
+
+            if len(actual_objects) == 1:
+                defer.returnValue(self.actual_objects[0])
+
+            defer.returnValue(closest_potential_object)
+
+    def wait_for_additional_objects(self, timeout=60):
+        num_items = self.total_num
+        start = time.time()
+        while time.time() - start < timeout:
+            if self.total_num > num_items:
+                return True
+        return False
+
+    def set_color(self, color, name):
+        """Set the color of an object in the database."""
         raise NotImplementedError()

     def set_fake_position(self, pos):

From 0b82744461c43b03c077a24d1fdf40b2f2ffd11b Mon Sep 17 00:00:00 2001
From: Tess
Date: Sat, 19 Nov 2016 00:05:13 -0500
Subject: [PATCH 156/267] MISSION PLANNER: fix broken dependency on
 navigator_singleton
---
 .../navigator_tools/navigator_tools/object_database_helper.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/utils/navigator_tools/navigator_tools/object_database_helper.py b/utils/navigator_tools/navigator_tools/object_database_helper.py
index a9881b3f..45431097 100644
--- a/utils/navigator_tools/navigator_tools/object_database_helper.py
+++ b/utils/navigator_tools/navigator_tools/object_database_helper.py
@@ -36,7 +36,8 @@ def init_(self, navigator=None):
         if navigator is None:
             self._odom_sub = yield self.nh.subscribe('/odom', Odometry, self._odom_cb)
         else:
-            self.position = navigator.pose[0]
+            self.position = yield navigator.tx_pose
+            self.position = self.position[0]
         defer.returnValue(self)

From a781cdee4a52755cc5bfb468437660083000ac50 Mon Sep 17 00:00:00 2001
From: Tess
Date: Sat, 19 Nov 2016 02:51:01 -0500
Subject: [PATCH 157/267] EXPLORE MISSION: adds kwargs to missions, makes
 explore mission, fixes scan the code to work with new circles
---
 .../navigator_tools/object_database_helper.py | 52 +++++++++++++++++++++++++--
 1 file changed, 49 insertions(+),
3 deletions(-) diff --git a/utils/navigator_tools/navigator_tools/object_database_helper.py b/utils/navigator_tools/navigator_tools/object_database_helper.py index 45431097..9ea4469d 100644 --- a/utils/navigator_tools/navigator_tools/object_database_helper.py +++ b/utils/navigator_tools/navigator_tools/object_database_helper.py @@ -26,6 +26,8 @@ def __init__(self, nh): self.ensuring_objects = False self.ensuring_object_dep = None self.ensuring_object_cb = None + self.looking_for = None + self.is_found = False @util.cancellableInlineCallbacks def init_(self, navigator=None): @@ -43,6 +45,14 @@ def init_(self, navigator=None): def _odom_cb(self, odom): self.position, self.rot = nt.odometry_to_numpy(odom)[0] + @util.cancellableInlineCallbacks + def get_object_by_id(self, my_id): + req = ObjectDBQueryRequest() + req.name = 'all' + resp = yield self._database(req) + ans = [obj for obj in resp.objects if obj.id == my_id][0] + defer.returnValue(ans) + @util.cancellableInlineCallbacks def begin_observing(self, cb): """ @@ -68,10 +78,35 @@ def begin_observing(self, cb): self.found.add(o.name) self.new_object_subscriber(o) + @util.cancellableInlineCallbacks + def get_unknown_and_low_conf(self): + req = ObjectDBQueryRequest() + req.name = 'all' + resp = yield self._database(req) + m = [] + for o in resp.objects: + if o.name == 'unknown': + m.append(o) + elif o.confidence < 50: + m.append(o) + defer.returnValue(m) + + def set_looking_for(self, name): + self.looking_for = name + + def found(self): + if self.is_found: + self.looking_for = None + self.is_found = False + return True + return False + def object_cb(self, perception_objects): """Callback for the object database.""" self.total_num = len(perception_objects.objects) for o in perception_objects.objects: + if o.name == self.looking_for: + self.is_found = True if o.name not in self.found: self.found.add(o.name) if self.new_object_subscriber is not None: @@ -156,7 +191,7 @@ def get_object(self, object_name, volume_only=False, thresh=30, thresh_strict=10 defer.returnValue(resp.objects) else: req = ObjectDBQueryRequest() - req.name = "full" + req.name = "all" resp = yield self._database(req) closest_potential_object = None min_dist = sys.maxint @@ -168,14 +203,25 @@ def get_object(self, object_name, volume_only=False, thresh=30, thresh_strict=10 if distance < thresh and distance < min_dist: min_dist = distance closest_potential_object = o + # print [x.name for x in actual_objects] + # print closest_potential_object.name + # sys.exit() + if len(actual_objects) == 0 and min_dist == sys.maxint: raise MissingPerceptionObject(object_name) if len(actual_objects) > 1: - defer.returnValue(min(self.actual_objects, key=lambda x: self._dist(x))) + min_dist = sys.maxint + min_obj = None + for o in actual_objects: + dist = yield self._dist(o) + if dist < min_dist: + min_dist = dist + min_obj = o + defer.returnValue(min_obj) if len(actual_objects) == 1: - defer.returnValue(self.actual_objects[0]) + defer.returnValue(actual_objects[0]) defer.returnValue(closest_potential_object) From d31889abb6ffc041d29cc6f20e47fb76e3a19b91 Mon Sep 17 00:00:00 2001 From: Tess Date: Sat, 19 Nov 2016 03:20:20 -0500 Subject: [PATCH 158/267] EXPLORER MISSION: finish the explore mission --- utils/navigator_tools/navigator_tools/object_database_helper.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/utils/navigator_tools/navigator_tools/object_database_helper.py b/utils/navigator_tools/navigator_tools/object_database_helper.py index 9ea4469d..1857ee5c 100644 --- 
a/utils/navigator_tools/navigator_tools/object_database_helper.py +++ b/utils/navigator_tools/navigator_tools/object_database_helper.py @@ -94,7 +94,7 @@ def get_unknown_and_low_conf(self): def set_looking_for(self, name): self.looking_for = name - def found(self): + def is_found_func(self): if self.is_found: self.looking_for = None self.is_found = False From 5d96abaa077e1fc3118d6e1e0100cef5d7e9914f Mon Sep 17 00:00:00 2001 From: Tess Date: Sat, 19 Nov 2016 21:50:05 -0500 Subject: [PATCH 159/267] LAKE DAY CHANGES: fixes from lake day --- .../navigator_tools/object_database_helper.py | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/utils/navigator_tools/navigator_tools/object_database_helper.py b/utils/navigator_tools/navigator_tools/object_database_helper.py index 1857ee5c..4c3543ab 100644 --- a/utils/navigator_tools/navigator_tools/object_database_helper.py +++ b/utils/navigator_tools/navigator_tools/object_database_helper.py @@ -47,6 +47,7 @@ def _odom_cb(self, odom): @util.cancellableInlineCallbacks def get_object_by_id(self, my_id): + print my_id req = ObjectDBQueryRequest() req.name = 'all' resp = yield self._database(req) @@ -88,7 +89,8 @@ def get_unknown_and_low_conf(self): if o.name == 'unknown': m.append(o) elif o.confidence < 50: - m.append(o) + pass + # m.append(o) defer.returnValue(m) def set_looking_for(self, name): @@ -177,10 +179,14 @@ def _dist(self, x): success = yield threads.deferToThread(self._wait_for_position) if not success: raise Exception("There is a problem with odom.") + if self.navigator is not None: + position = yield self.navigator.tx_pose + position = position[0] + self.position = position defer.returnValue(np.linalg.norm(nt.rosmsg_to_numpy(x.position) - self.position)) @util.cancellableInlineCallbacks - def get_object(self, object_name, volume_only=False, thresh=30, thresh_strict=10): + def get_object(self, object_name, volume_only=False, thresh=50, thresh_strict=50): """Get an object from the database.""" if volume_only: req = ObjectDBQueryRequest() @@ -197,7 +203,7 @@ def get_object(self, object_name, volume_only=False, thresh=30, thresh_strict=10 min_dist = sys.maxint actual_objects = [] for o in resp.objects: - distance = self._dist(o) + distance = yield self._dist(o) if o.name == object_name and distance < thresh_strict: actual_objects.append(o) if distance < thresh and distance < min_dist: @@ -220,6 +226,8 @@ def get_object(self, object_name, volume_only=False, thresh=30, thresh_strict=10 min_obj = o defer.returnValue(min_obj) + print "good" + if len(actual_objects) == 1: defer.returnValue(actual_objects[0]) From 5943c7611979fd2d318b4e35a1f3e120d6f03d8c Mon Sep 17 00:00:00 2001 From: David Date: Fri, 21 Oct 2016 17:00:11 -0400 Subject: [PATCH 160/267] UTILS: add navigator_tools c++ library --- utils/navigator_tools/CMakeLists.txt | 24 ++++++++++- .../include/navigator_tools.hpp | 15 +++++++ utils/navigator_tools/package.xml | 7 ++- utils/navigator_tools/src/navigator_tools.cpp | 43 +++++++++++++++++++ 4 files changed, 85 insertions(+), 4 deletions(-) create mode 100644 utils/navigator_tools/include/navigator_tools.hpp create mode 100644 utils/navigator_tools/src/navigator_tools.cpp diff --git a/utils/navigator_tools/CMakeLists.txt b/utils/navigator_tools/CMakeLists.txt index 4a806468..223229ae 100644 --- a/utils/navigator_tools/CMakeLists.txt +++ b/utils/navigator_tools/CMakeLists.txt @@ -1,6 +1,14 @@ cmake_minimum_required(VERSION 2.8.3) project(navigator_tools) -find_package(catkin REQUIRED COMPONENTS rostest std_msgs 
dynamic_reconfigure) +SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -g -pedantic -Wall") + +find_package(catkin + REQUIRED COMPONENTS + rostest + std_msgs + roscpp + dynamic_reconfigure +) catkin_python_setup() generate_dynamic_reconfigure_options( @@ -10,9 +18,21 @@ generate_dynamic_reconfigure_options( # make sure configure headers are built before any node using them #add_dependencies(lidar ${PROJECT_NAME}_gencfg) -catkin_package() +catkin_package( + INCLUDE_DIRS + include + LIBRARIES + navigator_tools_lib + CATKIN_DEPENDS + roscpp +) # Add folders to be run by python nosetests if(CATKIN_ENABLE_TESTING) catkin_add_nosetests(tests) endif() + +add_library(navigator_tools_lib src/navigator_tools.cpp) +target_include_directories(navigator_tools_lib PUBLIC include) +target_link_libraries(navigator_tools_lib ${catkin_LIBRARIES} ${Boost_LIBRARIES}) +target_compile_options(navigator_tools_lib PUBLIC -std=c++11) \ No newline at end of file diff --git a/utils/navigator_tools/include/navigator_tools.hpp b/utils/navigator_tools/include/navigator_tools.hpp new file mode 100644 index 00000000..32709819 --- /dev/null +++ b/utils/navigator_tools/include/navigator_tools.hpp @@ -0,0 +1,15 @@ +#pragma once + +#include +#include +#include + +namespace nav{ +namespace tools{ + +// Returns a vector of topic names (std::string) that end with "image_rect_color" +// if color is false then it looks for those that end in "image_rect" +std::vector getRectifiedImageTopics(bool color = true); + +} // namespace nav::tools +} // namespace nav \ No newline at end of file diff --git a/utils/navigator_tools/package.xml b/utils/navigator_tools/package.xml index 8ecc7aa5..2da73538 100644 --- a/utils/navigator_tools/package.xml +++ b/utils/navigator_tools/package.xml @@ -2,12 +2,15 @@ navigator_tools 1.0.0 - The math_helpers package - Zach Goins + The navigator_tools package + David Soto + Matthew Langford MIT catkin rostest + roscpp rospy + roscpp \ No newline at end of file diff --git a/utils/navigator_tools/src/navigator_tools.cpp b/utils/navigator_tools/src/navigator_tools.cpp new file mode 100644 index 00000000..50eab951 --- /dev/null +++ b/utils/navigator_tools/src/navigator_tools.cpp @@ -0,0 +1,43 @@ +#include + +namespace nav{ +namespace tools{ + +using namespace std; + +vector getRectifiedImageTopics(bool color) +{ + // get all currently published topics from master + ros::master::V_TopicInfo master_topics; + ros::master::getTopics(master_topics); + + // Define lambda to determine if a topic is a rectified img topic + string target_pattern; + target_pattern = color? "image_rect_color" : "image_rect"; + auto isRectImgTopic = [&target_pattern](ros::master::TopicInfo topic_info) + { + // Find last slash + size_t final_slash = topic_info.name.rfind('/'); + + // Match end of topic name to image_rect pattern + if(final_slash == string::npos) + return false; + else + { + string topic_name_end = topic_info.name.substr(final_slash + 1); + return (topic_name_end.find(target_pattern) != string::npos)? 
true : false; + } + }; // end lambda isRectImgTopic + + // return list of rectified image topics + vector image_rect_topics; + for(ros::master::TopicInfo topic : master_topics) + { + if(isRectImgTopic(topic)) + image_rect_topics.push_back(topic.name); + } + return image_rect_topics; +} + +} // namespace nav::tools +} // namespace nav \ No newline at end of file From 4033489d5dcdf658282c169ba1cbb2db240adbf8 Mon Sep 17 00:00:00 2001 From: David Date: Fri, 21 Oct 2016 17:04:30 -0400 Subject: [PATCH 161/267] PERCEPTION: clean up navigator_perception CMakeLists --- perception/navigator_vision/CMakeLists.txt | 116 +++++++-------------- 1 file changed, 36 insertions(+), 80 deletions(-) diff --git a/perception/navigator_vision/CMakeLists.txt b/perception/navigator_vision/CMakeLists.txt index 220eb2cc..5404c100 100644 --- a/perception/navigator_vision/CMakeLists.txt +++ b/perception/navigator_vision/CMakeLists.txt @@ -35,10 +35,9 @@ catkin_python_setup() catkin_package( INCLUDE_DIRS include - exFAST_SparseStereo/src/sparsestereo LIBRARIES navigator_vision_lib - sparsestereo + navigator_sparsestereo CATKIN_DEPENDS roscpp rospy @@ -47,6 +46,7 @@ catkin_package( geometry_msgs sensor_msgs navigator_msgs + navigator_tools DEPENDS system_lib image_transport @@ -57,7 +57,6 @@ catkin_package( include_directories( include - exFAST_SparseStereo/src SYSTEM ${PCL_INCLUDE_DIRS} ${Boost_INCLUDE_DIR} @@ -66,89 +65,23 @@ include_directories( link_directories(${PCL_LIBRARY_DIRS}) add_definitions(${PCL_DEFINITIONS}) -SET(EXFAST_SRC_DIR "exFAST_SparseStereo/src/sparsestereo") -add_library( navigator_sparsestereo SHARED - ${EXFAST_SRC_DIR}/calibrationresult.h - ${EXFAST_SRC_DIR}/calibrationresult.cpp - ${EXFAST_SRC_DIR}/censuswindow.h - ${EXFAST_SRC_DIR}/exception.h - ${EXFAST_SRC_DIR}/extendedfast.cpp - ${EXFAST_SRC_DIR}/extendedfast.h - ${EXFAST_SRC_DIR}/fast9.h - ${EXFAST_SRC_DIR}/fast9-inl.h - ${EXFAST_SRC_DIR}/hammingdistance.cpp - ${EXFAST_SRC_DIR}/hammingdistance.h - ${EXFAST_SRC_DIR}/simd.h - ${EXFAST_SRC_DIR}/simd.cpp - ${EXFAST_SRC_DIR}/sparsematch.h - ${EXFAST_SRC_DIR}/sparserectification.cpp - ${EXFAST_SRC_DIR}/sparserectification.h - ${EXFAST_SRC_DIR}/sparsestereo.h - ${EXFAST_SRC_DIR}/sparsestereo-inl.h - ${EXFAST_SRC_DIR}/stereorectification.cpp - ${EXFAST_SRC_DIR}/stereorectification.h - ${EXFAST_SRC_DIR}/imageconversion.h - ${EXFAST_SRC_DIR}/imageconversion.cpp - ${EXFAST_SRC_DIR}/census.h - ${EXFAST_SRC_DIR}/census-inl.h - ${EXFAST_SRC_DIR}/census.cpp -) - -target_link_libraries( - navigator_sparsestereo - ${catkin_LIBRARIES} - ${Boost_LIBRARIES} - ${OpenCV_INCLUDE_DIRS} -) - +FILE(GLOB EXFAST_SOURCES exFAST_SparseStereo/src/sparsestereo/*) +add_library( navigator_sparsestereo SHARED ${EXFAST_SOURCES}) +target_include_directories(navigator_sparsestereo PUBLIC exFAST_SparseStereo/src) +target_link_libraries(navigator_sparsestereo ${catkin_LIBRARIES} ${Boost_LIBRARIES} ${OpenCV_INCLUDE_DIRS}) set_target_properties(navigator_sparsestereo PROPERTIES COMPILE_FLAGS "-O3 -DNDEBUG -fopenmp -g -Wall -march=native -msse -msse2 -msse3 -mssse3 -msse4 -ffast-math -mfpmath=sse") -install(TARGETS navigator_sparsestereo - ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} - LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} - RUNTIME DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION} -) - add_library( navigator_vision_lib src/navigator_vision_lib/visualization.cc src/navigator_vision_lib/cv_utils.cc - src/navigator_vision_lib/point_cloud_algorithms.cc + src/navigator_vision_lib/pcd_colorizer.cpp ) - 
target_include_directories(navigator_vision_lib PUBLIC include/navigator_vision_lib) +target_link_libraries(navigator_vision_lib ${catkin_LIBRARIES} ${Boost_LIBRARIES}) -target_link_libraries( - - navigator_vision_lib - ${catkin_LIBRARIES} - ${Boost_LIBRARIES} -) - -install(TARGETS navigator_vision_lib - ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} - LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} - RUNTIME DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION} -) - -install(DIRECTORY include/${PROJECT_NAME}/ - DESTINATION ${CATKIN_PACKAGE_INCLUDE_DESTINATION} - FILES_MATCHING PATTERN "*.hpp" - PATTERN ".svn" EXCLUDE -) - - -add_executable( - stereo_point_cloud_driver - nodes/stereo_point_cloud_driver.cpp -) - -add_dependencies( - stereo_point_cloud_driver - navigator_msgs_generate_messages_cpp - ${catkin_EXPORTED_TARGETS} -) - +add_executable(stereo_point_cloud_driver nodes/stereo_point_cloud_driver.cpp) +add_dependencies(stereo_point_cloud_driver navigator_vision_lib ${catkin_EXPORTED_TARGETS}) target_link_libraries( stereo_point_cloud_driver navigator_sparsestereo @@ -160,12 +93,11 @@ target_link_libraries( ${Boost_LIBRARIES} ${OpenCV_INCLUDE_DIRS} ) - set_target_properties(stereo_point_cloud_driver PROPERTIES COMPILE_FLAGS "-O3 -DNDEBUG -fopenmp -g -Wall -march=native -msse -msse2 -msse3 -mssse3 -msse4 -ffast-math -mfpmath=sse") -add_executable( camera_stream_demo nodes/camera_stream_demo.cpp) - +add_executable(camera_stream_demo nodes/camera_stream_demo.cpp) +add_dependencies(camera_stream_demo navigator_vision_lib ${catkin_EXPORTED_TARGETS}) target_link_libraries( camera_stream_demo navigator_vision_lib @@ -200,6 +132,30 @@ target_link_libraries(camera_to_lidar ${OpenCV_LIBS} ) +add_executable(velodyne_pcd_colorizer nodes/velodyne_pcd_colorizer.cpp) +add_dependencies(velodyne_pcd_colorizer navigator_vision_lib ${catkin_EXPORTED_TARGETS}) +target_link_libraries( + velodyne_pcd_colorizer + navigator_vision_lib + navigator_tools_lib + ${PCL_COMMON_LIBRARIES} + ${PCL_IO_LIBRARIES} + ${PCL_LIBRARIES} + ${catkin_LIBRARIES} + ${Boost_LIBRARIES} + ${OpenCV_INCLUDE_DIRS} +) + +install(TARGETS navigator_vision_lib navigator_sparsestereo + ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} + RUNTIME DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION} +) +install(DIRECTORY include/${PROJECT_NAME}/ + DESTINATION ${CATKIN_PACKAGE_INCLUDE_DESTINATION} + FILES_MATCHING PATTERN "*.hpp" + PATTERN ".svn" EXCLUDE +) install( TARGETS ) From 7614400bfce1448d46d668a60143d9cba972dd98 Mon Sep 17 00:00:00 2001 From: David Date: Fri, 21 Oct 2016 17:10:19 -0400 Subject: [PATCH 162/267] PERCEPTION: separate pcd_colorizer node from stereo driver --- .../include/navigator_vision_lib/camera_model.hpp | 2 +- .../navigator_vision/nodes/stereo_point_cloud_driver.cpp | 2 +- perception/navigator_vision/package.xml | 4 +++- 3 files changed, 5 insertions(+), 3 deletions(-) diff --git a/perception/navigator_vision/include/navigator_vision_lib/camera_model.hpp b/perception/navigator_vision/include/navigator_vision_lib/camera_model.hpp index 329430b4..477b5497 100644 --- a/perception/navigator_vision/include/navigator_vision_lib/camera_model.hpp +++ b/perception/navigator_vision/include/navigator_vision_lib/camera_model.hpp @@ -37,7 +37,7 @@ class CameraModel{ int COLS = 0; // Distortion model (only the plum-bob model is currently supported) - T D[] = {0, 0, 0, 0, 0}; + T D[5] = {0, 0, 0, 0, 0}; // Camera Geometry Eigen::Matrix K; // Camera intrinsics diff --git 
a/perception/navigator_vision/nodes/stereo_point_cloud_driver.cpp b/perception/navigator_vision/nodes/stereo_point_cloud_driver.cpp index 015d5d83..cf76eda9 100644 --- a/perception/navigator_vision/nodes/stereo_point_cloud_driver.cpp +++ b/perception/navigator_vision/nodes/stereo_point_cloud_driver.cpp @@ -41,7 +41,7 @@ #include #include #include -#include +#include void left_image_callback( const sensor_msgs::ImageConstPtr &image_msg_ptr, diff --git a/perception/navigator_vision/package.xml b/perception/navigator_vision/package.xml index 40a0abe9..d351512d 100644 --- a/perception/navigator_vision/package.xml +++ b/perception/navigator_vision/package.xml @@ -5,7 +5,7 @@ Nodes and libraries used for computer vision perception on Navigator tess - david + David Soto MIT catkin roscpp @@ -32,4 +32,6 @@ tf2_sensor_msgs tf2_sensor_msgs tf2_geometry_msgs + navigator_tools + navigator_tools From ff924dbb4dd43aa5af0af59760b6d4e37dcbcf69 Mon Sep 17 00:00:00 2001 From: David Date: Thu, 27 Oct 2016 22:15:24 -0400 Subject: [PATCH 163/267] PERCEPTION: restructure pcd_colorizer and image acquisition code, add pcd IO --- perception/navigator_vision/CMakeLists.txt | 6 ++++-- perception/navigator_vision/nodes/camera_stream_demo.cpp | 2 +- .../navigator_vision/nodes/stereo_point_cloud_driver.cpp | 2 -- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/perception/navigator_vision/CMakeLists.txt b/perception/navigator_vision/CMakeLists.txt index 5404c100..ebc2ce41 100644 --- a/perception/navigator_vision/CMakeLists.txt +++ b/perception/navigator_vision/CMakeLists.txt @@ -75,10 +75,12 @@ add_library( navigator_vision_lib src/navigator_vision_lib/visualization.cc src/navigator_vision_lib/cv_utils.cc - src/navigator_vision_lib/pcd_colorizer.cpp + src/navigator_vision_lib/colorizer/pcd_colorizer.cpp + src/navigator_vision_lib/colorizer/color_observation.cpp + src/navigator_vision_lib/pcd_sub_pub_algorithm.cpp ) target_include_directories(navigator_vision_lib PUBLIC include/navigator_vision_lib) -target_link_libraries(navigator_vision_lib ${catkin_LIBRARIES} ${Boost_LIBRARIES}) +target_link_libraries(navigator_vision_lib navigator_tools_lib ${catkin_LIBRARIES} ${Boost_LIBRARIES}) add_executable(stereo_point_cloud_driver nodes/stereo_point_cloud_driver.cpp) add_dependencies(stereo_point_cloud_driver navigator_vision_lib ${catkin_EXPORTED_TARGETS}) diff --git a/perception/navigator_vision/nodes/camera_stream_demo.cpp b/perception/navigator_vision/nodes/camera_stream_demo.cpp index b7ca25c2..7829d222 100644 --- a/perception/navigator_vision/nodes/camera_stream_demo.cpp +++ b/perception/navigator_vision/nodes/camera_stream_demo.cpp @@ -1,7 +1,7 @@ #include #include #include -#include // dont forget this include for camera stream functionality +#include // dont forget this include for camera stream functionality using namespace std; diff --git a/perception/navigator_vision/nodes/stereo_point_cloud_driver.cpp b/perception/navigator_vision/nodes/stereo_point_cloud_driver.cpp index cf76eda9..3b06a12c 100644 --- a/perception/navigator_vision/nodes/stereo_point_cloud_driver.cpp +++ b/perception/navigator_vision/nodes/stereo_point_cloud_driver.cpp @@ -41,7 +41,6 @@ #include #include #include -#include void left_image_callback( const sensor_msgs::ImageConstPtr &image_msg_ptr, @@ -135,7 +134,6 @@ int main(int argc, char** argv) { image_geometry::PinholeCameraModel left_cam_model, right_cam_model; cv_bridge::CvImagePtr input_bridge; string activation_srv_name {"stereo/activation_srv"}; - nav::PcdColorizer 
vel_colorizer{nh, "/velodyne_points", "/colored_velodyne_cloud", "/stereo/left/image_rect_color", "/stereo_left_cam"};

   // Default parameters
   string img_topic_left_default = "/stereo/left/image_raw/";

From 955d031be4d7c700628ec0e331b2d53c9c87d41e Mon Sep 17 00:00:00 2001
From: David
Date: Sat, 29 Oct 2016 17:51:24 -0400
Subject: [PATCH 164/267] PERCEPTION: collect all colorizer common resources
 into 'common.hpp'
---
 perception/navigator_vision/CMakeLists.txt | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/perception/navigator_vision/CMakeLists.txt b/perception/navigator_vision/CMakeLists.txt
index ebc2ce41..1c5308c6 100644
--- a/perception/navigator_vision/CMakeLists.txt
+++ b/perception/navigator_vision/CMakeLists.txt
@@ -76,8 +76,9 @@ add_library(
   src/navigator_vision_lib/visualization.cc
   src/navigator_vision_lib/cv_utils.cc
   src/navigator_vision_lib/colorizer/pcd_colorizer.cpp
+  src/navigator_vision_lib/colorizer/single_cloud_processor.cpp
+  src/navigator_vision_lib/colorizer/camera_observer.cpp
   src/navigator_vision_lib/colorizer/color_observation.cpp
-  src/navigator_vision_lib/pcd_sub_pub_algorithm.cpp
 )
 target_include_directories(navigator_vision_lib PUBLIC include/navigator_vision_lib)
 target_link_libraries(navigator_vision_lib navigator_tools_lib ${catkin_LIBRARIES} ${Boost_LIBRARIES})

From f50590f0e3565f21cfb1c20b8f333ee7e23dddc0 Mon Sep 17 00:00:00 2001
From: David
Date: Sat, 5 Nov 2016 18:18:58 -0400
Subject: [PATCH 165/267] TOOLS: add conversion to string operator
---
 utils/navigator_tools/include/navigator_tools.hpp | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/utils/navigator_tools/include/navigator_tools.hpp b/utils/navigator_tools/include/navigator_tools.hpp
index 32709819..755aae9c 100644
--- a/utils/navigator_tools/include/navigator_tools.hpp
+++ b/utils/navigator_tools/include/navigator_tools.hpp
@@ -11,5 +11,11 @@ namespace tools{
 // if color is false then it looks for those that end in "image_rect"
 std::vector getRectifiedImageTopics(bool color = true);

+// Converts raw string literals to std::strings
+inline std::string operator "" _s(const char* str, size_t /*length*/)
+{
+  return std::string(str);
+}
+
 } // namespace nav::tools
 } // namespace nav
\ No newline at end of file

From bd62273b447191fb2704be78adb8da6999ff6233 Mon Sep 17 00:00:00 2001
From: David
Date: Sat, 5 Nov 2016 18:22:02 -0400
Subject: [PATCH 166/267] PERCEPTION: add ros shutdown handling to nodes
---
 .../navigator_vision/nodes/camera_stream_demo.cpp | 15 ++++++++-------
 1 file changed, 8 insertions(+), 7 deletions(-)

diff --git a/perception/navigator_vision/nodes/camera_stream_demo.cpp b/perception/navigator_vision/nodes/camera_stream_demo.cpp
index 7829d222..4774ff34 100644
--- a/perception/navigator_vision/nodes/camera_stream_demo.cpp
+++ b/perception/navigator_vision/nodes/camera_stream_demo.cpp
@@ -1,7 +1,7 @@
 #include
 #include
 #include
-#include  // dont forget this include for camera stream functionality
+#include  // dont forget this include for camera stream functionality

 using namespace std;

@@ -13,17 +13,18 @@ int main(int argc, char **argv)
 {
   ros::init(argc, argv, "camera_stream_demo");
   ros::NodeHandle nh;

-  // These two lines are all you need to create a self updating buffer
-  // of images published to an image topic.
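The comment above describes the heart of ROSCameraStream: a self-updating, fixed-length history of the newest frames on an image topic. For readers more at home in Python, the same idea can be approximated with a deque (an illustrative sketch, not part of the patch set; the topic name is an assumption):

    import collections

    import rospy
    from cv_bridge import CvBridge
    from sensor_msgs.msg import Image

    class CameraStreamBuffer(object):
        '''Keeps a self-updating, fixed-length history of the newest images on a topic.'''

        def __init__(self, topic, buffer_size=10):
            self.frames = collections.deque(maxlen=buffer_size)  # old frames fall off the end
            self._bridge = CvBridge()
            self._sub = rospy.Subscriber(topic, Image, self._new_frame_cb, queue_size=1)

        def _new_frame_cb(self, msg):
            # Newest frame sits at index 0, mirroring the C++ stream's operator[]
            self.frames.appendleft(self._bridge.imgmsg_to_cv2(msg, desired_encoding='bgr8'))

    if __name__ == '__main__':
        rospy.init_node('camera_stream_buffer_demo')
        CameraStreamBuffer('/stereo/left/image_rect_color')
        rospy.spin()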
// Template argument should be cv::Vec3b for color images or uint8_t
   // For grayscale images
-  nav::ROSCameraStream left_cam_stream(nh, history_length);
-  left_cam_stream.init(cam_topic);
+  nav::ROSCameraStream left_cam_stream(nh, history_length);  // Constructs empty inactive
+                                                             // camera stream object
+  if(!left_cam_stream.init(cam_topic))  // Initializes object, if successful, object will automatically
+    return -1;                          // store a history of images published to cam_topic in its buffer

   // Display most recent and oldest frame in the buffer
-  while(cv::waitKey(100) == -1)
+  while(cv::waitKey(100) == -1 && left_cam_stream.ok())
   {
-    size_t size = left_cam_stream.length();
+    static int i = 0;
+    size_t size = left_cam_stream.size();
     if(size == 200)
     {
       cv::imshow("newest_frame", left_cam_stream[0]->image());

From 019fe1a613541a53a00bc47ada0abb7dd678d5c4 Mon Sep 17 00:00:00 2001
From: David
Date: Mon, 28 Nov 2016 15:44:28 -0500
Subject: [PATCH 167/267] PERCEPTION: refactoring of pcd colorizer that gets
 observations from all cameras
---
 perception/navigator_vision/CMakeLists.txt | 3 +-
 .../colorizer/camera_observer.hpp | 45 +++
 .../colorizer/color_observation.hpp | 45 +++
 .../navigator_vision_lib/colorizer/common.hpp | 30 ++
 .../colorizer/pcd_colorizer.hpp | 72 ++++
 .../colorizer/single_cloud_processor.hpp | 41 +++
 .../image_acquisition/camera_frame.hpp | 202 +++++++++++
 .../camera_frame_sequence.hpp | 71 ++++
 .../image_acquisition/camera_model.hpp | 62 ++++
 .../image_acquisition/ros_camera_stream.hpp | 340 ++++++++++++++++++
 .../pcd_sub_pub_algorithm.hpp | 81 +++++
 .../nodes/camera_stream_demo.cpp | 1 -
 .../navigator_vision/nodes/pinger_finder.py | 2 +-
 .../navigator_vision/nodes/pinger_locator.py | 45 +++
 .../nodes/velodyne_pcd_colorizer.cpp | 26 ++
 perception/navigator_vision/package.xml | 2 +
 .../colorizer/camera_observer.cpp | 73 ++++
 .../colorizer/color_observation.cpp | 15 +
 .../colorizer/pcd_colorizer.cpp | 173 +++++++++
 .../colorizer/single_cloud_processor.cpp | 58 +++
 20 files changed, 1384 insertions(+), 3 deletions(-)
 create mode 100644 perception/navigator_vision/include/navigator_vision_lib/colorizer/camera_observer.hpp
 create mode 100644 perception/navigator_vision/include/navigator_vision_lib/colorizer/color_observation.hpp
 create mode 100644 perception/navigator_vision/include/navigator_vision_lib/colorizer/common.hpp
 create mode 100644 perception/navigator_vision/include/navigator_vision_lib/colorizer/pcd_colorizer.hpp
 create mode 100644 perception/navigator_vision/include/navigator_vision_lib/colorizer/single_cloud_processor.hpp
 create mode 100644 perception/navigator_vision/include/navigator_vision_lib/image_acquisition/camera_frame.hpp
 create mode 100644 perception/navigator_vision/include/navigator_vision_lib/image_acquisition/camera_frame_sequence.hpp
 create mode 100644 perception/navigator_vision/include/navigator_vision_lib/image_acquisition/camera_model.hpp
 create mode 100644 perception/navigator_vision/include/navigator_vision_lib/image_acquisition/ros_camera_stream.hpp
 create mode 100644 perception/navigator_vision/include/navigator_vision_lib/pcd_sub_pub_algorithm.hpp
 mode change 100644 => 100755 perception/navigator_vision/nodes/camera_stream_demo.cpp
 create mode 100644 perception/navigator_vision/nodes/pinger_locator.py
 create mode 100644 perception/navigator_vision/nodes/velodyne_pcd_colorizer.cpp
 create mode 100644
perception/navigator_vision/src/navigator_vision_lib/colorizer/color_observation.cpp create mode 100644 perception/navigator_vision/src/navigator_vision_lib/colorizer/pcd_colorizer.cpp create mode 100644 perception/navigator_vision/src/navigator_vision_lib/colorizer/single_cloud_processor.cpp diff --git a/perception/navigator_vision/CMakeLists.txt b/perception/navigator_vision/CMakeLists.txt index 1c5308c6..c1887b89 100644 --- a/perception/navigator_vision/CMakeLists.txt +++ b/perception/navigator_vision/CMakeLists.txt @@ -47,6 +47,7 @@ catkin_package( sensor_msgs navigator_msgs navigator_tools + pcl_ros DEPENDS system_lib image_transport @@ -81,7 +82,7 @@ add_library( src/navigator_vision_lib/colorizer/color_observation.cpp ) target_include_directories(navigator_vision_lib PUBLIC include/navigator_vision_lib) -target_link_libraries(navigator_vision_lib navigator_tools_lib ${catkin_LIBRARIES} ${Boost_LIBRARIES}) +target_link_libraries(navigator_vision_lib navigator_tools_lib ${catkin_LIBRARIES} ${Boost_LIBRARIES} ${PCL_COMMON_LIBRARIES} ${PCL_COMMON_LIBRARIES}) add_executable(stereo_point_cloud_driver nodes/stereo_point_cloud_driver.cpp) add_dependencies(stereo_point_cloud_driver navigator_vision_lib ${catkin_EXPORTED_TARGETS}) diff --git a/perception/navigator_vision/include/navigator_vision_lib/colorizer/camera_observer.hpp b/perception/navigator_vision/include/navigator_vision_lib/colorizer/camera_observer.hpp new file mode 100644 index 00000000..bf33008f --- /dev/null +++ b/perception/navigator_vision/include/navigator_vision_lib/colorizer/camera_observer.hpp @@ -0,0 +1,45 @@ +#pragma once + +#include // Common includes are here +#include +#include +#include +#include + +namespace nav{ + +class CameraObserver +{ + using CamStream = ROSCameraStream; + + ros::NodeHandle _nh; + + ROSCameraStream _cam_stream; + tf::TransformListener _tf_listener; + + std::string _err_msg{""}; + bool _ok{false}; + +public: + CameraObserver(ros::NodeHandle &nh, std::string &pcd_in_topic, std::string &cam_topic, size_t hist_size); + + std::shared_ptr getCameraModelPtr() const + { + return _cam_stream.getCameraModelPtr(); + } + + std::vector operator()(const PCD::ConstPtr &pcd) + { + return std::vector{}; + } + + bool ok() const + { + return _ok && ros::ok(); + } + + ColorObservation::VecImg get_color_observations(const PCD::ConstPtr &pcd); + +}; + +} // namespace nav diff --git a/perception/navigator_vision/include/navigator_vision_lib/colorizer/color_observation.hpp b/perception/navigator_vision/include/navigator_vision_lib/colorizer/color_observation.hpp new file mode 100644 index 00000000..b35c7807 --- /dev/null +++ b/perception/navigator_vision/include/navigator_vision_lib/colorizer/color_observation.hpp @@ -0,0 +1,45 @@ +#pragma once + +#include +#include +#include +#include + + +namespace nav{ + +struct alignas(16) ColorObservation +{ + using Vec = std::vector; + using VecImg = cv::Mat_; + // float tstamp; // seconds + float xyz[3]; + uint8_t bgr[3]; +}; + + +class UnoccludedPointsImg +{ +/* This class stores the indices of points on a point cloud that are not occluded + from a given camera view. It quantizes the projection of the point cloud points + into the image plane into pixel bins and selects the point closest to the center + of projection to be the only visible one for a given image pixel. 
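The visibility test described above reduces to: project every cloud point into the image, quantize the projections into pixel bins, and keep only the point nearest the camera in each bin (the class declaration continues below). A small numpy sketch of that selection step, illustrative only:

    import numpy as np

    def nearest_point_per_pixel(pixels, depths):
        # pixels: (N, 2) integer pixel coordinates of the projected cloud points
        # depths: (N,) distance of each point from the camera's center of projection
        # Returns indices of the single nearest point claiming each pixel bin.
        winners = {}
        for i in np.argsort(depths):         # visit nearest points first
            key = (int(pixels[i, 0]), int(pixels[i, 1]))
            if key not in winners:           # first claimant is the closest point
                winners[key] = i
        return np.array(sorted(winners.values()))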
+  */
+
+  ColorObservation::VecImg unoccluded_pt_idxs;
+  cv::Mat_ distance_image;
+  PCDPtr cloud_ptr;
+
+public:
+  UnoccludedPointsImg();
+};
+
+struct PointColorStats
+{
+  float xyz[3];
+  uint8_t bgr[3];
+  float var[3];
+  int n;
+};
+
+} //namespace nav
\ No newline at end of file

diff --git a/perception/navigator_vision/include/navigator_vision_lib/colorizer/common.hpp b/perception/navigator_vision/include/navigator_vision_lib/colorizer/common.hpp
new file mode 100644
index 00000000..465e6240
--- /dev/null
+++ b/perception/navigator_vision/include/navigator_vision_lib/colorizer/common.hpp
@@ -0,0 +1,30 @@
+#pragma once
+
+/*
+  This file includes all of the common dependencies for the files in the colorizer directory.
+  It also forward declares many of the most commonly used types and type aliases.
+*/
+
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+
+namespace nav{
+
+template using PCD = pcl::PointCloud;
+template using PCDPtr = std::shared_ptr>;
+template using SPtrVector = std::vector>;
+template using UPtrVector = std::vector>;
+
+} // namespace nav
\ No newline at end of file

diff --git a/perception/navigator_vision/include/navigator_vision_lib/colorizer/pcd_colorizer.hpp b/perception/navigator_vision/include/navigator_vision_lib/colorizer/pcd_colorizer.hpp
new file mode 100644
index 00000000..04bb9760
--- /dev/null
+++ b/perception/navigator_vision/include/navigator_vision_lib/colorizer/pcd_colorizer.hpp
@@ -0,0 +1,72 @@
+#pragma once
+
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+
+namespace nav{
+
+class PcdColorizer{
+  /*
+    This class adds color information to XYZ-only point clouds if the
+    points in the cloud can be observed by a camera that takes color images
+    Note: needs accurate TF because it uses the ros tf package to transform
+    arbitrary point cloud frames into the frame of the color pinhole camera
+    Note: the rgb camera topic should be streaming rectified images
+  */
+
+  using CamStream = ROSCameraStream;
+
+public:
+
+  PcdColorizer(ros::NodeHandle nh, std::string input_pcd_topic);
+
+  bool ok() const
+  {
+    return _ok && ros::ok();
+  }
+
+private:
+
+  std::mutex change_input_mtx;  // Assures ptr to input cloud won't be changed
+                                // while work relying on it is being done
+
+  // Flags
+  bool _work_to_do = false;  // Unprocessed PCD
+  bool _active = false;      // Activation status
+  bool _ok = false;          // Error flag
+  std::string _err_msg;
+
+  ros::NodeHandle _nh;
+  size_t _img_hist_size{10};  // All camera streams will keep this many frames
+                              // in their buffers
+  SingleCloudProcessor _cloud_processor;
+
+  // Subscribing and storing input
+  std::string _input_pcd_topic;
+  ros::Subscriber _cloud_sub;
+  PCD<>::ConstPtr _current_color_pcd;  // Template default argument is in common.hpp
+
+  // Storing result and publishing
+  std::string _output_pcd_topic;
+  ros::Publisher _cloud_pub;
+  PCD<>::ConstPtr _output_pcd;
+
+  // SPtrVector _ros_cam_ptrs;
+  // SPtrVector _transformed_cloud_ptrs;
+  // SPtrVector _obs_vec_img_ptrs;
+  // PCD color_permanence_pcd;
+
+
+  void _cloud_cb(const PCD<>::ConstPtr &cloud_in);
+  // void _process_pcd(const PointCloud::ConstPtr &cloud_in);
+  // void _combine_pcd();
+  // tf::TransformListener _tf_listener;
+
+};  // end class PcdColorizer
+
+} // namespace nav

diff --git a/perception/navigator_vision/include/navigator_vision_lib/colorizer/single_cloud_processor.hpp
b/perception/navigator_vision/include/navigator_vision_lib/colorizer/single_cloud_processor.hpp new file mode 100644 index 00000000..5b8743fb --- /dev/null +++ b/perception/navigator_vision/include/navigator_vision_lib/colorizer/single_cloud_processor.hpp @@ -0,0 +1,41 @@ +#pragma once + +#include +#include +#include +#include +#include +#include + +namespace nav { + +class SingleCloudProcessor +{ +public: + SingleCloudProcessor(ros::NodeHandle nh, std::string &in_pcd_topic, size_t hist_size); + void operator()(const PCD::ConstPtr &pcd); + bool ok() const { return _ok && ros::ok(); } + +private: + ros::NodeHandle _nh; + std::string in_pcd_topic; + size_t _hist_size; // image history buffer size + + // CameraObserver creates point color observations for a specific camera + UPtrVector _camera_observers; + + // Inter-thread communication + std::promise _start_work_prom; + std::shared_future _start_work_fut{_start_work_prom.get_future()}; + std::vector> _worker_done_proms; + std::vector> _worker_done_futs; + + + int seq = 0; + + bool _ok{false}; + std::string _err_msg{""}; + +}; + +} // namespace nav diff --git a/perception/navigator_vision/include/navigator_vision_lib/image_acquisition/camera_frame.hpp b/perception/navigator_vision/include/navigator_vision_lib/image_acquisition/camera_frame.hpp new file mode 100644 index 00000000..7a0cf66c --- /dev/null +++ b/perception/navigator_vision/include/navigator_vision_lib/image_acquisition/camera_frame.hpp @@ -0,0 +1,202 @@ +#pragma once + +// #include +#include +#include +#include +#include + +#include +#include +#include + + +namespace nav +{ + +enum class PixelType +{ +/* + Enumeration for dealing with different image pixel types + The underlying integers for these enums are compatible with OpenCV's + "CV_C" + macros. The benefit to having these is that we do not have to have OpenCV as a dependency. + Theoretically, the CameraFrame Objects need not use a cv::Mat and this class' functionality + would not be affected. The underscore in front of these enums is to solve naming conflicts + with commonly OpenCV and Eigen macros. +*/ + _8UC1=0, _8SC1, _16UC1, _16SC1, _32SC1, _32FC1, _64FC1, + _8UC2=8, _8SC2, _16UC2, _16SC2, _32SC2, _32FC2, _64FC2, + _8UC3=16, _8SC3, _16UC3, _16SC3, _32SC3, _32FC3, _64FC3, + _8UC4=24, _8SC4, _16UC4, _16SC4, _32SC4, _32FC4, _64FC4, _UNKNOWN = -1 +}; + +template, + typename img_scalar_t = uint8_t, + typename time_t_ = ros::Time, + typename float_t = float> +class CameraFrame +{ +/* + This class is used to represent a single frame from a camera. It contains information about + the time the image was taken, the id and camera parameters of the camera that took it and the + image itself. 
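The PixelType values above line up with OpenCV's packed type codes, where a type is depth + ((channels - 1) << 3); that is why, for example, _8UC3 is 16 (the CameraFrame documentation continues below). A few spot checks of the encoding in plain Python, assuming OpenCV's documented depth codes:

    # OpenCV depth codes: 8U=0, 8S=1, 16U=2, 16S=3, 32S=4, 32F=5, 64F=6
    def cv_type_code(depth, channels):
        return depth + ((channels - 1) << 3)

    assert cv_type_code(0, 1) == 0    # _8UC1
    assert cv_type_code(6, 1) == 6    # _64FC1
    assert cv_type_code(0, 3) == 16   # _8UC3
    assert cv_type_code(5, 4) == 29   # _32FC4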
This is a templated class whose first template parameter is the type of the
+  pixel elements (commonly uint8_t for grayscale images and cv::Vec3b for RGB images)
+*/
+
+public:
+
+  ///////////////////////////////////////////////////////////////////////////////////////////////
+  // Constructors and Destructors ///////////////////////////////////////////////////////////////
+  ///////////////////////////////////////////////////////////////////////////////////////////////
+
+  CameraFrame()  // Default Constructor
+  {
+  }
+
+  CameraFrame(const CameraFrame &other)  // Copy Constructor
+  {
+    this->_seq = other._seq;
+    this->_stamp = other._stamp;
+    this->_image = other._image.clone();  // Object will have unique copy of image data
+    this->_cam_model_ptr = other._cam_model_ptr;
+  }
+
+  // From ROS img msg
+  CameraFrame(const sensor_msgs::ImageConstPtr &image_msg_ptr, cam_model_ptr_t &cam_model_ptr,
+              bool is_rectified = false, float_t store_at_scale = 1.0);
+
+  ///////////////////////////////////////////////////////////////////////////////////////////////
+  // Public Methods /////////////////////////////////////////////////////////////////////////////
+  ///////////////////////////////////////////////////////////////////////////////////////////////
+
+  cam_model_ptr_t getCameraModelPtr() const
+  {
+    return _cam_model_ptr;
+  }
+
+  unsigned int seq() const
+  {
+    return _seq;
+  }
+
+  time_t_ stamp() const
+  {
+    return _stamp;
+  }
+
+  const cv::Mat_& image() const
+  {
+    return _image;
+  }
+
+  bool rectified() const
+  {
+    return _rectified;
+  }
+
+  float_t getImageScale() const
+  {
+    return _img_scale;
+  }
+
+  void copyImgTo(cv::Mat &dest) const
+  {
+    dest = _image.clone();
+  }
+
+  bool isCameraGeometryKnown() const
+  {
+    return _cam_model_ptr != nullptr;
+  }
+
+
+private:
+
+  ///////////////////////////////////////////////////////////////////////////////////////////////
+  // Private Members ////////////////////////////////////////////////////////////////////////////
+  ///////////////////////////////////////////////////////////////////////////////////////////////
+
+  // Frames from the same source will have sequentially increasing seq numbers
+  unsigned int _seq = 0;
+
+  // Time that this image was taken
+  time_t_ _stamp;
+
+  // Stores the image data (not a cv::Mat header pointing to shared image data)
+  cv::Mat_ _image;
+
+  // Points to a camera model object (shared ownership) that stores information about the intrinsic
+  // and extrinsic geometry of the camera used to take this image
+  cam_model_ptr_t _cam_model_ptr = nullptr;
+
+  // Identifies if this image has already been rectified with the distortion parameters in the
+  // associated camera model object
+  bool _rectified = false;
+
+  // Scale of the image compared to that which would be generated by projecting using the camera
+  // geometry expressed in the associated camera model object
+  float_t _img_scale = 1.0f;
+
+  // Stores the pixel data type
+  nav::PixelType TYPE = nav::PixelType::_UNKNOWN;
+
+
+  ///////////////////////////////////////////////////////////////////////////////////////////////
+  // Private Methods ////////////////////////////////////////////////////////////////////////////
+  ///////////////////////////////////////////////////////////////////////////////////////////////
+
+  // void project3DPointToImagePlane(Eigen::Matrix cam_frame_pt);
+};
+
+///////////////////////////////////////////////////////////////////////////////////////////////////
+////// Templated function implementations
///////////////////////////////////////////////////////// +/////////////////////////////////////////////////////////////////////////////////////////////////// + +// Constructor from ROS image message and ROS pinhole camera model +template +CameraFrame::CameraFrame( + const sensor_msgs::ImageConstPtr &image_msg_ptr, cam_model_ptr_t &cam_model_ptr, + bool is_rectified, float_t store_at_scale) +try +{ + // ROS image message decoding + cv_bridge::CvImageConstPtr _ros_img_bridge; + std::string encoding = image_msg_ptr->encoding; + _ros_img_bridge = cv_bridge::toCvShare(image_msg_ptr, encoding); + _ros_img_bridge->image.copyTo(_image); + + // Resize image as requested + if(store_at_scale != 1.0) + { + cv::resize(_image, _image, cv::Size(0, 0), store_at_scale, store_at_scale); + this->_img_scale = store_at_scale; + } + + // Store ptr to cam model object + this->_cam_model_ptr = cam_model_ptr; + + this->_rectified = is_rectified; + + // Get header information + _seq = image_msg_ptr->header.seq; + _stamp = image_msg_ptr->header.stamp; +} +catch(cv_bridge::Exception &e) +{ + ROS_WARN("Error converting sensor_msgs::ImageConstPtr to cv::Mat."); +} +catch( cv::Exception &e ) +{ + std::string err_msg = "Error copying cv::Mat created from ROS image message to the cv::Mat stored in the Camera Frame Object: " + + std::string(e.what()); + ROS_WARN(err_msg.c_str()); + std::cout << "exception caught: " << err_msg << std::endl; +} +catch(const std::exception &e) +{ + ROS_WARN(e.what()); +} + +} // namespace nav + diff --git a/perception/navigator_vision/include/navigator_vision_lib/image_acquisition/camera_frame_sequence.hpp b/perception/navigator_vision/include/navigator_vision_lib/image_acquisition/camera_frame_sequence.hpp new file mode 100644 index 00000000..147d8357 --- /dev/null +++ b/perception/navigator_vision/include/navigator_vision_lib/image_acquisition/camera_frame_sequence.hpp @@ -0,0 +1,71 @@ +#pragma once + +#include +#include +#include +#include +#include + +namespace nav +{ + +template +class CameraFrameSequence +{ +/* + This is an abstract class interface for classes that represent sequences of images. It provides + basic functionality to access individual frames along with camera geometry information. It is + intended to make it more convenient to create visual algorithms that take into account temporal + information. 
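One payoff of keeping a timestamped history is cheap closest-in-time lookup, which the interface below exposes as getFrameFromTime(). The selection itself reduces to a nearest-timestamp search, sketched here in Python for clarity (the real method also rejects times outside the buffer's time bounds):

    def frame_closest_to(frames, desired_time):
        # frames: iterable of (stamp_in_seconds, image) pairs from the buffer
        return min(frames, key=lambda frame: abs(frame[0] - desired_time))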
+*/ + +public: + + // Type aliases + using CamFrame = CameraFrame; + using CamFramePtr = std::shared_ptr; + using CamFrameConstPtr = std::shared_ptr; + using CamFrameSequence = CameraFrameSequence; + using CircularBuffer = boost::circular_buffer; + + /////////////////////////////////////////////////////////////////////////////////////////////// + // Constructors and Destructors /////////////////////////////////////////////////////////////// + /////////////////////////////////////////////////////////////////////////////////////////////// + + CameraFrameSequence() = default; + + CameraFrameSequence(const CameraFrameSequence&) = delete; // Forbid copy construction + + CameraFrameSequence(CameraFrameSequence&&) = delete; // Forbid move construction + + virtual ~CameraFrameSequence() = default; + + + /////////////////////////////////////////////////////////////////////////////////////////////// + // Public Methods ///////////////////////////////////////////////////////////////////////////// + /////////////////////////////////////////////////////////////////////////////////////////////// + + virtual size_t size() const = 0; + + virtual cam_model_ptr_t getCameraModelPtr() const = 0; // returns nullptr if geometry is unknown + + // If geometry is constant then calls to rows, or calls will be valid + virtual bool isGeometryConst() const = 0; + + virtual int rows() const = 0; + + virtual int cols() const = 0; + + // Accessors + + // Returns a copy of the CameraFrame taken closest to the given time + virtual CamFrameConstPtr getFrameFromTime(time_t_) = 0; + + // Returns reference to the nth frame from the most recent. For example frame_sequence[0] is + // the most recent frame, frame_sequence[-1] is the oldest frame, and frame_sequence[-2] is + // the second oldest frame + virtual CamFrameConstPtr operator[](int) = 0; + +}; + +} // namespace nav \ No newline at end of file diff --git a/perception/navigator_vision/include/navigator_vision_lib/image_acquisition/camera_model.hpp b/perception/navigator_vision/include/navigator_vision_lib/image_acquisition/camera_model.hpp new file mode 100644 index 00000000..477b5497 --- /dev/null +++ b/perception/navigator_vision/include/navigator_vision_lib/image_acquisition/camera_model.hpp @@ -0,0 +1,62 @@ +#pragma once + +#include +#include + +namespace nav{ + +template +class CameraModel{ +public: + + /////////////////////////////////////////////////////////////////////////////////////////////// + // Constructors and Destructors /////////////////////////////////////////////////////////////// + /////////////////////////////////////////////////////////////////////////////////////////////// + + CameraModel() + { + } + + ~CameraModel() + { + } + +private: + + /////////////////////////////////////////////////////////////////////////////////////////////// + // Private Methods //////////////////////////////////////////////////////////////////////////// + /////////////////////////////////////////////////////////////////////////////////////////////// + Eigen::Matrix get_projection_matrix(); + + + /////////////////////////////////////////////////////////////////////////////////////////////// + // Private Members //////////////////////////////////////////////////////////////////////////// + /////////////////////////////////////////////////////////////////////////////////////////////// + + int ROWS = 0; + int COLS = 0; + + // Distortion model (only the plum-bob model is currently supported) + T D[5] = {0, 0, 0, 0, 0}; + + // Camera Geometry + Eigen::Matrix K; // Camera 
intrinsics + Eigen::Matrix C; // Center of projection in world frame + Eigen::Matrix R; // Orientation of camera frame relative to world frame + + +}; + + +template +Eigen::Matrix CameraModel::get_projection_matrix() +{ + Eigen::Matrix v; + Eigen::DiagonalMatrix I = v.asDiagonal(); + Eigen::Matrix aug; + aug.block(0, 0, 2, 2) = I; + aug. col(2) = C; + return K * R * aug; +} + +} // namespace nav \ No newline at end of file diff --git a/perception/navigator_vision/include/navigator_vision_lib/image_acquisition/ros_camera_stream.hpp b/perception/navigator_vision/include/navigator_vision_lib/image_acquisition/ros_camera_stream.hpp new file mode 100644 index 00000000..3127f404 --- /dev/null +++ b/perception/navigator_vision/include/navigator_vision_lib/image_acquisition/ros_camera_stream.hpp @@ -0,0 +1,340 @@ +#pragma once + +#include +#include +#include + +#include +#include +#include +#include +#include + +#include +#include +#include + +// DBG +#include + + +namespace nav +{ + +template +class ROSCameraStream : public CameraFrameSequence, img_scalar_t, ros::Time, float_t> +{ + using cam_model_ptr_t = std::shared_ptr; + using time_t_ = ros::Time; + +public: + + // Type aliases + using CamFrame = CameraFrame; + using CamFramePtr = std::shared_ptr; + using CamFrameConstPtr = std::shared_ptr; + using CamFrameSequence = CameraFrameSequence; + using CircularBuffer = boost::circular_buffer; + + /////////////////////////////////////////////////////////////////////////////////////////////// + // Constructors and Destructors /////////////////////////////////////////////////////////////// + /////////////////////////////////////////////////////////////////////////////////////////////// + + // Default Constructor + ROSCameraStream(ros::NodeHandle nh, size_t buffer_size) + : _frame_ptr_circular_buffer(buffer_size), _nh(nh), _it(_nh) {} + + ~ROSCameraStream(); + + /////////////////////////////////////////////////////////////////////////////////////////////// + // Public Methods ///////////////////////////////////////////////////////////////////////////// + /////////////////////////////////////////////////////////////////////////////////////////////// + + // Returns true if the camera stream object has been successfully initialized + bool init(std::string &camera_topic); + + // Returns false if an internal error has ocurred or ROS is in the process of shutting down the + // enclosing node + bool ok() {return _ok && ros::ok();}; + + typename CircularBuffer::iterator begin() const + { + return _frame_ptr_circular_buffer.begin(); + } + + typename CircularBuffer::iterator end() const + { + return _frame_ptr_circular_buffer.end(); + } + + size_t size() const + { + return _frame_ptr_circular_buffer.size(); + } + + cam_model_ptr_t getCameraModelPtr() const + { + return _cam_model_ptr; // returns nullptr if geometry is unknown + } + + // If geometry is constant then calls to rows, or calls will be valid + bool isGeometryConst() const + { + return true; + } + + int rows() const + { + return ROWS; + } + + int cols() const + { + return COLS; + } + + CamFrameConstPtr getFrameFromTime(ros::Time desired_time); + + CamFrameConstPtr operator[](int i); + + /////////////////////////////////////////////////////////////////////////////////////////////// + // Public Members ///////////////////////////////////////////////////////////////////////////// + /////////////////////////////////////////////////////////////////////////////////////////////// + +private: + + 
///////////////////////////////////////////////////////////////////////////////////////////////
+  // Private Methods ////////////////////////////////////////////////////////////////////////////
+  ///////////////////////////////////////////////////////////////////////////////////////////////
+
+  void _addFrame(CamFramePtr &new_frame_ptr);
+
+  void _newFrameCb(const sensor_msgs::ImageConstPtr &image_msg_ptr);
+
+
+  ///////////////////////////////////////////////////////////////////////////////////////////////
+  // Private Members ////////////////////////////////////////////////////////////////////////////
+  ///////////////////////////////////////////////////////////////////////////////////////////////
+
+  // Container for all included CameraFrame objects
+  CircularBuffer _frame_ptr_circular_buffer;
+
+  // cv::Ptr to a shared CameraInfo object for all frames in the sequence. If null, it indicates
+  // that the Frames have different CameraInfo objects which should be examined individually
+  cam_model_ptr_t _cam_model_ptr = nullptr;
+
+  // Time bounds of buffer
+  time_t_ _start_time {};
+  time_t_ _end_time {};
+
+  // Shared CameraFrame properties if camera geometry is constant
+  int COLS = -1;
+  int ROWS = -1;
+  nav::PixelType TYPE = nav::PixelType::_UNKNOWN;
+
+  // Mutex for multithreaded access to camera frame data
+  std::mutex _mtx;
+
+  // ROS node handle
+  ros::NodeHandle _nh;
+
+  // ROS image transport
+  image_transport::ImageTransport _it;
+
+  // Custom ROS Callback Queue
+  ros::CallbackQueue _cb_queue;
+
+  // Flexible topic subscription with potential for filtering and chaining
+  message_filters::Subscriber img_sub;
+
+  // ROS spinner to handle callbacks in a background thread
+  ros::AsyncSpinner async_spinner {1, &_cb_queue};
+
+  // The ros topic we will subscribe to
+  std::string _img_topic = "uninitialized";
+
+  // Status Flag
+  bool _ok = false;
+
+  // Store error messages here
+  std::string _err_msg{""};
+};
+
+///////////////////////////////////////////////////////////////////////////////////////////////////
+////// Templated function implementations /////////////////////////////////////////////////////////
+///////////////////////////////////////////////////////////////////////////////////////////////////
+
+// Initializer for when there is an image msg being published w/ a corresponding camera info msg
+template
+bool ROSCameraStream::init(std::string &camera_topic)
+{
+  using nav::tools::operator "" _s;  // convert raw string literal to std::string
+  _img_topic = camera_topic;
+  bool success = false;
+  image_transport::CameraSubscriber cam_sub;
+
+  // Subscribes to the image msg and camera info, initializes the CameraModel object, then disconnects the subscriber
+  auto init_lambda =
+    [&](const sensor_msgs::ImageConstPtr &image_msg_ptr, const sensor_msgs::CameraInfoConstPtr &info_msg_ptr) mutable
+    {
+      std::string init_msg {"ROSCameraStream: Initializing with "};
+      init_msg += _img_topic;
+      // ROS_INFO_NAMED("ROSCameraStream", init_msg.c_str());
+      // Set ptr to camera model object
+      std::shared_ptr camera_model_ptr{new image_geometry::PinholeCameraModel()};
+      camera_model_ptr->fromCameraInfo(info_msg_ptr);
+      this->_cam_model_ptr = camera_model_ptr;
+
+      // Set metadata attributes
+      this->ROWS = image_msg_ptr->height;
+      this->COLS = image_msg_ptr->width;
+      success = true;
+      cam_sub.shutdown();
+      return;
+    };
+
+  // Subscribe to both camera topic and camera info topic
+  cam_sub = _it.subscribeCamera(_img_topic, 100, init_lambda);
+
+  // The amount of time that we will try to wait for a cb
from cam_sub + ros::WallDuration timeout{5, 0}; + + // Loop to check if we get callbacks from cam_sub + auto sub_start = ros::WallTime::now(); + ros::WallRate rate{10}; // 10 HZ so we dont spam cpu + while(ros::WallTime::now() - sub_start < timeout) + { + ros::spinOnce(); + if(success) + { + // Subscribe to ROS image topic + img_sub.subscribe(_nh, _img_topic, 100, ros::TransportHints(), &_cb_queue); + + // Register callback to process frames published on camera img topic + auto img_cb = [this](const sensor_msgs::ImageConstPtr &image_msg_ptr){_newFrameCb(image_msg_ptr);}; + img_sub.registerCallback(img_cb); + + // Start listening for image messages in a background thread + async_spinner.start(); + + _ok = true; + return _ok; // Ideal exit point for init + } + rate.sleep(); + } + + _err_msg = "ROSCameraStream: Timed out trying to initialize with camera topic "_s + _img_topic + + ". Run 'rostopic echo "_s + _img_topic + "' to make sure it is being published to."; + ROS_WARN_NAMED("ROSCameraStream", _err_msg.c_str()); + return _ok; +} +// TODO: handle construction from img msg when there is no matching camera info msg + +/////////////////////////////////////////////////////////////////////////////////////////////////// +/////////////////////////////////////////////////////////////////////////////////////////////////// + +template +typename ROSCameraStream::CamFrameConstPtr +ROSCameraStream::getFrameFromTime(ros::Time desired_time) +{ + using nav::tools::operator "" _s; + + // Check bounds on time + if(desired_time < this->_start_time || desired_time > this->_end_time) + { + ROS_WARN_STREAM_THROTTLE_NAMED(1, "ROSCameraStream", + "ROSCameraStream: The camera frame you requested is outside the time range of the buffer."); + return nullptr; + } + + CamFrameConstPtr closest_in_time = this->operator[](0); + double min_abs_nsec_time_diff = fabs((this->operator[](0)->stamp() - desired_time).toNSec()); + double abs_nsec_time_diff = -1; + for(CamFrameConstPtr frame_ptr : this->_frame_ptr_circular_buffer) + { + abs_nsec_time_diff = fabs((frame_ptr->stamp() - desired_time).toNSec()); + if(abs_nsec_time_diff < min_abs_nsec_time_diff) + { + closest_in_time = frame_ptr; + min_abs_nsec_time_diff = abs_nsec_time_diff; + } + } + return closest_in_time; +} + +/////////////////////////////////////////////////////////////////////////////////////////////////// +/////////////////////////////////////////////////////////////////////////////////////////////////// + +template +typename ROSCameraStream::CamFrameConstPtr +ROSCameraStream::operator[](int i) +{ + using nav::tools::operator "" _s; + + // Prevent adding new frames while frames are being accessed + _mtx.lock(); + + // shared_ptr to a dynamically allocated reference frame object + CamFrameConstPtr shared_ptr_to_const_frame; + + try + { + if(i >= 0) // regular access for non-negative indices + { + shared_ptr_to_const_frame = this->_frame_ptr_circular_buffer[i]; + } + else{ // reverse access for negative indices (ex. 
[-1] refers to the last element)
+      size_t past_last_idx = this->_frame_ptr_circular_buffer.end() - this->_frame_ptr_circular_buffer.begin();  // DBG
+      shared_ptr_to_const_frame = this->_frame_ptr_circular_buffer[past_last_idx + i];  // DBG
+    }
+  }
+  catch(std::exception &e)
+  {
+    auto err = "ROSCameraStream: The circular buffer index you are trying to access is out of bounds:\n"_s + e.what();
+    ROS_WARN_THROTTLE_NAMED(1, "ROSCameraStream", err.c_str());
+    _mtx.unlock();  // release the lock on the error path too, so later calls cannot deadlock
+    return nullptr;
+  }
+
+  _mtx.unlock();
+  return shared_ptr_to_const_frame;
+}
+
+template
+void
+ROSCameraStream::_addFrame(CamFramePtr &new_frame_ptr)
+{
+  // Prevent accessing frames while new frames are being added
+  _mtx.lock();
+  this->_frame_ptr_circular_buffer.push_front(new_frame_ptr);
+  _mtx.unlock();
+}
+
+///////////////////////////////////////////////////////////////////////////////////////////////////
+///////////////////////////////////////////////////////////////////////////////////////////////////
+
+template
+void ROSCameraStream::_newFrameCb(const sensor_msgs::ImageConstPtr &image_msg_ptr)
+{
+  // Check if the topic name contains the string "rect"
+  bool rectified = _img_topic.find(std::string("rect")) != std::string::npos;
+
+  // Create shared pointer to dynamically allocated Camera Frame object constructed from ros img msg
+  CamFramePtr new_frame_ptr{new CamFrame(image_msg_ptr, this->_cam_model_ptr, rectified, 1.0)};
+
+  // Add shared pointer to CameraFrame object to the circular buffer
+  _addFrame(new_frame_ptr);
+
+  // Update the bounding timestamps for the frame sequence (frames are pushed to the front, so
+  // the back of the buffer holds the oldest stamp and the new message holds the newest)
+  this->_start_time = this->_frame_ptr_circular_buffer.back()->stamp();
+  this->_end_time = image_msg_ptr->header.stamp;
+}
+
+template
+ROSCameraStream::~ROSCameraStream()
+{
+  img_sub.unsubscribe();
+  _cb_queue.clear();
+  _cb_queue.disable();
+}
+
+} // namespace nav
\ No newline at end of file
diff --git a/perception/navigator_vision/include/navigator_vision_lib/pcd_sub_pub_algorithm.hpp b/perception/navigator_vision/include/navigator_vision_lib/pcd_sub_pub_algorithm.hpp
new file mode 100644
index 00000000..42f3b700
--- /dev/null
+++ b/perception/navigator_vision/include/navigator_vision_lib/pcd_sub_pub_algorithm.hpp
@@ -0,0 +1,81 @@
+#pragma once
+
+#include
+#include
+#include
+#include
+#include
+
+
+namespace nav{
+
+template
+class PcdSubPubAlgorithm{
+  /*
+    virtual base class for algorithms that subscribe to point cloud ROS topics,
+    operate on the clouds and publish output clouds to a different topic
+  */
+
+public:
+
+  // Type aliases
+  template using PCD = pcl::PointCloud;
+
+  // Constructors and Destructors
+
+  PcdSubPubAlgorithm(ros::NodeHandle nh, std::string input_pcd_topic, std::string output_pcd_topic)
+  : _nh(nh), _input_pcd_topic(input_pcd_topic), _output_pcd_topic(output_pcd_topic)
+  {
+    // Subscribe to point cloud topic
+    _cloud_sub = _nh.subscribe>(_input_pcd_topic, 1, &PcdSubPubAlgorithm::_cloud_cb, this);
+
+    // Advertise output topic
+    _cloud_pub = _nh.advertise>(_output_pcd_topic, 1, true);
+  }
+
+  // Check status methods
+
+  bool activated()
+  {
+    return _active;
+  }
+
+  bool ok()
+  {
+    return _ok && ros::ok();
+  }
+
+  // Set status methods
+
+  void switchActivation()
+  {
+    _active = !_active;
+  }
+
+
+protected:
+
+  // runs algorithm pipeline when a new pcd msg is received
+  virtual void _cloud_cb(const typename PCD::ConstPtr &cloud_msg) = 0;
+
+  // Subscribing and storing input
+  ros::NodeHandle _nh;
+  std::string _input_pcd_topic;
+  ros::Subscriber _cloud_sub;
+  PCD _input_pcd2;
+
+  // Storing result and publishing
+  std::string _output_pcd_topic;
+  ros::Publisher _cloud_pub;
+  PCD _output_pcd2;
+
+  // Activation status
+  bool _active = false;
+
+  // Error flag
+  bool _ok = false;
+  std::string _err_msg;
+};
+
+} //namespace nav
\ No newline at end of file
diff --git a/perception/navigator_vision/nodes/camera_stream_demo.cpp b/perception/navigator_vision/nodes/camera_stream_demo.cpp
old mode 100644
new mode 100755
index 4774ff34..01eb785c
--- a/perception/navigator_vision/nodes/camera_stream_demo.cpp
+++ b/perception/navigator_vision/nodes/camera_stream_demo.cpp
@@ -23,7 +23,6 @@ int main(int argc, char **argv)
   // Display most recent and oldest frame in the buffer
   while(cv::waitKey(100) == -1 && left_cam_stream.ok())
   {
-    static int i = 0;
     size_t size = left_cam_stream.size();
     if(size == 200)
     {
diff --git a/perception/navigator_vision/nodes/pinger_finder.py b/perception/navigator_vision/nodes/pinger_finder.py
index c2ad59ce..5279d555 100755
--- a/perception/navigator_vision/nodes/pinger_finder.py
+++ b/perception/navigator_vision/nodes/pinger_finder.py
@@ -1,5 +1,4 @@
 #!/usr/bin/env python
-from __future__ import division
 import rospy
 import rospkg
 import numpy as np
@@ -53,6 +52,7 @@ def __init__(self):
         self.set_freq_srv = rospy.Service('hydrophones/set_freq', SetFrequency, self.set_freq)
 
     def ping_cb(self, ping):
+        print rospkg.get_ros_package_path()
         print "PINGERFINDER: freq={p.freq:.0f} amp={p.amplitude:.0f}".format(p=ping)
         if abs(ping.freq - self.target_freq) < self.freq_tol and ping.amplitude > self.min_amp and self.listen:
             trans, rot = None, None
diff --git a/perception/navigator_vision/nodes/pinger_locator.py b/perception/navigator_vision/nodes/pinger_locator.py
new file mode 100644
index 00000000..a6be7103
--- /dev/null
+++ b/perception/navigator_vision/nodes/pinger_locator.py
@@ -0,0 +1,47 @@
+#!/usr/bin/env python
+import txros
+import numpy as np
+from hydrophones.msg import ProcessedPing
+from geometry_msgs.msg import Point, PoseStamped, Pose
+from navigator_alarm import single_alarm, AlarmListener, AlarmBroadcaster
+from twisted.internet import defer
+import navigator_tools
+import tf
+from tf import transformations as trans
+
+@txros.util.cancellableInlineCallbacks
+def head_for_pinger(navigator):
+    ping_sub = yield navigator.nh.subscribe("/hydrophones/processed", ProcessedPing)
+    yield ping_sub.get_next_message()
+    target_freq = 35000
+    while True:
+        processed_ping = yield ping_sub.get_next_message()
+        print processed_ping
+        if isinstance(processed_ping, ProcessedPing):
+            print "Got processed ping message:\n{}".format(processed_ping)
+            if processed_ping.freq > 35000 and processed_ping.freq < 36000:
+                freq_dev = abs(target_freq - processed_ping.freq)
+                print "Trustworthy pinger heading"
+                hydrophones_enu_p, hydrophones_enu_q = tf.lookupTransform("/hydrophones", "/enu", processed_ping.header.stamp)
+                pinger_enu_p = navigator_tools.point_to_numpy(tf.transformPoint())
+                dir_ = navigator_tools.point_to_numpy(processed_ping.position)
+                mv_mag = 2
+                mv_hyd_frame = dir_ / np.linalg.norm(dir_)
+                pinger_move = navigator.move.set_position(navigator_tools.point_to_numpy(processed_ping.position)).go()
+
+                print "Heading towards pinger"
+            else:
+                print "Untrustworthy pinger heading. Freq = {} kHz".format(processed_ping.freq)
+        else:
+            print "Expected ProcessedPing, got {}".format(type(processed_ping))
+
+# Hydrophone locate mission
+@txros.util.cancellableInlineCallbacks
+def main(navigator):
+    kill_alarm_broadcaster, kill_alarm = single_alarm('kill', action_required=True, problem_description="Killing wamv to listen to pinger")
+    df = defer.Deferred().addCallback(head_for_pinger)
+    df.callback(navigator)
+    try:
+        yield txros.util.wrap_timeout(df, 15, cancel_df_on_timeout=True)
+    except txros.util.TimeoutError:
+        print "Done heading towards pinger"
\ No newline at end of file
diff --git a/perception/navigator_vision/nodes/velodyne_pcd_colorizer.cpp b/perception/navigator_vision/nodes/velodyne_pcd_colorizer.cpp
new file mode 100644
index 00000000..4d35e2f1
--- /dev/null
+++ b/perception/navigator_vision/nodes/velodyne_pcd_colorizer.cpp
@@ -0,0 +1,26 @@
+#include
+#include
+#include
+#include
+#include
+#include
+
+
+using namespace std;
+
+int main(int argc, char** argv)
+{
+  // Init ROS
+  ros::init(argc, argv, "velodyne_pcd_colorizer");
+  ros::NodeHandle nh;
+
+  // Create PcdColorizer active object
+  nav::PcdColorizer colorizer{nh, "/velodyne_points"};
+
+  while(colorizer.ok())
+  {
+    ROS_INFO("velodyne_pcd_colorizer: Spinning ROS callbacks");
+    ros::spin();
+  }
+
+}
\ No newline at end of file
diff --git a/perception/navigator_vision/package.xml b/perception/navigator_vision/package.xml
index d351512d..2ef95ee2 100644
--- a/perception/navigator_vision/package.xml
+++ b/perception/navigator_vision/package.xml
@@ -34,4 +34,6 @@
   <run_depend>tf2_geometry_msgs</run_depend>
   <build_depend>navigator_tools</build_depend>
   <run_depend>navigator_tools</run_depend>
+  <build_depend>pcl_ros</build_depend>
+  <run_depend>pcl_ros</run_depend>
 </package>
diff --git a/perception/navigator_vision/src/navigator_vision_lib/colorizer/camera_observer.cpp b/perception/navigator_vision/src/navigator_vision_lib/colorizer/camera_observer.cpp
new file mode 100644
index 00000000..6d891c5c
--- /dev/null
+++ b/perception/navigator_vision/src/navigator_vision_lib/colorizer/camera_observer.cpp
@@ -0,0 +1,78 @@
+#include
+
+namespace nav{
+
+using nav::tools::operator "" _s;  // converts to std::string
+
+CameraObserver::CameraObserver(ros::NodeHandle &nh, std::string &pcd_in_topic, std::string &cam_topic, size_t hist_size)
+: _nh{nh}, _cam_stream{nh, hist_size}
+{
+  try
+  {
+    if(!_cam_stream.init(cam_topic))
+    {
+      _err_msg = "COLORIZER: ROSCameraStreams could not be initialized with "_s + cam_topic + "."_s;
+      return;
+    }
+  }
+  catch(std::exception &e)
+  {
+    std::cout << __PRETTY_FUNCTION__ << " exception caught: " << e.what() << std::endl;
+  }
+
+
+  // Check that tf for this camera is up (default template arg is pcl::PointXYZ)
+  auto velodyne_msg = ros::topic::waitForMessage>(pcd_in_topic, _nh, ros::Duration{3, 0});
+  if(!velodyne_msg)  // waitForMessage returns a null pointer on timeout
+  {
+    _err_msg = "COLORIZER: Timed out waiting for a point cloud on "_s + pcd_in_topic + "."_s;
+    return;
+  }
+  std::string src_frame_id = velodyne_msg->header.frame_id;
+  std::string target_frame_id = _cam_stream.getCameraModelPtr()->tfFrame();
+  ros::Duration tf_timeout{5, 0};  // Wait 5 seconds max for each TF
+  std::string err = "COLORIZER: waiting for tf between "_s + src_frame_id + " and "_s + target_frame_id + ": "_s;
+  if(_tf_listener.waitForTransform(target_frame_id, src_frame_id, ros::Time(0),
+                                   tf_timeout, ros::Duration(0.05), &err))
+  {
+    _ok = true;
+    return;  // Ideal return point
+  }
+  else
+    ROS_ERROR(err.c_str());  // TF not available
+}
+
+ColorObservation::VecImg CameraObserver::get_color_observations(const PCD::ConstPtr &pcd)
+{
+  using std::vector;
+  using std::cout;
+  using std::endl;
+  cout << __PRETTY_FUNCTION__ << endl;
+
+  // Structure: Image pixels are lists of ColorObservations for the respective pixels in the
+  // image that have pcd points that would be imaged there
+  auto obs_img = vector>{size_t(_cam_stream.rows()), vector{size_t(_cam_stream.cols())}};
+  PCD pcd_cam{};  // _cam indicates the reference frame
+  auto cam_model = _cam_stream.getCameraModelPtr();
+
+  // We will first transform the pcd from lidar frame into the frame of the camera so that we may project
+  // it into the image with just the camera intrinsics
+  pcl_ros::transformPointCloud(cam_model->tfFrame(), *pcd, pcd_cam, _tf_listener);
+
+  cv::Matx33d K_cv = cam_model->fullIntrinsicMatrix();
+  Eigen::Matrix3d K_eigen;
+  cv::cv2eigen(K_cv, K_eigen);
+
+  for(auto& pt : pcd_cam)  // iterate by reference so the projected coordinates are written back
+  {
+    cout << "Pt: " << pt << endl;
+    auto imaged_pt = K_eigen * Eigen::Matrix{pt.x, pt.y, pt.z};
+    cout << "Imaged pt: " << imaged_pt << endl;
+    pt = pcl::PointXYZ(imaged_pt[0], imaged_pt[1], imaged_pt[2]);
+
+  }
+  return ColorObservation::VecImg();
+
+}
+
+} // namespace nav
diff --git a/perception/navigator_vision/src/navigator_vision_lib/colorizer/color_observation.cpp b/perception/navigator_vision/src/navigator_vision_lib/colorizer/color_observation.cpp
new file mode 100644
index 00000000..bc8ec4e6
--- /dev/null
+++ b/perception/navigator_vision/src/navigator_vision_lib/colorizer/color_observation.cpp
@@ -0,0 +1,15 @@
+#include
+
+
+namespace nav{
+
+UnoccludedPointsImg::UnoccludedPointsImg()
+// : frame_ptr(frame_ptr),
+//   cloud_ptr(cloud_ptr)
+{
+  // auto cam_model = frame_ptr->getCameraModelPtr();
+
+
+}
+
+} // namespace nav
\ No newline at end of file
diff --git a/perception/navigator_vision/src/navigator_vision_lib/colorizer/pcd_colorizer.cpp b/perception/navigator_vision/src/navigator_vision_lib/colorizer/pcd_colorizer.cpp
new file mode 100644
index 00000000..0b8c9833
--- /dev/null
+++ b/perception/navigator_vision/src/navigator_vision_lib/colorizer/pcd_colorizer.cpp
@@ -0,0 +1,173 @@
+#include
+
+using namespace std;
+using namespace cv;
+
+namespace nav{
+
+PcdColorizer::PcdColorizer(ros::NodeHandle nh, string input_pcd_topic)
+  : _nh(nh), _img_hist_size{10}, _cloud_processor{nh, input_pcd_topic, _img_hist_size},
+    _input_pcd_topic{input_pcd_topic}, _output_pcd_topic{input_pcd_topic + "_colored"}
+{
+  using nav::tools::operator "" _s;  // converts to std::string
+
+  try
+  {
+    // Subscribe to point cloud topic
+    _cloud_sub = _nh.subscribe>(_input_pcd_topic, 1, &SingleCloudProcessor::operator(), &_cloud_processor);
+
+    // Advertise output topic
+    _cloud_pub = _nh.advertise>(_output_pcd_topic, 1, true);  // output type will probably be different
+  }
+  catch(std::exception &e)
+  {
+    _err_msg = "COLORIZER: Subscriber or publisher error caught: "_s + e.what();
+    ROS_ERROR(_err_msg.c_str());
+    return;
+  }
+
+  std::string msg = "COLORIZER: Initialization was "_s + (_cloud_processor.ok()?
"successful" : "unsuccessful"); + msg += "\n Input point cloud topic: " + _input_pcd_topic + "\n Output point cloud topic: " + _output_pcd_topic; + ROS_INFO(msg.c_str()); + + if(_cloud_processor.ok()) + { + _ok = true; + _active = true; + } +} + +void PcdColorizer::_cloud_cb(const PCD<>::ConstPtr &cloud_in) +{ + change_input_mtx.lock(); + // _input_pcd = cloud_in; // TODO: figure out a good communication mechanism b/w colorizer, single processor and merger + ROS_INFO("Got a new cloud"); + change_input_mtx.unlock(); +} + +// PcdColorizer::PcdColorizer(ros::NodeHandle nh, string input_pcd_topic) +// : PcdSubPubAlgorithm{nh, input_pcd_topic, input_pcd_topic + "_colored"}, +// _img_hist_size{10}, +// cloud_processor{nh, _img_hist_size} +// { +// cout << "img hist size: " << _img_hist_size << endl; +// // Subscribe to all rectified color img topics +// auto rect_color_topics = nav::tools::getRectifiedImageTopics(true); +// if(rect_color_topics.size() == 0) +// { +// _err_msg += "COLORIZER: There are no rectified color camera topics currently publishing on this ROS master ("; +// _err_msg += ros::master::getURI(); +// _err_msg += ") Re-run after rectified color images are being published."; +// ROS_ERROR(_err_msg.c_str()); +// return; +// } + +// // Construct and initialize Camera Stream objects +// size_t good_init = 0; +// for(size_t i = 0; i < rect_color_topics.size(); i++) +// { +// // Construct on heap but store ptrs in stack +// _ros_cam_ptrs.emplace_back(new CamStream{this->_nh, this->_img_hist_size}); +// good_init += int(_ros_cam_ptrs[i]->init(rect_color_topics[i])); // Count successfull init +// _transformed_cloud_ptrs.emplace_back(nullptr); +// } + +// // Make sure at least one ROSCameraStream was successfully initialized +// if(good_init == 0) +// { +// _err_msg = "COLORIZER: No ROSCameraStreams could be initialized."; +// ROS_ERROR_NAMED("COLORIZER", _err_msg.c_str()); +// return; +// } + +// std::mutex mtx; + +// // LAMBDAS! +// auto check_tf_lambda = [this, &rect_color_topics, &mtx](const PointCloud::ConstPtr &cloud_in) +// { +// std::lock_guard lock{mtx}; // will unlock mtx when it goes out of scope +// ROS_INFO("COLORIZER: checking that all required TF's are available"); +// std::string src_frame_id = cloud_in->header.frame_id; +// std::string target_frame_id; +// ros::Duration tf_timeout{5, 0}; // Wait 5 seconds max for each TF +// this->_ok = true; // assume TF's are ok until proven otherwise + +// // Check each TF +// for(size_t i = 0; i < rect_color_topics.size(); i++) +// { +// std::string err {"COLORIZER: waiting for tf between "}; +// err += src_frame_id + std::string(" and ") + target_frame_id + std::string(": "); +// if(! this->_tf_listener.waitForTransform(this->_ros_cam_ptrs[i]->getCameraModelPtr()->tfFrame(), +// src_frame_id, ros::Time(0), tf_timeout, ros::Duration(0.05), &err)) +// { +// ROS_ERROR(err.c_str()); // TF not available +// this->_ok = false; +// } +// } +// if(this->_ok) +// ROS_INFO("COLORIZER: Required TF's are publishing."); +// mtx.unlock(); +// }; + +// void PcdColorizer::_cloud_cb(const PCD<>::ConstPtr &cloud_in) +// { +// std::cout << "active: " << _active << std::endl; +// std::cout << "fuck!" << std::endl; +// if(_active) +// { +// if(!_ok) +// { +// ROS_INFO((std::string("COLORIZER: received cloud msg but error flag is set: ") + _err_msg).c_str()); +// return; +// } + +// // std::cout << "fuck!" 
<< std::endl; + +// // // Transforms pcd to frame of each camera and generates list of color observations for points +// // // observed in any of the cameras +// // _process_pcd(cloud_in); + +// // // Combines current colored point cloud with stored persistent color pcd +// // _combine_pcd(); +// } +// else +// ROS_WARN_DELAYED_THROTTLE(15, "COLORIZER: receiving clouds but not active"); +// std::cout << "Exiting " << __PRETTY_FUNCTION__ << std::endl; +// } + +// inline void PcdColorizer::_process_pcd(const PCD<>::ConstPtr &cloud_in) +// { +// std::cout << "process cloud" << std::endl; +// // Transform pcds to target tf frames +// std::string target_frame_id; + +// // Synchronize work for each camera among worker threads +// std::promise start_work_prom; +// std::shared_future star_work_fut{start_work_prom.get_future()}; +// std::vector> worker_done_proms{_ros_cam_ptrs.size()}; +// std::vector> worker_done_futs{_ros_cam_ptrs.size()}; + +// #pragma omp parallel for +// for(size_t i = 0; i < _ros_cam_ptrs.size(); i++) +// { +// target_frame_id = _ros_cam_ptrs[i]->getCameraModelPtr()->tfFrame(); +// std::shared_ptr transfromed_pcd{new PointCloud()}; +// pcl_ros::transformPointCloud(target_frame_id, *cloud_in, *transfromed_pcd, _tf_listener); +// _transformed_cloud_ptrs[i] = transfromed_pcd; +// } + + // #pragma omp parallel for + // for(size_t i = 0; i < _transformed_cloud_ptrs.size(); i++) + // { + // // This should be merged with above loop + // std::cout << "hey" << std::endl; + // } + // std::cout << "pcd: " << cloud_in->header << std::endl; +// } + +// inline void PcdColorizer::_combine_pcd() +// { + +// } + +} // namespace nav diff --git a/perception/navigator_vision/src/navigator_vision_lib/colorizer/single_cloud_processor.cpp b/perception/navigator_vision/src/navigator_vision_lib/colorizer/single_cloud_processor.cpp new file mode 100644 index 00000000..f22f0d2b --- /dev/null +++ b/perception/navigator_vision/src/navigator_vision_lib/colorizer/single_cloud_processor.cpp @@ -0,0 +1,58 @@ +#include + +// using namespace std; + +namespace nav { + +using nav::tools::operator "" _s; // converts to std::string + +SingleCloudProcessor::SingleCloudProcessor(ros::NodeHandle nh, std::string& in_pcd_topic, size_t hist_size) +: _nh{nh}, _hist_size{hist_size} +{ + ROS_INFO(("SingleCloudProcessor: Initializing with " + in_pcd_topic).c_str()); + auto rect_color_topics = nav::tools::getRectifiedImageTopics(true); + if(rect_color_topics.size() == 0) + { + _err_msg += "COLORIZER: There are no rectified color camera topics currently publishing on this ROS master ("; + _err_msg += ros::master::getURI(); + _err_msg += ") Re-run node after rectified color images are being published."; + ROS_ERROR(_err_msg.c_str()); + return; + } + + for(auto& topic : rect_color_topics) + { + ROS_INFO(("SingleCloudProcessor: Creating CameraObserver for camera publishing to "_s + topic.c_str()).c_str()); + auto cam_observer_ptr = new CameraObserver{_nh, in_pcd_topic, topic, _hist_size}; + ROS_INFO(("CameraObserver: initialization "_s + (cam_observer_ptr->ok()? 
"successful" : "unsuccessful")).c_str()); + if(cam_observer_ptr->ok()) + _camera_observers.push_back(std::unique_ptr{cam_observer_ptr}); + else + delete cam_observer_ptr; + } + + if(_camera_observers.size() == 0) + { + _err_msg = "SingleCloudProcessor: No ROSCameraStreams could be initialized."; + ROS_ERROR_NAMED("COLORIZER", _err_msg.c_str()); + } + else + _ok = true; + return; +} + +void SingleCloudProcessor::operator()(const PCD::ConstPtr &pcd) +{ + + std::cout << "Called the single cloud processor operator()" << std::endl; + std::cout << "pcd: " << pcd->header << std::endl; + + + // Get color observation list from each of the observers + + // Merge lists from all of the observers + + // Summarize Confidence in each point color observation +} + +} // namespace nav From dfe3fe942515de6c29d03aa7fc9ab95f6636e92e Mon Sep 17 00:00:00 2001 From: David Soto Date: Tue, 29 Nov 2016 19:54:04 -0500 Subject: [PATCH 168/267] TOOLS: fix bugs and improve usability of video player --- .../nodes/{video_player.py => video_player} | 39 ++++++++++++------- 1 file changed, 25 insertions(+), 14 deletions(-) rename utils/navigator_tools/nodes/{video_player.py => video_player} (72%) diff --git a/utils/navigator_tools/nodes/video_player.py b/utils/navigator_tools/nodes/video_player similarity index 72% rename from utils/navigator_tools/nodes/video_player.py rename to utils/navigator_tools/nodes/video_player index 23fa0123..88a010bd 100755 --- a/utils/navigator_tools/nodes/video_player.py +++ b/utils/navigator_tools/nodes/video_player @@ -1,13 +1,14 @@ #!/usr/bin/env python """ -Usage: rosrun navigator_tools video_player.py _filename:=MyVideoFile.mp4 +Usage: rosrun navigator_tools video_player _filename:=MyVideoFile.mp4 + rosrun navigator_tools video_player MyVideoFile.mp4 Utility to play video files into a ros topic with some added conveniences like pausing and scrolling through the video with a slider bar. 
Plays any videofile that OpenCV can open (mp4,etc) -Publishes to video_file/image_raw +Publishes to video_player/filename/image_raw Set the following rosparams for customization ~filename string what file to load @@ -22,6 +23,7 @@ """ import sys +import os import rospy import cv2 from sensor_msgs.msg import Image @@ -31,8 +33,9 @@ class RosVideoPlayer: def __init__(self): self.bridge = CvBridge() - self.image_pub = rospy.Publisher("video_file/image_raw",Image,queue_size=10) - self.filename = rospy.get_param('~filename', 'video.mp4') + self.filename = rospy.get_param('~filename', sys.argv[1]) + self.image_pub = rospy.Publisher("video_player/" + os.path.splitext(os.path.basename(self.filename))[0] + "/image_raw", + Image,queue_size=10) self.slider = rospy.get_param('~slider',True) self.start_frame = rospy.get_param('~start_frames',0) self.cap = cv2.VideoCapture(self.filename) @@ -46,8 +49,8 @@ def __init__(self): self.roi_width = rospy.get_param('~width', self.width) self.num_frames = self.cap.get(cv2.cv.CV_CAP_PROP_FRAME_COUNT) - self.fps = rospy.get_param('~fps',self.cap.get(cv2.cv.CV_CAP_PROP_FPS)) - self.cap.set(cv2.cv.CV_CAP_PROP_POS_FRAMES,self.start_frame) + self.fps = rospy.get_param('~fps', self.cap.get(cv2.cv.CV_CAP_PROP_FPS)) + self.cap.set(cv2.cv.CV_CAP_PROP_POS_FRAMES, self.start_frame) self.num_frames = self.cap.get(cv2.cv.CV_CAP_PROP_FRAME_COUNT) if (self.num_frames < 1): raise Exception("Cannot read video {}".format(self.filename)) @@ -58,7 +61,7 @@ def __init__(self): if (self.slider): cv2.namedWindow('Video Control') - cv2.createTrackbar('Frame','Video Control',int(self.start_frame),int(self.num_frames),self.trackbar_cb) + cv2.createTrackbar('Frame','Video Control', int(self.start_frame), int(self.num_frames), self.trackbar_cb) cv2.createTrackbar('ROI x_offset', 'Video Control', int(self.roi_x_offset), int(self.width)-1, self.roi_x_cb) cv2.createTrackbar('ROI y_offset', 'Video Control', int(self.roi_y_offset), int(self.height)-1, self.roi_y_cb) cv2.createTrackbar('ROI height', 'Video Control', int(self.roi_height), int(self.height), self.roi_height_cb) @@ -67,7 +70,7 @@ def __init__(self): def run(self): r = rospy.Rate(self.fps) # 10hz - while (not rospy.is_shutdown()) and self.cap.isOpened(): + while not rospy.is_shutdown() and self.cap.isOpened() and not self.ended: if self.slider: k = cv2.waitKey(1) & 0xFF if not k == self.last_key: @@ -79,13 +82,17 @@ def run(self): elif k == 115 and self.paused: self.one_frame() if self.paused: - cv2.setTrackbarPos('Frame','Video Control',int(self.cap.get(cv2.cv.CV_CAP_PROP_POS_FRAMES))) + cv2.setTrackbarPos('Frame','Video Control', int(self.cap.get(cv2.cv.CV_CAP_PROP_POS_FRAMES))) else: self.one_frame() r.sleep() def one_frame(self): - ret, frame = self.cap.read() + ret, frame = (None, None) + try: + ret, frame = self.cap.read() + except Exception as e: + print "Exception: {}".format(e) if not ret: if not self.ended: rospy.loginfo("File {} ended".format(self.filename)) @@ -93,7 +100,11 @@ def one_frame(self): return else: self.ended = False - frame_roied = frame[self.roi_y_offset:self.roi_height, self.roi_x_offset:self.roi_width] + x1 = self.roi_x_offset + x2 = x1 + self.roi_width + y1 = self.roi_y_offset + y2 = y1 + self.roi_height + frame_roied = frame[y1:min(y2, frame.shape[0]), x1:min(x2, frame.shape[1])] self.image_pub.publish(self.bridge.cv2_to_imgmsg(frame_roied, "bgr8")) def trackbar_cb(self,x): @@ -106,16 +117,16 @@ def roi_y_cb(self, d): self.roi_y_offset = d def roi_height_cb(self, d): - self.roi_height = d + 
self.roi_height = d if d > 0 else 1 def roi_width_cb(self, d): - self.roi_width = d + self.roi_width = d if d > 0 else 1 def pause(self): self.paused = not self.paused def main(): - rospy.init_node('video_player') + rospy.init_node('video_player', anonymous=True) player = RosVideoPlayer() player.run() From 466e8c1b8b168f1c4bfb49e818e9d7c0813396e2 Mon Sep 17 00:00:00 2001 From: David Soto Date: Wed, 30 Nov 2016 07:38:41 -0500 Subject: [PATCH 169/267] MOVE: fix bug with kwargs passed to MoveGoal Remove kwargs passed to MoveGoal that would crash navc and warn user. Add support for shorthand commands to navc: 'f', 'b', 'l', 'r', 'yl', 'yr'. Print more sensible move messages with units. PR161: address comments --- .../src/navigator_vision_lib/colorizer/camera_observer.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/perception/navigator_vision/src/navigator_vision_lib/colorizer/camera_observer.cpp b/perception/navigator_vision/src/navigator_vision_lib/colorizer/camera_observer.cpp index 6d891c5c..df4dccca 100644 --- a/perception/navigator_vision/src/navigator_vision_lib/colorizer/camera_observer.cpp +++ b/perception/navigator_vision/src/navigator_vision_lib/colorizer/camera_observer.cpp @@ -47,7 +47,7 @@ ColorObservation::VecImg CameraObserver::get_color_observations(const PCD>{size_t(_cam_stream.rows()), vector{size_t(_cam_stream.cols())}}; - PCD pcd_cam{}; // _cam indicates the reference frame + PCD pcd_cam{}; // _cam indicates the reference frame auto cam_model = _cam_stream.getCameraModelPtr(); // We will first transform the pcd from lidar frame into the frame of the camera so that we may project From 490e4ab78285de2c01bb8aa7e55bdc4bae97e9d4 Mon Sep 17 00:00:00 2001 From: David Soto Date: Thu, 1 Dec 2016 00:08:57 -0500 Subject: [PATCH 170/267] TOOLS: add graceful CTRL-C exit to tf_to_gazebo --- utils/navigator_tools/nodes/tf_to_gazebo.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/utils/navigator_tools/nodes/tf_to_gazebo.py b/utils/navigator_tools/nodes/tf_to_gazebo.py index 30eefd9d..20f19d93 100755 --- a/utils/navigator_tools/nodes/tf_to_gazebo.py +++ b/utils/navigator_tools/nodes/tf_to_gazebo.py @@ -9,11 +9,11 @@ def main(): rospy.init_node('tf_to_gazebo') - do_cam_fix = rospy.get_param("~cam_fix",False) - tf_parent = rospy.get_param("~tf_parent","/measurement") + do_cam_fix = rospy.get_param("~cam_fix", False) + tf_parent = rospy.get_param("~tf_parent", "/measurement") listener = tf.TransformListener() rate = rospy.Rate(10.0) - cam_fix_quat = (-0.5 , 0.5 ,-0.5 ,-0.5) + cam_fix_quat = (-0.5, 0.5 , -0.5 , -0.5) while not rospy.is_shutdown(): try: @@ -34,4 +34,8 @@ def main(): rate.sleep() if __name__ == '__main__': - main() + try: + main() + except KeyboardInterrupt: + print + From 3c2999c856dfdfefb205b5fc363642bb3558d189 Mon Sep 17 00:00:00 2001 From: David Soto Date: Sat, 3 Dec 2016 20:46:53 -0500 Subject: [PATCH 171/267] CORAL SURVEY: add underwater shape templates --- perception/navigator_vision/templates/circle.png | Bin 0 -> 2590 bytes perception/navigator_vision/templates/cross.png | Bin 0 -> 761 bytes .../navigator_vision/templates/triangle.png | Bin 0 -> 1961 bytes 3 files changed, 0 insertions(+), 0 deletions(-) create mode 100644 perception/navigator_vision/templates/circle.png create mode 100644 perception/navigator_vision/templates/cross.png create mode 100644 perception/navigator_vision/templates/triangle.png diff --git a/perception/navigator_vision/templates/circle.png b/perception/navigator_vision/templates/circle.png 
new file mode 100644 index 0000000000000000000000000000000000000000..fa5bd300932a2b2db3c70a04f84e8711480c5a6c GIT binary patch literal 2590 zcmaJ@c{r478=oSLeFii3WsE^~LYB!g#+sdUI3Y_VM~O0)hRDpA88N6y_B|wPl;y}U zldTR0Q6h$9G}(?*eNp<}uIsz5@B8a}uIGK9Kknzf?)(0|_x=0b6i;_YQ6V`Y5C|md z?1c3O&hTFkSO9omU8eg22mdvUvo9D}cyKrw1QLAZjJ5SmDBdX9KP9!GJhZrSyNNf~ zExk2+I{)>g@l+cZLzJa^JMh~}^D?~e3VqZE?+M>!XiD%2s%Gj*UDBktcuo%Dj!(VX zrZzNoOUK7uELB!AHRA1apN_rsFPf#L7mpVtZs5lXuHAw1+op<`5S#)J6?6?_H0|IR z*?F3@g4j_~Mh0j1TXo%da5|>K27@^~9r?MKw@Ya6=-@0r&*XmmDA5>u6RCbjFkdea z6BD!Z%lL9^Y+yCEfBibuud5IG^sQ*$qV-nyJjs)!Lofrg|Hn}qWHg8P zKc?G>>6yk%0}&aLEf&jfbo_Yky?Z1smur6WxTU3V&q6j$**Y{L0z5S}1%BWk`H|S7 zliAs6CX?mx{{8#%@^X+s`XMhbFEW{&imAB2`xQOTT-yzajg@e(m4s&=QB+iH325Z< zcuW?HfQ@f6sDll}D2NE~J&9h-&XPYd5N?D(AP!lSh(V#yb21;SUo0gmHJoX7aCR2l z+uJ+W5x}0AA$|FRyPcIqxhkERVF6r67!gf%>>%Ea%S}IoJezeA0@ZNc8wdMV` z_5fXDIZM)goPm;933e`2fbf*qljyK_=u2w`FZ zTU}kPd-xFJ;zCu6lL!Pe4Tgt@Z2+HCDz)6N!QR8C@78&LH57`o)Ss@Yt)1@I0P7X2 zv=;0QqA@eX!)BWTe|r*D@IHEwMj#MW3=N?g?!}C3MN0@Au6_k?m>#2$p=gO)Vx;j+ ztY4oV0W;ckT-7DBm953@jHpFMc@ZC-?UFcs`m}*u*7PVC_=@CKWLnYI<~==)Dugup zW!};7s4OpEqT!P>ZNtOWMn^|mE{-d)=girJs5fm#_P=aMp49j$Cc;!PHHAB7E2Q*nMeMQ1!`()c1e5AHfO;@F|YqjJT4?G+~20Neib_r z*`ayt7<5$^p{HhHA!l!IPo_{1id31jpxK$m-}__2kDee4kBp5Sn5Ifct!sgp1{1u` zub(AbN=QhM2UWEQ=l!MNa8(qFSlGPtQ<{iLgK7JzSXvfw#WT4+)a+~r9PZ1X$=CY< zh@D(ru|Yvf>{3-PghLfnAU!SN9gL3@0sx*%r`!KymwP{lk%PnI@e{QTAmBgg>WYqx zjIcr>H@sbHBnk=&d_@|69s^n!k4H~VPLiLisAFrqe-*J7o$3pA_H1|s_)we4hDb`l zV8qVOP82%@PXjpS>{$g>RaNp`4_C5#ZF*31u+h4fO+;)Z`}rw> zp_qvXV^1wQc2q-M4UI1Tt)>RZP?PQw>I261^!6KVwaOfB@O6q?r3eDc-)K>JQS!Bf z?ZeAzUs(LM;D^bw!uAmUZM4UycVs~z;{KN}Me3JPb`bu5;h&Y2zTeeQQD^Z!hbAQz z`tIa$zU~{IJn8mn$ia1ZdRpqig9lD9ec6;gWgj0Omis(XJsgiOrmp_DSy)gY3s6J< z22S!=*?mX`^H9%%yUwf&aeOU(;flBw0-zy2KECY)bT!85*3>#{X-TE^OYVILZHTYv zFA{}9K~4RD@gG7Wk=%_9(Pvkh$JgvUJj4eF2k+g6l&%>X7^EH?q=XuhQ)F;)ahwu)`)KCehGtI$$q*)#co>2#Bp3ThH4-QR8 zFyl_meEOu5l#~Q88s*sz%kgJWZxRvU6~@}`kwz(7h=I8|1?WfSg~!k=`?JFg2B@&8 z$k58l-9pMRp>zl^?Qq%1Wu`5Q_%FN_I(bhjdhuD!hjzmA<=C!?$;n$|WA4kX*n*ap zH&GFhk++x}@M!z1SFeaSzHauf#zJYRIg!a9`(N2}b8^?CDQ`C%e0(Hz;F+2`SV381HM-5R9;yo`+rdwWUkPo7W%JlJOt_-}t7iHnOfDEezVsn^$~ zptzXzSJ8$1W27*$bh@;Bqx1BO`(OXubI}MG6M9ZJxEO@lIRmT+EX++nHOxK5*eN2cYZD z(Zn|U{|_r4 zL`w`r9*5JOms5gIE+%4pwP1k8*DDkTxl~tIf4?4b6ggpRi9|j{wEuVBSy4W{nVY5y T3)Tk)K9IA$JGKdP?$*BnjCQY# literal 0 HcmV?d00001 diff --git a/perception/navigator_vision/templates/cross.png b/perception/navigator_vision/templates/cross.png new file mode 100644 index 0000000000000000000000000000000000000000..9b400145f7986163d222d8090a4041a18f160506 GIT binary patch literal 761 zcmeAS@N?(olHy`uVBq!ia0vp^DIm!dJxA*GJo3?%A+rx0PN;Qtz%wA6>cj~~67)p|3h?Q2zSobj;(o7U{l`_{33=kn>(#YMVZQ;cSQp2f)U`Cx{L z(bJ-vf2!j88{}S=+$!09xBL9R_SaH#olj_RNU|y}R!9+P>1mn}C}70pg3Z;1J;HK^0nK(-+D90K!z{z_BQ^>tj%^FD?^-i-*q#Z`DM@b8oTz#7FQfgPAK6r z3aj}{yIW?TfBx~;udWP}SDEvj7Ct!rwS=L=ZE=Q))QS+TdEX-Src3a!704W)zOzm| zVnU2wd)nsC95d-F(SJ5a=(JVseOEdEWJ=S^5-AS>*-dbFW3>!Z@bgl~d0*>x*KxgZ z=lfeH|M;WC!hjch%1-|+%ibzwHJ4AQv*r2BoZD`zuO7-U`LyNy%aTL4Z_j>S`A=no z!tuu+AElkye(P;nLG1PRPKN8H8}H_Aj9OcBQ=h}C9U~+$Ee8kG8E3Z2XWj?De6PFz z{(FH8d;8&sd&*wNuh$df-m`x{yV3Ia7tLR)Y`;|Pt?vJrD)(u^*E2Xwg2Y~Uyxy@S z!zrgzU6dwPpZR>hejAf#^rRRsVQKij<@5DKP(Hb+6tK8 S51s@}%M6~belF{r5}E)I=}4CV literal 0 HcmV?d00001 diff --git a/perception/navigator_vision/templates/triangle.png b/perception/navigator_vision/templates/triangle.png new file mode 100644 index 0000000000000000000000000000000000000000..80da22ae87cf2d9ebeeddbd532b55e36624a62b9 GIT binary patch literal 1961 
zcmaJ?c~FyC5)Xn9AV`eNaz)BOKorCP84ZUhpFs#B*KiGTgfXCiKtPUw$YnSs8PH)+ zu5gr~D1-nBmxvq^FdPPCVkUA%j1t+%f(YW`e%AiCf9$LGUe~Lx`gOnVUw7xbyE>|= z=%_#-5H*4m-V>Ch9|x)k-sGhcFHk640|;JFaHT>?c@T&ak$|`NqExI_NPYfx*5!V` zBd-j{%V+Nqg<2s4QU?^ZUEg+O6LeM)w~(Ep2%O4Pk;3F zOVIn3YM5}$e%d%P1(^#XWqazou zpLuz*3Z(^`a{b`i>7Qp@aM#7isbP8{S+B`Qbv$f(=2HH3OQjYTD>JPxU1+kjh8q@} z12ypFvFP6;vqxGB*C>Tb>UtTHWH995Ufr}Xz234ey)T|5_? z+1N<=y78fjYOS^o;5~-;{QZ9k1hQB3X6eg>%I$ppbpVmCuuolGmU_tB`#Gpyk2G{q z1q~k$dh%>#Wu;QIl&rV2voppl!5jlqv$(ZI2`RnjXPQ%bJlNt?r{ysc4r%tSP>*(Q zWI0YX>RK_zgp{1D%?#@G-zv+g569Wsk~pQ9BUCZ&qfBPNm^}XEEsB7spFVPX^erF? z)NoPp@cyZe4_zcv7TY2L=+pB3TmHWFFa;4 zMJewPz##(S#j=GCEhj2ATK*Rxt_%6~-*!GeK39*I8~|FLKBa@~q}WDBJ}!S&2xm;@ zru5PS%p|Y7v*%@wO+II9+=*dLRBcpOz`nsnc(KzE>*ioA5<~=1G#p~l?-DR5{B1PI zH*h=h#8f!weZ2KSQroZGrCpd2bx0iR_oqYXy%I4_J@WRwd!fz8vghZRs^ShD1fFxmDTPAA(ok8#dH*3*{r< zJX8j={b#5-JLNFXVsmrzL3CrC5#HOoqM`!BRs}1-#pvcJ35X&$7!6@y?fy*{W767G zJv3tD;((}HCx?-#DMZ!j^ofs(?>!$oVTK%lW$?O_wrgFOy)5MWnV5?SL*-cmX(gc1C@CO0@9+a z-h%-R;yz~_cqL7^EVkBu>}NXAYT6ijyS#inX^;{(*6-rQK_Z5Hb&oXfM}DI_Zh1Z&`bOM6 z1@;pg8=DcyI(_IGKCeE!#gtc)?s=p@-}yeGd$!r&5@Qumus_okaE|StFizY(Muk6U zEJ*C3(P(NSvCs^yLmR+eAX#X?i%IydS|#y#==E|nF>2l;T)H2jASV)uu5^gT9w)z1 z1fV{?zF>;0!^@gjSAS}4Z>NGV260&+&)S4|mS6j4&g==W=b`S55|ffjor|OS zZ~*LEKQL#jqsoiU9Lfl5%a35M6yz^3L0X&gfExFfsM%T0Oe**>W^muqV6J*%cJ+Wz zs7v)R8C9qgBm-wJgy7(cZw822{{dcwsBi!P literal 0 HcmV?d00001 From 9802e8c1495221fe76f686047a4141becd69b79e Mon Sep 17 00:00:00 2001 From: David Soto Date: Sun, 4 Dec 2016 02:47:39 -0500 Subject: [PATCH 172/267] PERCEPTION: add underwater shape id node --- perception/navigator_vision/CMakeLists.txt | 13 +++- ...underwater_shape_identification_vision.cpp | 59 +++++++++++++++++++ 2 files changed, 71 insertions(+), 1 deletion(-) create mode 100644 perception/navigator_vision/nodes/underwater_shape_identification_vision.cpp diff --git a/perception/navigator_vision/CMakeLists.txt b/perception/navigator_vision/CMakeLists.txt index c1887b89..8c6d8885 100644 --- a/perception/navigator_vision/CMakeLists.txt +++ b/perception/navigator_vision/CMakeLists.txt @@ -76,11 +76,12 @@ add_library( navigator_vision_lib src/navigator_vision_lib/visualization.cc src/navigator_vision_lib/cv_utils.cc + src/navigator_vision_lib/image_filtering.cpp src/navigator_vision_lib/colorizer/pcd_colorizer.cpp src/navigator_vision_lib/colorizer/single_cloud_processor.cpp src/navigator_vision_lib/colorizer/camera_observer.cpp src/navigator_vision_lib/colorizer/color_observation.cpp - ) +) target_include_directories(navigator_vision_lib PUBLIC include/navigator_vision_lib) target_link_libraries(navigator_vision_lib navigator_tools_lib ${catkin_LIBRARIES} ${Boost_LIBRARIES} ${PCL_COMMON_LIBRARIES} ${PCL_COMMON_LIBRARIES}) @@ -110,6 +111,16 @@ target_link_libraries( ${OpenCV_INCLUDE_DIRS} ) +add_executable(underwater_shape_identification_vision nodes/underwater_shape_identification_vision.cpp) +add_dependencies(underwater_shape_identification_vision navigator_vision_lib ${catkin_EXPORTED_TARGETS}) +target_link_libraries( + underwater_shape_identification_vision + navigator_vision_lib + ${catkin_LIBRARIES} + ${Boost_LIBRARIES} + ${OpenCV_INCLUDE_DIRS} +) + add_executable(shape_identification nodes/shape_identification/main.cpp nodes/shape_identification/DockShapeVision.cpp diff --git a/perception/navigator_vision/nodes/underwater_shape_identification_vision.cpp b/perception/navigator_vision/nodes/underwater_shape_identification_vision.cpp new file mode 100644 index 00000000..39dd9b4e --- /dev/null +++ 
b/perception/navigator_vision/nodes/underwater_shape_identification_vision.cpp @@ -0,0 +1,59 @@ +#include +#include + +#include + +#include +#include + +using namespace std; + +int main(int argc, char** argv) +{ + cout << "\033[1;31mUnderwater Shape Identification Vision\033[0m" << endl; + int rotations = stoi(argv[2]); + + // set up image acquisition + string cam_topic {"down/image_raw"}; + size_t history_length{200}; + ros::init(argc, argv, "underwater_shape_identification_vision"); + ros::NodeHandle nh; + + // cv::Vec3b to get color imgs + nav::ROSCameraStream left_cam_stream(nh, history_length); + if(!left_cam_stream.init(cam_topic)) + return -1; + + // Display most recent frame in the buffer + while(left_cam_stream.ok()) + { + size_t size = left_cam_stream.size(); + if(size > 0) + { + cv::Mat latest = left_cam_stream[0]->image(); + cv::Mat _latest = latest; + // cv::cvtColor(latest, _latest, CV_BGR2RGB); + cv::imshow("newest_frame", _latest); + } + char code = cv::waitKey(100); + cout << "ESC = " << int('\033') << " waitKey --> " << code << endl; + if(code != -1) + break; + } + + // Test kernel rotation and averaging + auto file_name = "/home/santiago/mil_ws/src/Navigator/perception/navigator_vision/templates/" + string(argv[1]) + ".png"; + cv::Mat kernel = cv::imread(file_name, CV_LOAD_IMAGE_GRAYSCALE); + if(!kernel.data) + { + cout << "Could not load image from " << file_name << endl; + return -1; + } + + cv::Mat rot_invar_kernel; + makeRotInvariant(kernel, rotations).convertTo(rot_invar_kernel, CV_8UC1); + cv::imshow("rot invariant kernel", rot_invar_kernel); + cv::waitKey(0); + return 0; +} + From 3dcabae5f7e0b03a548d6ffbc8a37adb4581dbee Mon Sep 17 00:00:00 2001 From: David Soto Date: Sun, 4 Dec 2016 02:50:21 -0500 Subject: [PATCH 173/267] PERCEPTION: add rotational invariance for filter kernels --- .../navigator_vision_lib/image_filtering.hpp | 16 ++++++ .../navigator_vision_lib/image_filtering.cpp | 50 +++++++++++++++++++ 2 files changed, 66 insertions(+) create mode 100644 perception/navigator_vision/include/navigator_vision_lib/image_filtering.hpp create mode 100644 perception/navigator_vision/src/navigator_vision_lib/image_filtering.cpp diff --git a/perception/navigator_vision/include/navigator_vision_lib/image_filtering.hpp b/perception/navigator_vision/include/navigator_vision_lib/image_filtering.hpp new file mode 100644 index 00000000..6e5964c7 --- /dev/null +++ b/perception/navigator_vision/include/navigator_vision_lib/image_filtering.hpp @@ -0,0 +1,16 @@ +#pragma once + +#include + +#include +#include + +/* + Creates a new kernel that is rotationally invariant by averaging rotated instances of the + original. + kernel - original kernel + rotations - OPTIONAL. The number of the times that the original kernel will be rotated before + the rotated versions are averaged together. The rotated kernels will be uniformly spread + angularly. 
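+
+  Illustrative usage (the template file name is an example, matching the shape templates added
+  earlier in this series):
+    cv::Mat cross = cv::imread("templates/cross.png", CV_LOAD_IMAGE_GRAYSCALE);
+    cv::Mat iso_kernel = makeRotInvariant(cross, 12);  // average of 12 uniformly rotated copies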
+*/ +cv::Mat makeRotInvariant(const cv::Mat &kernel, int rotations=8); diff --git a/perception/navigator_vision/src/navigator_vision_lib/image_filtering.cpp b/perception/navigator_vision/src/navigator_vision_lib/image_filtering.cpp new file mode 100644 index 00000000..6a981cbb --- /dev/null +++ b/perception/navigator_vision/src/navigator_vision_lib/image_filtering.cpp @@ -0,0 +1,50 @@ +#include + +cv::Mat makeRotInvariant(const cv::Mat &kernel, int rotations) +{ + // Determine affine transform to move from top left to center of output size + cv::Point2f c_org{kernel.cols * 0.5f, kernel.rows * 0.5f}; // center of original + float hypot = std::hypot(c_org.x, c_org.y); + cv::Point2f c_dest{hypot, hypot}; + float center_dest_coeffs[6] = {1.0f, 0.0f, c_dest.x - c_org.x, 0.0f, 1.0f, c_dest.y - c_org.y}; + cv::Mat center_dest = cv::Mat{2, 3, CV_32F, center_dest_coeffs}; + + // Move into rotation position in larger canvas + cv::Mat dest = cv::Mat::zeros(cv::Size(hypot * 2, hypot * 2), CV_8U); + auto dest_top_left = dest(cv::Rect(0, 0, kernel.cols, kernel.rows)); + kernel.copyTo(dest_top_left); + cv::warpAffine(dest, dest, center_dest, dest.size()); // center dest is ready for rotating + + // Get angles of rotation + std::vector rotation_angles; + float delta_theta = 360.0f / rotations; + float theta = 0.0f; + while(true) + { + theta += delta_theta; + if(theta < 360.0f) + rotation_angles.push_back(theta); + else + break; + } + + // Incrementally rotate and save versions of original kernel + std::vector kernel_rotations; + kernel_rotations.push_back(dest); + int i = 0; + for(auto theta : rotation_angles) + { + cv::Mat rotated_kernel; + cv::Mat rot_mat = cv::getRotationMatrix2D(c_dest, theta, 1.0); + cv::warpAffine(dest, rotated_kernel, rot_mat, dest.size()); + kernel_rotations.push_back(rotated_kernel); + } + + // Average all rotated versions + cv::Mat sum = cv::Mat::zeros(dest.size(), CV_32S); + for(auto& rot_kernel : kernel_rotations) + cv::add(sum, rot_kernel, sum, cv::Mat(), CV_32S); + cv::Mat result = sum / float(kernel_rotations.size()); + result.convertTo(result, kernel.type()); + return result; +} From 47526cd0bb89f8c5095b3dd5797176cd25e90c54 Mon Sep 17 00:00:00 2001 From: David Soto Date: Sun, 4 Dec 2016 02:57:03 -0500 Subject: [PATCH 174/267] PERCEPTION: remove repeated files from bad merge --- perception/navigator_vision/CMakeLists.txt | 6 +- .../navigator_vision_lib/camera_frame.hpp | 223 --------------- .../camera_frame_sequence.hpp | 118 -------- .../navigator_vision_lib/camera_model.hpp | 62 ---- .../ros_camera_stream.hpp | 267 ------------------ 5 files changed, 5 insertions(+), 671 deletions(-) delete mode 100644 perception/navigator_vision/include/navigator_vision_lib/camera_frame.hpp delete mode 100644 perception/navigator_vision/include/navigator_vision_lib/camera_frame_sequence.hpp delete mode 100644 perception/navigator_vision/include/navigator_vision_lib/camera_model.hpp delete mode 100644 perception/navigator_vision/include/navigator_vision_lib/ros_camera_stream.hpp diff --git a/perception/navigator_vision/CMakeLists.txt b/perception/navigator_vision/CMakeLists.txt index 8c6d8885..4d64e9d1 100644 --- a/perception/navigator_vision/CMakeLists.txt +++ b/perception/navigator_vision/CMakeLists.txt @@ -111,11 +111,15 @@ target_link_libraries( ${OpenCV_INCLUDE_DIRS} ) -add_executable(underwater_shape_identification_vision nodes/underwater_shape_identification_vision.cpp) +add_executable(underwater_shape_identification_vision + nodes/underwater_shape_identification_vision.cpp + 
src/missions/underwater_shape_identification.cpp +) add_dependencies(underwater_shape_identification_vision navigator_vision_lib ${catkin_EXPORTED_TARGETS}) target_link_libraries( underwater_shape_identification_vision navigator_vision_lib + navigator_tools_lib ${catkin_LIBRARIES} ${Boost_LIBRARIES} ${OpenCV_INCLUDE_DIRS} diff --git a/perception/navigator_vision/include/navigator_vision_lib/camera_frame.hpp b/perception/navigator_vision/include/navigator_vision_lib/camera_frame.hpp deleted file mode 100644 index a955f4fa..00000000 --- a/perception/navigator_vision/include/navigator_vision_lib/camera_frame.hpp +++ /dev/null @@ -1,223 +0,0 @@ -#pragma once - -#include -#include -#include -#include -#include -#include -#include -#include -#include - - -namespace nav -{ - -// Helper function prototype -std::string __getROSImgEncoding(const sensor_msgs::ImageConstPtr &image_msg_ptr); - -enum class PixelType -{ -/* - Enumeration for dealing with different image pixel types - The underlying integers for these enums are compatible with OpenCV's - "CV_C" - macros. The benefit to having these is that we do not have to have OpenCV as a dependency. - Theoretically, the CameraFrame Objects need not use a cv::Mat and this class' functionality - would not be affected. The underscore in front of these enums is to solve naming conflicts - with commonly OpenCV and Eigen macros. -*/ - _8UC1=0, _8SC1, _16UC1, _16SC1, _32SC1, _32FC1, _64FC1, - _8UC2=8, _8SC2, _16UC2, _16SC2, _32SC2, _32FC2, _64FC2, - _8UC3=16, _8SC3, _16UC3, _16SC3, _32SC3, _32FC3, _64FC3, - _8UC4=24, _8SC4, _16UC4, _16SC4, _32SC4, _32FC4, _64FC4, _UNKNOWN = -1 -}; - -template, - typename time_t_ = ros::Time, - typename img_scalar_t = uint8_t, - typename float_t = float> -class CameraFrame -{ -/* - This class is used to represent a single frame from a camera. It contains information about - the time the image was taken, the id and camera parameters of the camera that took it and the - image itself. 
This cis a templated class whose first template parameter is the type of the - pixel elements (commonly uint8_t for grayscale images and cv::Vec3b for RGB images) -*/ - -public: - - /////////////////////////////////////////////////////////////////////////////////////////////// - // Constructors and Destructors /////////////////////////////////////////////////////////////// - /////////////////////////////////////////////////////////////////////////////////////////////// - - CameraFrame() // Default Constructor - { - } - - CameraFrame(const CameraFrame &other) // Copy Constructor - { - this->_seq = other._seq; - this->_stamp = other._stamp; - this->_image = other._image.clone(); // Object will have unoque copy of image data - this->cam_model_ptr = other.cam_model_ptr; - } - - // From ROS img msg - CameraFrame(const sensor_msgs::ImageConstPtr &image_msg_ptr, boost::shared_ptr &cam_model_ptr, - bool is_rectified = false, float_t store_at_scale = 1.0); - - CameraFrame(const sensor_msgs::ImageConstPtr &image_msg_ptr, - const sensor_msgs::CameraInfoConstPtr &info_msg_ptr) - { - } - - ~CameraFrame() - { - } - - /////////////////////////////////////////////////////////////////////////////////////////////// - // Public Methods ///////////////////////////////////////////////////////////////////////////// - /////////////////////////////////////////////////////////////////////////////////////////////// - - cam_model_ptr_t getCameraModelPtr() const - { - return cam_model_ptr; - } - - unsigned int seq() const - { - return _seq; - } - - time_t_ stamp() const - { - return _stamp; - } - - const cv::Mat_& image() const - { - return _image; - } - - bool rectified() const - { - return _rectified; - } - - float_t getImageScale() const - { - return _img_scale; - } - - void copyImgTo(cv::Mat dest) const - { - dest = image.clone(); - } - - bool isCameraGeometryKnown() const - { - return cam_model_ptr == nullptr; - } - - -private: - - /////////////////////////////////////////////////////////////////////////////////////////////// - // Private Members //////////////////////////////////////////////////////////////////////////// - /////////////////////////////////////////////////////////////////////////////////////////////// - - // Frames from the same source will have sequentially increasing seq numbers - unsigned int _seq; - - // Time that this image was taken - time_t_ _stamp; - - // Stores the image data (not a cv:Mat header pointing to shared image data) - cv::Mat_ _image; - - // Points to a camera model object that stores information about the intrinsic and extrinsic - // geometry of the camera used to take this image - cam_model_ptr_t cam_model_ptr; - - // Identifies if this image has already been rectified with the distortion parameters in the - // associated camera model object - bool _rectified = false; - - // Scale of the image compared to that which would be generated by projecting using the camera - // geometry expresed in the associated camera model object - float_t _img_scale = 1.0f; - - // Stores the pixel data type - nav::PixelType TYPE = nav::PixelType::_UNKNOWN; - - - /////////////////////////////////////////////////////////////////////////////////////////////// - // Private Methods //////////////////////////////////////////////////////////////////////////// - /////////////////////////////////////////////////////////////////////////////////////////////// - - void project3DPointToImagePlane(Eigen::Matrix cam_frame_pt); -}; - 
-/////////////////////////////////////////////////////////////////////////////////////////////////// -////// Templated function implementations ///////////////////////////////////////////////////////// -/////////////////////////////////////////////////////////////////////////////////////////////////// - -// Constructor from ROS image message and ROS pinhole camera model -template -CameraFrame::CameraFrame( - const sensor_msgs::ImageConstPtr &image_msg_ptr, boost::shared_ptr &cam_model_ptr, - bool is_rectified, float_t store_at_scale) -try -{ - // ROS image message decoding - cv_bridge::CvImageConstPtr _ros_img_bridge; - std::string encoding = __getROSImgEncoding(image_msg_ptr); - _ros_img_bridge = cv_bridge::toCvShare(image_msg_ptr, encoding); - _ros_img_bridge->image.copyTo(_image); - - // Resize image as requested - if(store_at_scale != 1.0) - { - cv::resize(_image, _image, cv::Size(0, 0), store_at_scale, store_at_scale); - this->_img_scale = store_at_scale; - } - - // Store ptr to cam model object - this->cam_model_ptr = cam_model_ptr; - - this->_rectified = is_rectified; - - // Get header information - _seq = image_msg_ptr->header.seq; - _stamp = image_msg_ptr->header.stamp; -} -catch(cv_bridge::Exception &e) -{ - ROS_WARN("Error converting sensor_msgs::ImageConstPtr to cv::Mat."); -} -catch( cv::Exception &e ) -{ - std::string err_msg = "Error copying cv::Mat created from ROS image message to the cv::Mat stored in the Camera Frame Object: " - + std::string(e.what()); - ROS_WARN(err_msg.c_str()); - std::cout << "exception caught: " << err_msg << std::endl; -} -catch(const std::exception &e) -{ - ROS_WARN(e.what()); -} - -/////////////////////////////////////////////////////////////////////////////////////////////////// -// Helper Functions /////////////////////////////////////////////////////////////////////////////// -/////////////////////////////////////////////////////////////////////////////////////////////////// - -std::string __getROSImgEncoding(const sensor_msgs::ImageConstPtr &image_msg_ptr) -{ - return image_msg_ptr->encoding; -} - -} // namespace nav - diff --git a/perception/navigator_vision/include/navigator_vision_lib/camera_frame_sequence.hpp b/perception/navigator_vision/include/navigator_vision_lib/camera_frame_sequence.hpp deleted file mode 100644 index 0e077b02..00000000 --- a/perception/navigator_vision/include/navigator_vision_lib/camera_frame_sequence.hpp +++ /dev/null @@ -1,118 +0,0 @@ -#pragma once - -#include -#include -#include -#include -#include -#include -#include - -namespace nav -{ - -template, - typename time_t_ = ros::Time, - typename img_scalar_t = uint8_t, - typename float_t = float> -class CameraFrameSequence -{ -/* - This class is used to store and operate on camera frame sequences (videos). It contains methods - to make it more convenient to create visual algorithms that take into account temporal - information. - It also has convenient constructors to construct these sequences by subscribing to - ROS camera topics, by taking in frames from a v4l camera or by, loading frames from a saved - video file. - It is also possible to draw on contained camera frames as a batch. 
-*/ - -// Type Alises -using CircularBuffer = boost::circular_buffer>>; - -public: - - /////////////////////////////////////////////////////////////////////////////////////////////// - // Constructors and Destructors /////////////////////////////////////////////////////////////// - /////////////////////////////////////////////////////////////////////////////////////////////// - - // Default Constructor - CameraFrameSequence(size_t capacity) : _frame_ptr_circular_buffer(capacity) - { - } - - // TODO: implement copy and move constructors - - ~CameraFrameSequence() - { - } - - - /////////////////////////////////////////////////////////////////////////////////////////////// - // Public Methods ///////////////////////////////////////////////////////////////////////////// - /////////////////////////////////////////////////////////////////////////////////////////////// - - virtual typename CircularBuffer::iterator begin() - { - return _frame_ptr_circular_buffer.begin(); - } - - virtual typename CircularBuffer::iterator end() - { - return _frame_ptr_circular_buffer.end(); - } - - size_t length() const - { - return _frame_ptr_circular_buffer.size(); - } - - // Accessors - - // Returns a copy of the CameraFrame taken closest to the given time - virtual boost::shared_ptr const> - getFrameFromTime(time_t_) = 0; - - // Returns reference to the nth frame from the most recent. For example frame_sequence[0] is - // the most recent frame, frame_sequence[-1] is the oldest frame, and frame_sequence[-2] is - // the second oldest frame - virtual boost::shared_ptr const> - operator[](int) = 0; - - - /////////////////////////////////////////////////////////////////////////////////////////////// - // Public Members ///////////////////////////////////////////////////////////////////////////// - /////////////////////////////////////////////////////////////////////////////////////////////// - -protected: - - /////////////////////////////////////////////////////////////////////////////////////////////// - // Protected Methods ////////////////////////////////////////////////////////////////////////// - /////////////////////////////////////////////////////////////////////////////////////////////// - - virtual void _addFrame(boost::shared_ptr> &new_frame_ptr) = 0; - - /////////////////////////////////////////////////////////////////////////////////////////////// - // Private Members //////////////////////////////////////////////////////////////////////////// - /////////////////////////////////////////////////////////////////////////////////////////////// - - // Container for all included CameraFrame objects - CircularBuffer _frame_ptr_circular_buffer; - - // cv::Ptr to a shared CameraInfo object for all frames in the sequence. 
If null, it indicates - // that the Frames have different CameraInfo objects which should be examined individually - cam_model_ptr_t _cam_model_ptr; - - // Shared CameraFrame properties - int COLS = -1; - int ROWS = -1; - nav::PixelType TYPE = nav::PixelType::_UNKNOWN; - - bool CONST_CAM_GEOMETRY = false; - bool KNOWN_CAM_GEOMETRY = false; - - time_t_ _start_time {}; - time_t_ _end_time {}; -}; - -} // namespace nav \ No newline at end of file diff --git a/perception/navigator_vision/include/navigator_vision_lib/camera_model.hpp b/perception/navigator_vision/include/navigator_vision_lib/camera_model.hpp deleted file mode 100644 index 477b5497..00000000 --- a/perception/navigator_vision/include/navigator_vision_lib/camera_model.hpp +++ /dev/null @@ -1,62 +0,0 @@ -#pragma once - -#include -#include - -namespace nav{ - -template -class CameraModel{ -public: - - /////////////////////////////////////////////////////////////////////////////////////////////// - // Constructors and Destructors /////////////////////////////////////////////////////////////// - /////////////////////////////////////////////////////////////////////////////////////////////// - - CameraModel() - { - } - - ~CameraModel() - { - } - -private: - - /////////////////////////////////////////////////////////////////////////////////////////////// - // Private Methods //////////////////////////////////////////////////////////////////////////// - /////////////////////////////////////////////////////////////////////////////////////////////// - Eigen::Matrix get_projection_matrix(); - - - /////////////////////////////////////////////////////////////////////////////////////////////// - // Private Members //////////////////////////////////////////////////////////////////////////// - /////////////////////////////////////////////////////////////////////////////////////////////// - - int ROWS = 0; - int COLS = 0; - - // Distortion model (only the plum-bob model is currently supported) - T D[5] = {0, 0, 0, 0, 0}; - - // Camera Geometry - Eigen::Matrix K; // Camera intrinsics - Eigen::Matrix C; // Center of projection in world frame - Eigen::Matrix R; // Orientation of camera frame relative to world frame - - -}; - - -template -Eigen::Matrix CameraModel::get_projection_matrix() -{ - Eigen::Matrix v; - Eigen::DiagonalMatrix I = v.asDiagonal(); - Eigen::Matrix aug; - aug.block(0, 0, 2, 2) = I; - aug. 
diff --git a/perception/navigator_vision/include/navigator_vision_lib/ros_camera_stream.hpp b/perception/navigator_vision/include/navigator_vision_lib/ros_camera_stream.hpp deleted file mode 100644 index 39fb631f..00000000 --- a/perception/navigator_vision/include/navigator_vision_lib/ros_camera_stream.hpp +++ /dev/null @@ -1,267 +0,0 @@ -#pragma once - -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include - -// DBG -#include - - -namespace nav -{ - -template -class ROSCameraStream : public CameraFrameSequence, ros::Time, img_scalar_t, float_t> -{ - -// Type Aliases -using CamFrame = CameraFrame, ros::Time, img_scalar_t, float_t>; -using CamFramePtr = boost::shared_ptr, ros::Time, img_scalar_t, float_t>>; -using CamFrameConstPtr = boost::shared_ptr, ros::Time, img_scalar_t, float_t> const>; -using CamFrameSequence = CameraFrameSequence, ros::Time, img_scalar_t, float_t>; - - -public: - - /////////////////////////////////////////////////////////////////////////////////////////////// - // Constructors and Destructors /////////////////////////////////////////////////////////////// - /////////////////////////////////////////////////////////////////////////////////////////////// - - // Default Constructor - ROSCameraStream(ros::NodeHandle nh, size_t buffer_size) - : CamFrameSequence(buffer_size), - _it(nh) - { - this->_nh = nh; - this->_capacity = buffer_size; - } - - ~ROSCameraStream(); - - /////////////////////////////////////////////////////////////////////////////////////////////// - // Public Methods ///////////////////////////////////////////////////////////////////////////// - /////////////////////////////////////////////////////////////////////////////////////////////// - - bool init(std::string camera_topic); - - CamFrameConstPtr getFrameFromTime(ros::Time desired_time); - - CamFrameConstPtr operator[](int i); - - /////////////////////////////////////////////////////////////////////////////////////////////// - // Public Members ///////////////////////////////////////////////////////////////////////////// - /////////////////////////////////////////////////////////////////////////////////////////////// - -private: - - /////////////////////////////////////////////////////////////////////////////////////////////// - // Private Methods //////////////////////////////////////////////////////////////////////////// - /////////////////////////////////////////////////////////////////////////////////////////////// - - void _addFrame(CamFramePtr &new_frame_ptr); - - void _newFrameCb(const sensor_msgs::ImageConstPtr &image_msg_ptr); - - - /////////////////////////////////////////////////////////////////////////////////////////////// - // Private Members //////////////////////////////////////////////////////////////////////////// - /////////////////////////////////////////////////////////////////////////////////////////////// - - // Initialization flag - bool _initialized = false; - - // Mutex for multithreaded access to camera frame data - std::mutex _mtx; - - // ROS node handle - ros::NodeHandle _nh; - - // ROS image transport - image_transport::ImageTransport _it; - - // Custom ROS Callback Queue - ros::CallbackQueue _cb_queue; - - // Flexible topic subscription with potential for filtering and chaining - message_filters::Subscriber img_sub; - - // ROS spinner to handle callbacks in a background thread - ros::AsyncSpinner async_spinner {1, &_cb_queue}; - - // The maximum
number of frames that this object will hold at one time - size_t _capacity = -1; - - // The ros topic we will subscribe to - std::string _img_topic = "uninitialized"; -}; - -/////////////////////////////////////////////////////////////////////////////////////////////////// -////// Templated function implementations ///////////////////////////////////////////////////////// -/////////////////////////////////////////////////////////////////////////////////////////////////// - -// Initializer for when there is an image msg being published w/ a corresponding camera info msg -template -bool ROSCameraStream::init(std::string camera_topic) -{ - this->_img_topic = camera_topic; - bool success = false; - image_transport::CameraSubscriber cam_sub; - - // subscribes to image msg and camera info and initializes CameraModel Object then disconnects subscriber - auto init_lambda = - [&](const sensor_msgs::ImageConstPtr &image_msg_ptr, const sensor_msgs::CameraInfoConstPtr &info_msg_ptr) mutable - { - ROS_WARN("Initializing ROSCameraStream object."); - // Set ptr to camera model object - boost::shared_ptr camera_model_ptr{new image_geometry::PinholeCameraModel()}; - camera_model_ptr->fromCameraInfo(info_msg_ptr); - this->_cam_model_ptr = camera_model_ptr; - - // Set metadata attributes - this->ROWS = image_msg_ptr->height; - this->COLS = image_msg_ptr->width; - this->CONST_CAM_GEOMETRY = true; - this->KNOWN_CAM_GEOMETRY = true; - success = true; - cam_sub.shutdown(); - ROS_WARN("ROSCameraStream object initialized."); - return; - }; - - // Subscribe to both camera topic and camera info topic - cam_sub = _it.subscribeCamera(camera_topic, 100, init_lambda); - - // The amount of time that we will try to wait for a cb from cam_sub - ros::Duration timeout{5, 0}; - auto sub_start = ros::Time::now(); - - // Loop to check if we get callbacks from cam_sub - ros::Rate rate{10}; // 10 Hz so we don't spam the CPU - - while(ros::Time::now() - sub_start < timeout) - { - ros::spinOnce(); - if(success) - { - // Subscribe to ROS image topic - img_sub.subscribe(_nh, this->_img_topic, 100, ros::TransportHints(), &_cb_queue); - - // Register callback to process frames published on camera img topic - auto img_cb = [this](const sensor_msgs::ImageConstPtr &image_msg_ptr){this->_newFrameCb(image_msg_ptr);}; - img_sub.registerCallback(img_cb); - - // Start listening for image messages in a background thread - async_spinner.start(); - - this->_initialized = true; - return true; - } - rate.sleep(); - } - - std::string err_msg {"timeout: failed to initialize ROSCameraStream object with camera topic '"}; - err_msg += camera_topic + "'."; - ROS_WARN(err_msg.c_str()); - this->_initialized = false; - return false; -} -// TODO: handle construction from img msg when there is no matching camera info msg - -template -typename ROSCameraStream::CamFrameConstPtr -ROSCameraStream::getFrameFromTime(ros::Time desired_time) -{ - CamFrameConstPtr closest_in_time = this->operator[](0); - double min_abs_nsec_time_diff = fabs((this->operator[](0)->stamp() - desired_time).toNSec()); - double abs_nsec_time_diff = -1; - for(CamFrameConstPtr frame_ptr : this->_frame_ptr_circular_buffer) - { - abs_nsec_time_diff = fabs((frame_ptr->stamp() - desired_time).toNSec()); - if(abs_nsec_time_diff < min_abs_nsec_time_diff) - { - closest_in_time = frame_ptr; - min_abs_nsec_time_diff = abs_nsec_time_diff; - } - } - return closest_in_time; -} - -template -typename ROSCameraStream::CamFrameConstPtr -ROSCameraStream::operator[](int i) -{ - // Prevent adding new frames while frames are being accessed - _mtx.lock(); - - // shared_ptr to a dynamically allocated reference frame object - CamFrameConstPtr shared_ptr_to_const_frame; - - try - { - if(i >= 0) // regular access for non-negative indices - { - shared_ptr_to_const_frame = this->_frame_ptr_circular_buffer[i]; - } - else{ // reverse access for negative indices (ex. [-1] refers to the last element) - size_t past_last_idx = this->_frame_ptr_circular_buffer.end() - this->_frame_ptr_circular_buffer.begin(); - shared_ptr_to_const_frame = this->_frame_ptr_circular_buffer[past_last_idx + i]; - } - } - catch(std::exception &e) - { - std::string err_msg = "The circular buffer index you are trying to access is out of bounds:\n" + std::string(e.what()); - ROS_WARN(err_msg.c_str()); - } - - - - _mtx.unlock(); - return shared_ptr_to_const_frame; -} - -template -void -ROSCameraStream::_addFrame(CamFramePtr &new_frame_ptr) -{ - // Prevent accessing frames while new frames are being added - _mtx.lock(); - this->_frame_ptr_circular_buffer.push_front(new_frame_ptr); - _mtx.unlock(); -} - -template -void ROSCameraStream::_newFrameCb(const sensor_msgs::ImageConstPtr &image_msg_ptr) -{ - // Check if the topic name contains the string "rect" - bool rectified = this->_img_topic.find(std::string("rect")) != std::string::npos; - - // Create shared pointer to dynamically allocated Camera Frame object constructed from ros img msg - boost::shared_ptr, ros::Time, img_scalar_t, float_t>> new_frame_ptr - {new CameraFrame, ros::Time, img_scalar_t, float_t> - (image_msg_ptr, this->_cam_model_ptr, rectified, 1.0)}; - - // Add shared pointer to CameraFrame object to the circular buffer - _addFrame(new_frame_ptr); - - // Update the bounding timestamps for the frame sequence - this->_start_time = image_msg_ptr->header.stamp; - this->_end_time = this->_frame_ptr_circular_buffer.back()->stamp(); -} - -template -ROSCameraStream::~ROSCameraStream() -{ - img_sub.unsubscribe(); - _cb_queue.clear(); - _cb_queue.disable(); -} - -} // namespace nav \ No newline at end of file
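
Downstream nodes in this repo drove the class roughly as follows. This is a sketch only: the template parameter list was stripped from this log, so uint8_t/float are guesses for img_scalar_t/float_t, and the topic name is illustrative:

    #include <ros/ros.h>
    #include <navigator_vision_lib/ros_camera_stream.hpp>

    int main(int argc, char** argv)
    {
      ros::init(argc, argv, "camera_stream_demo");
      ros::NodeHandle nh;
      nav::ROSCameraStream<uint8_t, float> stream{nh, 200};  // keep the last 200 frames
      if (!stream.init("/down/image_raw"))
        return -1;
      ros::Duration(2.0).sleep();    // let a few frames arrive
      if (stream.length() > 0)       // [0] is newest, [-1] is oldest
      {
        auto newest = stream[0];
        auto near_past = stream.getFrameFromTime(ros::Time::now() - ros::Duration(1.0));
        ROS_INFO("newest: %f, closest to 1s ago: %f",
                 newest->stamp().toSec(), near_past->stamp().toSec());
      }
      return 0;
    }
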
From 37be6e5d00948334f75a975005c6830d612e94ba Mon Sep 17 00:00:00 2001 From: David Soto Date: Sun, 4 Dec 2016 06:01:46 -0500 Subject: [PATCH 175/267] PERCEPTION: add UnderwaterShapeDetector + Shape class --- .../underwater_shape_identification.hpp | 62 +++++++++++++++++++ .../underwater_shape_identification.cpp | 4 ++ 2 files changed, 66 insertions(+) create mode 100644 perception/navigator_vision/include/missions/underwater_shape_identification.hpp create mode 100644 perception/navigator_vision/src/missions/underwater_shape_identification.cpp diff --git a/perception/navigator_vision/include/missions/underwater_shape_identification.hpp b/perception/navigator_vision/include/missions/underwater_shape_identification.hpp new file mode 100644 index 00000000..d300d6e0 --- /dev/null +++ b/perception/navigator_vision/include/missions/underwater_shape_identification.hpp @@ -0,0 +1,62 @@ +#pragma once + +#include +#include +#include +#include +#include + +#include +#include + +#include + +#include +#include +#include + +class ShapeDetection; + +class Shape +{ + using DetectionConstPtr = typename std::unique_ptr; + std::string _name; + std::string _template_path; + float _pixels_per_meter; + float _radial_symmetry_angle; + cv::Mat _template; +public: + static void loadShapes(std::string directory); + void load(std::string path); + std::string name() const { return _name; } + std::string path() const { return _template_path; } + float scale() const { return _pixels_per_meter; } + float
radialSymmetryAngle() const { return _radial_symmetry_angle; } + cv::Mat const * const image() const { return &_template; } + std::unique_ptr detect(bool try_rotation=false, int count=10) const; +}; + +struct ShapeDetection +{ + Shape* shape; + ros::Time time; + cv::Point2f location; + float depth; + float confidence; + std::shared_ptr image; + cv::RotatedRect roi; +}; + +class UnderwaterShapeDetector +{ + std::string _template_dir; + float _template_max_dimension; + std::map _shapes; + float _search_depth; + float _depth_uncertainty; + std::map _detection_thresholds; + std::map> _detections; + int _min_detections; +public: + UnderwaterShapeDetector(); +}; diff --git a/perception/navigator_vision/src/missions/underwater_shape_identification.cpp b/perception/navigator_vision/src/missions/underwater_shape_identification.cpp new file mode 100644 index 00000000..5f763a04 --- /dev/null +++ b/perception/navigator_vision/src/missions/underwater_shape_identification.cpp @@ -0,0 +1,4 @@ +#include + + + From ce0c19b53eb0904eb7b9cbbb6d89bbb53fcc223a Mon Sep 17 00:00:00 2001 From: David Soto Date: Sun, 4 Dec 2016 11:23:59 -0500 Subject: [PATCH 176/267] UTILS: add PI constant to navigator_tools header --- .../config/underwater_shape_detection.yaml | 11 +++++++++++ .../circle.png | Bin .../{ => underwater_shape_identification}/cross.png | Bin .../triangle.png | Bin utils/navigator_tools/include/navigator_tools.hpp | 5 ++++- utils/navigator_tools/src/navigator_tools.cpp | 2 +- 6 files changed, 16 insertions(+), 2 deletions(-) create mode 100644 perception/navigator_vision/config/underwater_shape_detection.yaml rename perception/navigator_vision/templates/{ => underwater_shape_identification}/circle.png (100%) rename perception/navigator_vision/templates/{ => underwater_shape_identification}/cross.png (100%) rename perception/navigator_vision/templates/{ => underwater_shape_identification}/triangle.png (100%) diff --git a/perception/navigator_vision/config/underwater_shape_detection.yaml b/perception/navigator_vision/config/underwater_shape_detection.yaml new file mode 100644 index 00000000..bcd51932 --- /dev/null +++ b/perception/navigator_vision/config/underwater_shape_detection.yaml @@ -0,0 +1,11 @@ +# Underwater Shape Identification Challenge Parameters + +template_directory : "templates/underwater_shape_identification/" +depth : 5 +depth_uncertainty : 1 +min_detections : 10 + +camera_topic : "/down/image_raw" +buffer_size : 5 + +# calibration_images : "calib/underwater_shape_identification/" diff --git a/perception/navigator_vision/templates/circle.png b/perception/navigator_vision/templates/underwater_shape_identification/circle.png similarity index 100% rename from perception/navigator_vision/templates/circle.png rename to perception/navigator_vision/templates/underwater_shape_identification/circle.png diff --git a/perception/navigator_vision/templates/cross.png b/perception/navigator_vision/templates/underwater_shape_identification/cross.png similarity index 100% rename from perception/navigator_vision/templates/cross.png rename to perception/navigator_vision/templates/underwater_shape_identification/cross.png diff --git a/perception/navigator_vision/templates/triangle.png b/perception/navigator_vision/templates/underwater_shape_identification/triangle.png similarity index 100% rename from perception/navigator_vision/templates/triangle.png rename to perception/navigator_vision/templates/underwater_shape_identification/triangle.png diff --git a/utils/navigator_tools/include/navigator_tools.hpp 
b/utils/navigator_tools/include/navigator_tools.hpp index 755aae9c..34d23a7c 100644 --- a/utils/navigator_tools/include/navigator_tools.hpp +++ b/utils/navigator_tools/include/navigator_tools.hpp @@ -5,6 +5,9 @@ #include namespace nav{ + +static const double PI = 3.1415926535897932; + namespace tools{ // Returns a vector of topic names (std::string) that end with "image_rect_color" @@ -18,4 +21,4 @@ inline std::string operator "" _s(const char* str, size_t /*length*/) } } // namespace nav::tools -} // namespace nav \ No newline at end of file +} // namespace nav diff --git a/utils/navigator_tools/src/navigator_tools.cpp b/utils/navigator_tools/src/navigator_tools.cpp index 50eab951..ee932fcf 100644 --- a/utils/navigator_tools/src/navigator_tools.cpp +++ b/utils/navigator_tools/src/navigator_tools.cpp @@ -40,4 +40,4 @@ vector getRectifiedImageTopics(bool color) } } // namespace nav::tools -} // namespace nav \ No newline at end of file +} // namespace nav From eb15b05c91af8818a4884d0dee66830691739b44 Mon Sep 17 00:00:00 2001 From: David Soto Date: Sun, 4 Dec 2016 11:32:34 -0500 Subject: [PATCH 177/267] PERCEPTION: add config of underwater shape id via yaml --- .../config/underwater_shape_detection.yaml | 8 +- .../underwater_shape_identification.hpp | 31 +++++-- ...underwater_shape_identification_vision.cpp | 23 ++--- .../underwater_shape_identification.cpp | 90 +++++++++++++++++++ 4 files changed, 128 insertions(+), 24 deletions(-) diff --git a/perception/navigator_vision/config/underwater_shape_detection.yaml b/perception/navigator_vision/config/underwater_shape_detection.yaml index bcd51932..570ffd03 100644 --- a/perception/navigator_vision/config/underwater_shape_detection.yaml +++ b/perception/navigator_vision/config/underwater_shape_detection.yaml @@ -1,9 +1,13 @@ # Underwater Shape Identification Challenge Parameters template_directory : "templates/underwater_shape_identification/" -depth : 5 -depth_uncertainty : 1 +depth : 5.0 +depth_uncertainty : 1.0 +shape_area : 1.0 min_detections : 10 +min_boat_displacement : 0.1 +is_target : {"circle":true, "cross":true, "triangle":true} +detection_thresholds : {"circle":0.5, "cross":0.5, "triangle":0.5} camera_topic : "/down/image_raw" buffer_size : 5 diff --git a/perception/navigator_vision/include/missions/underwater_shape_identification.hpp b/perception/navigator_vision/include/missions/underwater_shape_identification.hpp index d300d6e0..4fe58d20 100644 --- a/perception/navigator_vision/include/missions/underwater_shape_identification.hpp +++ b/perception/navigator_vision/include/missions/underwater_shape_identification.hpp @@ -6,34 +6,42 @@ #include #include +#include + #include +#include #include #include +#include #include #include #include +namespace nav +{ + class ShapeDetection; class Shape { - using DetectionConstPtr = typename std::unique_ptr; std::string _name; std::string _template_path; float _pixels_per_meter; float _radial_symmetry_angle; cv::Mat _template; + bool _ok = false; public: - static void loadShapes(std::string directory); - void load(std::string path); + static std::vector loadShapes(std::string directory); + void load(std::string path, float shape_area); std::string name() const { return _name; } std::string path() const { return _template_path; } float scale() const { return _pixels_per_meter; } float radialSymmetryAngle() const { return _radial_symmetry_angle; } cv::Mat const * const image() const { return &_template; } - std::unique_ptr detect(bool try_rotation=false, int count=10) const; + std::unique_ptr 
detect(float threshold, bool try_rotation=false, int count=10) const; + bool ok() const { return _ok; } }; struct ShapeDetection @@ -49,14 +57,23 @@ struct ShapeDetection class UnderwaterShapeDetector { + ros::NodeHandle _nh; + nav::ROSCameraStream _camera; + std::string _ns; std::string _template_dir; - float _template_max_dimension; - std::map _shapes; + float _shape_area; float _search_depth; float _depth_uncertainty; + std::map _shapes; + std::map _is_target; std::map _detection_thresholds; std::map> _detections; int _min_detections; + float _min_boat_displacement; public: - UnderwaterShapeDetector(); + UnderwaterShapeDetector(ros::NodeHandle &nh, int img_buf_size, std::string name_space); + bool searchFor(std::vector target_shapes); + void calibrateThresholds(); }; + +} // namespace nav diff --git a/perception/navigator_vision/nodes/underwater_shape_identification_vision.cpp b/perception/navigator_vision/nodes/underwater_shape_identification_vision.cpp index 39dd9b4e..e3d1bd40 100644 --- a/perception/navigator_vision/nodes/underwater_shape_identification_vision.cpp +++ b/perception/navigator_vision/nodes/underwater_shape_identification_vision.cpp @@ -6,19 +6,25 @@ #include #include +#include + using namespace std; int main(int argc, char** argv) { cout << "\033[1;31mUnderwater Shape Identification Vision\033[0m" << endl; - int rotations = stoi(argv[2]); // set up image acquisition string cam_topic {"down/image_raw"}; size_t history_length{200}; - ros::init(argc, argv, "underwater_shape_identification_vision"); + string challenge_name = "underwater_shape_identification"; + ros::init(argc, argv, challenge_name + "_vision"); ros::NodeHandle nh; + int img_buffer_size = 0; + string name_space{challenge_name + "/"}; + nh.param(name_space + "buffer_size", img_buffer_size, 5); + nav::UnderwaterShapeDetector underwater_shape_detector(nh, img_buffer_size, name_space); // cv::Vec3b to get color imgs nav::ROSCameraStream left_cam_stream(nh, history_length); if(!left_cam_stream.init(cam_topic)) @@ -41,19 +47,6 @@ int main(int argc, char** argv) break; } - // Test kernel rotation and averaging - auto file_name = "/home/santiago/mil_ws/src/Navigator/perception/navigator_vision/templates/" + string(argv[1]) + ".png"; - cv::Mat kernel = cv::imread(file_name, CV_LOAD_IMAGE_GRAYSCALE); - if(!kernel.data) - { - cout << "Could not load image from " << file_name << endl; - return -1; - } - - cv::Mat rot_invar_kernel; - makeRotInvariant(kernel, rotations).convertTo(rot_invar_kernel, CV_8UC1); - cv::imshow("rot invariant kernel", rot_invar_kernel); - cv::waitKey(0); return 0; } diff --git a/perception/navigator_vision/src/missions/underwater_shape_identification.cpp b/perception/navigator_vision/src/missions/underwater_shape_identification.cpp index 5f763a04..d1e1bacd 100644 --- a/perception/navigator_vision/src/missions/underwater_shape_identification.cpp +++ b/perception/navigator_vision/src/missions/underwater_shape_identification.cpp @@ -1,4 +1,94 @@ #include +namespace nav { +using namespace std; +using namespace cv; + +namespace fs = boost::filesystem; + +vector Shape::loadShapes(string directory) +{ + fs::directory_iterator dir_end; // default ctor --> past-the-end + fs::path dir{directory}; + vector img_filenames; + if(fs::is_directory(dir)){ + + // iterate through all files in directory + for(fs:: directory_iterator dir_itr(dir); dir_itr != dir_end; ++dir_itr){ + // collect all files that match img extension + string path_str {dir_itr->path().filename().string()}; + 
if(path_str.compare(path_str.size() - 3, 3, "png") == 0){ + img_filenames.push_back(dir_itr->path().string()); + } + } + } + else { + cerr << "Error: " << dir << " is not a valid directory." << endl; + } + + for(auto s : img_filenames) + cout << s << endl; + return vector(); +} + +//void Shape::load(string path, float shape_area) +//{ +// _ok = true; +// fs::path template_path{path}; +// if(!fs::exists(template_path)) +// { +// cout << __PRETTY_FUNCTION__ << ": the file " << path << " doesn't exist." << endl; +// _ok = false; +// return; +// } +// else +// { +// _name = template_path.stem().string(); +// _template_path = path; +// _template = cv::imread(path, CV_LOAD_IMAGE_GRAYSCALE); +// if(!_template.data) +// { +// cout << __PRETTY_FUNCTION__ << ": the image at " << path << " could not be loaded." << endl; +// _ok = false; +// return; +// } +// else +// { +// float template_area = _template.rows * _template.cols; +// if(shape_area == 0.0) +// { +// cout << __PRETTY_FUNCTION__ << ": shape_area can't be zero." << endl; +// _ok = false; +// return; +// } +// _pixels_per_meter = sqrt(template_area / shape_area); +// } +// } +// +// // Find angle of radial symmetry +// +//} + +UnderwaterShapeDetector::UnderwaterShapeDetector(ros::NodeHandle &nh, int img_buf_size, string name_space) +: _nh(nh), _camera(_nh, img_buf_size), _ns(name_space) +{ + string camera_topic; + _nh.param(_ns + "template_directory", _template_dir, "templates/underwater_shape_identification/"); + _nh.param(_ns + "shape_area", _shape_area, 1.0f); + _nh.param(_ns + "camera_topic", camera_topic, "down/image_rect"); + _nh.param(_ns + "search_depth", _search_depth, 5.0f); + _nh.param(_ns + "depth_uncertainty", _depth_uncertainty, 1.0f); + _nh.param(_ns + "min_boat_displacement", _min_boat_displacement, 0.1f); + _nh.param(_ns + "min_detections", _min_detections, 10); + _nh.param>(_ns + "is_target", _is_target, + {{"circle", true}, {"cross", true}, {"triangle", true}}); + _nh.param>(_ns + "detection_thresholds", _detection_thresholds, + {{"circle", 0.5}, {"cross", 0.5}, {"triangle", 0.5}}); + + string pkg_path = ros::package::getPath("navigator_vision"); + auto shapes = Shape::loadShapes(pkg_path + "/" + _template_dir); +} + +} // namespace nav From 430427ed9988d61a7aefc03c9f3c318f6866d4d4 Mon Sep 17 00:00:00 2001 From: David Soto Date: Sun, 4 Dec 2016 11:35:07 -0500 Subject: [PATCH 178/267] PERCEPTION: make rotateKernel separate and configurable --- .../navigator_vision_lib/image_filtering.hpp | 17 +++++++++ .../navigator_vision_lib/image_filtering.cpp | 38 ++++++++++++++----- 2 files changed, 46 insertions(+), 9 deletions(-) diff --git a/perception/navigator_vision/include/navigator_vision_lib/image_filtering.hpp b/perception/navigator_vision/include/navigator_vision_lib/image_filtering.hpp index 6e5964c7..89c4f7ce 100644 --- a/perception/navigator_vision/include/navigator_vision_lib/image_filtering.hpp +++ b/perception/navigator_vision/include/navigator_vision_lib/image_filtering.hpp @@ -5,6 +5,21 @@ #include #include +#include + +namespace nav +{ + +/* + Returns a version of the input kernel rotated about its center point. + kernel - original kernel + theta - angle in radians by which to rotate the kernel. Positive angles --> counterclockwise. + deg - OPTIONAL. Will assume that theta is given in degrees if set to true. + no_expand - OPTIONAL. Will leave the output size the same as the input size. Parts of the + original kernel may fall outside the output canvas after the rotation.
+*/ +cv::Mat rotateKernel(const cv::Mat &kernel, float theta, bool deg=false, bool no_expand=false); + /* Creates a new kernel that is rotationally invariant by averaging rotated instances of the original. @@ -14,3 +29,5 @@ angularly. */ cv::Mat makeRotInvariant(const cv::Mat &kernel, int rotations=8); + +} // namespace nav diff --git a/perception/navigator_vision/src/navigator_vision_lib/image_filtering.cpp b/perception/navigator_vision/src/navigator_vision_lib/image_filtering.cpp index 6a981cbb..031ec2ab 100644 --- a/perception/navigator_vision/src/navigator_vision_lib/image_filtering.cpp +++ b/perception/navigator_vision/src/navigator_vision_lib/image_filtering.cpp @@ -1,9 +1,22 @@ #include -cv::Mat makeRotInvariant(const cv::Mat &kernel, int rotations) +namespace nav { - // Determine affine transform to move from top left to center of output size + +cv::Mat rotateKernel(const cv::Mat &kernel, float theta, bool deg, bool no_expand) +{ + theta = deg? theta : theta * 180.0f / nav::PI; // cv::getRotationMatrix2D expects degrees cv::Point2f c_org{kernel.cols * 0.5f, kernel.rows * 0.5f}; // center of original + + if(no_expand) // rotates without expanding the canvas + { + cv::Mat result; + cv::Mat rot_mat = cv::getRotationMatrix2D(c_org, theta , 1.0f); + cv::warpAffine(kernel, result, rot_mat, kernel.size()); + return result; + } + + // Determine affine transform to move from top left to center of output size float hypot = std::hypot(c_org.x, c_org.y); cv::Point2f c_dest{hypot, hypot}; float center_dest_coeffs[6] = {1.0f, 0.0f, c_dest.x - c_org.x, 0.0f, 1.0f, c_dest.y - c_org.y}; @@ -15,6 +28,15 @@ cv::Mat makeRotInvariant(const cv::Mat &kernel, int rotations) kernel.copyTo(dest_top_left); cv::warpAffine(dest, dest, center_dest, dest.size()); // center dest is ready for rotating + // Rotate kernel about center point + cv::Mat rot_mat = cv::getRotationMatrix2D(c_dest, theta , 1.0); + cv::warpAffine(dest, dest, rot_mat, dest.size()); + + return dest; +} + +cv::Mat makeRotInvariant(const cv::Mat &kernel, int rotations) +{ // Get angles of rotation std::vector rotation_angles; float delta_theta = 360.0f / rotations; @@ -30,15 +52,11 @@ cv::Mat makeRotInvariant(const cv::Mat &kernel, int rotations) // Incrementally rotate and save versions of original kernel std::vector kernel_rotations; + cv::Mat dest = rotateKernel(kernel, 0.0f); kernel_rotations.push_back(dest); - int i = 0; + cv::Point2f c_dest{dest.cols * 0.5f, dest.rows * 0.5f}; for(auto theta : rotation_angles) - { - cv::Mat rotated_kernel; - cv::Mat rot_mat = cv::getRotationMatrix2D(c_dest, theta, 1.0); - cv::warpAffine(dest, rotated_kernel, rot_mat, dest.size()); - kernel_rotations.push_back(rotated_kernel); - } + kernel_rotations.push_back(rotateKernel(dest, theta, true, true)); // Average all rotated versions cv::Mat sum = cv::Mat::zeros(dest.size(), CV_32S); @@ -48,3 +66,5 @@ cv::Mat makeRotInvariant(const cv::Mat &kernel, int rotations) result.convertTo(result, kernel.type()); return result; } + +} // namespace nav
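
A quick exercise of the two helpers as declared above; the template image path is made up for the example:

    #include <opencv2/opencv.hpp>
    #include <navigator_vision_lib/image_filtering.hpp>

    int main()
    {
      cv::Mat kernel = cv::imread("cross.png", CV_LOAD_IMAGE_GRAYSCALE);
      if (kernel.empty())
        return -1;
      // Rotate by 45 degrees, letting the canvas expand so nothing is clipped
      cv::Mat rotated = nav::rotateKernel(kernel, 45.0f, true /*deg*/);
      // Average 8 rotated copies into a rotation-invariant kernel
      cv::Mat invariant = nav::makeRotInvariant(kernel, 8);
      cv::imshow("rotated", rotated);
      cv::imshow("invariant", invariant);
      cv::waitKey(0);
      return 0;
    }
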
From 44f8d819091113d6e9ea62d346fb09b89573ffb4 Mon Sep 17 00:00:00 2001 From: David Soto Date: Sun, 4 Dec 2016 12:14:43 -0500 Subject: [PATCH 179/267] PERCEPTION: add function to find angle of radial symmetry --- .../navigator_vision_lib/image_filtering.hpp | 9 +++++ .../navigator_vision_lib/image_filtering.cpp | 37 +++++++++++++++++++ 2 files changed, 46 insertions(+) diff --git a/perception/navigator_vision/include/navigator_vision_lib/image_filtering.hpp b/perception/navigator_vision/include/navigator_vision_lib/image_filtering.hpp index 89c4f7ce..44ea8e7d 100644 --- a/perception/navigator_vision/include/navigator_vision_lib/image_filtering.hpp +++ b/perception/navigator_vision/include/navigator_vision_lib/image_filtering.hpp @@ -30,4 +30,13 @@ cv::Mat rotateKernel(const cv::Mat &kernel, float theta, bool deg=false, bool no */ cv::Mat makeRotInvariant(const cv::Mat &kernel, int rotations=8); +/* + Returns the minimum theta for which a version of the kernel that has been rotated + by theta radians will be approximately identical to the original. + kernel - input kernel + ang_res - OPTIONAL. The result will be a multiple of this angle + deg - OPTIONAL. The output will be in degrees instead of radians if set to true +*/ +float getRadialSymmetryAngle(const cv::Mat &kernel, float ang_res=0.1, bool deg=false); + } // namespace nav diff --git a/perception/navigator_vision/src/navigator_vision_lib/image_filtering.cpp b/perception/navigator_vision/src/navigator_vision_lib/image_filtering.cpp index 031ec2ab..e033a4ef 100644 --- a/perception/navigator_vision/src/navigator_vision_lib/image_filtering.cpp +++ b/perception/navigator_vision/src/navigator_vision_lib/image_filtering.cpp @@ -67,4 +67,41 @@ cv::Mat makeRotInvariant(const cv::Mat &kernel, int rotations) return result; } +float getRadialSymmetryAngle(const cv::Mat &kernel, float ang_res, bool deg) +{ + auto original = rotateKernel(kernel, 0.0f); + cv::Mat elem_wise_mult{original.size(), CV_32S}; + cv::multiply(original, original, elem_wise_mult); + auto standard = cv::sum(elem_wise_mult)[0]; + float max = deg? 360.0f : 2 * nav::PI; + float result = max; + float best_score = 0; + + for(float theta = 0.0f; theta < max; theta += (deg? ang_res * 180.0f / nav::PI : ang_res)) + { + cv::multiply(original, rotateKernel(original, theta, deg, true), elem_wise_mult); + double score = standard / cv::sum(elem_wise_mult)[0]; + if(score >= 0.975) + { + if(result == max) // First good candidate + { + result = theta; + best_score = score; + } + else if(score > best_score) // Improved candidate + { + result = theta; + best_score = score; + } + else // Decreased candidate above 0.975 + break; + } + else // Decreased candidate below 0.975 + if(result != max) // Viable candidate already found + break; + } + + return result; +} + } // namespace nav
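
For intuition about what the new helper reports: a plus-shaped kernel has four-fold radial symmetry, so the returned angle should come out near pi/2. A sketch, with the synthetic kernel and resolution purely illustrative:

    #include <opencv2/opencv.hpp>
    #include <navigator_vision_lib/image_filtering.hpp>
    #include <cstdio>

    int main()
    {
      // Draw a plus sign: two thick perpendicular strokes through the center
      cv::Mat cross = cv::Mat::zeros(64, 64, CV_8UC1);
      cv::line(cross, cv::Point(32, 8), cv::Point(32, 56), cv::Scalar(255), 5);
      cv::line(cross, cv::Point(8, 32), cv::Point(56, 32), cv::Scalar(255), 5);
      float angle = nav::getRadialSymmetryAngle(cross, 0.01f);
      std::printf("radial symmetry angle: %f rad (expect ~1.57)\n", angle);
      return 0;
    }
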
From 2c5613ab9b68eb6f23ff51b449645ca85c065d5b Mon Sep 17 00:00:00 2001 From: David Soto Date: Mon, 5 Dec 2016 19:58:33 -0500 Subject: [PATCH 180/267] PERCEPTION: load templates from directory --- perception/navigator_vision/CMakeLists.txt | 2 +- .../underwater_shape_identification.hpp | 3 +- .../underwater_shape_identification.cpp | 98 +++++++++++-------- .../navigator_vision_lib/image_filtering.cpp | 5 + 4 files changed, 64 insertions(+), 44 deletions(-) diff --git a/perception/navigator_vision/CMakeLists.txt b/perception/navigator_vision/CMakeLists.txt index 4d64e9d1..9eb3b277 100644 --- a/perception/navigator_vision/CMakeLists.txt +++ b/perception/navigator_vision/CMakeLists.txt @@ -28,7 +28,7 @@ find_package(catkin ) find_package(PCL 1.7 REQUIRED) -find_package(Boost REQUIRED date_time) +find_package(Boost REQUIRED date_time filesystem) catkin_python_setup() diff --git a/perception/navigator_vision/include/missions/underwater_shape_identification.hpp b/perception/navigator_vision/include/missions/underwater_shape_identification.hpp index 4fe58d20..b288fa17 100644 --- a/perception/navigator_vision/include/missions/underwater_shape_identification.hpp +++ b/perception/navigator_vision/include/missions/underwater_shape_identification.hpp @@ -5,6 +5,7 @@ #include #include #include +#include #include @@ -33,7 +34,7 @@ class Shape cv::Mat _template; bool _ok = false; public: - static std::vector loadShapes(std::string directory); + static std::vector loadShapes(std::string directory, float shape_area); void load(std::string path, float shape_area); std::string name() const { return _name; } std::string path() const { return _template_path; } diff --git a/perception/navigator_vision/src/missions/underwater_shape_identification.cpp b/perception/navigator_vision/src/missions/underwater_shape_identification.cpp index d1e1bacd..f7a1a0b9 100644 --- a/perception/navigator_vision/src/missions/underwater_shape_identification.cpp +++ b/perception/navigator_vision/src/missions/underwater_shape_identification.cpp @@ -7,7 +7,7 @@ using namespace cv; namespace fs = boost::filesystem; -vector Shape::loadShapes(string directory) +vector Shape::loadShapes(string directory, float shape_area) { fs::directory_iterator dir_end; // default ctor --> past-the-end fs::path dir{directory}; @@ -27,48 +27,51 @@ vector Shape::loadShapes(string directory) cerr << "Error: " << dir << " is not a valid directory." << endl; } - for(auto s : img_filenames) - cout << s << endl; - return vector(); + vector shapes; + for(auto path : img_filenames) + { + shapes.push_back(Shape()); + shapes.back().load(path, shape_area); + if(!shapes.back().ok()) + shapes.pop_back(); + } + + return shapes; } -//void Shape::load(string path, float shape_area) -//{ -// _ok = true; -// fs::path template_path{path}; -// if(!fs::exists(template_path)) -// { -// cout << __PRETTY_FUNCTION__ << ": the file " << path << " doesn't exist." << endl; -// _ok = false; -// return; -// } -// else -// { -// _name = template_path.stem().string(); -// _template_path = path; -// _template = cv::imread(path, CV_LOAD_IMAGE_GRAYSCALE); -// if(!_template.data) -// { -// cout << __PRETTY_FUNCTION__ << ": the image at " << path << " could not be loaded." << endl; -// _ok = false; -// return; -// } -// else -// { -// float template_area = _template.rows * _template.cols; -// if(shape_area == 0.0) -// { -// cout << __PRETTY_FUNCTION__ << ": shape_area can't be zero." << endl; -// _ok = false; -// return; -// } -// _pixels_per_meter = sqrt(template_area / shape_area); -// } -// } -// -// // Find angle of radial symmetry -// -//} +void Shape::load(string path, float shape_area) +{ + _ok = true; + fs::path template_path{path}; + if(!fs::exists(template_path)) + { + cout << __PRETTY_FUNCTION__ << ": the file " << path << " doesn't exist." << endl; + _ok = false; + return; + } + + _name = template_path.stem().string(); + _template_path = path; + _template = cv::imread(path, CV_LOAD_IMAGE_GRAYSCALE); + if(!_template.data) + { + cout << __PRETTY_FUNCTION__ << ": the image at " << path << " could not be loaded." << endl; + _ok = false; + return; + } + + float template_area = _template.rows * _template.cols; + if(shape_area == 0.0f) + { + cout << __PRETTY_FUNCTION__ << ": shape_area can't be zero."
<< endl; + _ok = false; + return; + } + _pixels_per_meter = sqrt(template_area / shape_area); + + // Find angle of radial symmetry + _radial_symmetry_angle = getRadialSymmetryAngle(_template, 0.01); +} UnderwaterShapeDetector::UnderwaterShapeDetector(ros::NodeHandle &nh, int img_buf_size, string name_space) : _nh(nh), _camera(_nh, img_buf_size), _ns(name_space) @@ -87,7 +90,18 @@ UnderwaterShapeDetector::UnderwaterShapeDetector(ros::NodeHandle &nh, int img_bu {{"circle", 0.5}, {"cross", 0.5}, {"triangle", 0.5}}); string pkg_path = ros::package::getPath("navigator_vision"); - auto shapes = Shape::loadShapes(pkg_path + "/" + _template_dir); + auto shapes = Shape::loadShapes(pkg_path + "/" + _template_dir, _shape_area); + + // Eliminate shapes that lack thresholds or are not targets + for(auto it = shapes.end() - 1; it >= shapes.begin(); it--) + { + if(_detection_thresholds.count(it->name()) < 1 || !_is_target.at(it->name())) + shapes.erase(it); + } + + // Add remaining shapes to map + for(auto& sh : shapes) + _shapes[sh.name()] = sh; } } // namespace nav diff --git a/perception/navigator_vision/src/navigator_vision_lib/image_filtering.cpp b/perception/navigator_vision/src/navigator_vision_lib/image_filtering.cpp index e033a4ef..ddae5398 100644 --- a/perception/navigator_vision/src/navigator_vision_lib/image_filtering.cpp +++ b/perception/navigator_vision/src/navigator_vision_lib/image_filtering.cpp @@ -76,11 +76,16 @@ float getRadialSymmetryAngle(const cv::Mat &kernel, float ang_res, bool deg) float max = deg? 360.0f : 2 * nav::PI; float result = max; float best_score = 0; + bool left_starting_region = false; for(float theta = 0.0f; theta < max; theta += (deg? ang_res * 180.0f / nav::PI : ang_res)) { cv::multiply(original, rotateKernel(original, theta, deg, true), elem_wise_mult); double score = standard / cv::sum(elem_wise_mult)[0]; + if(score < 0.9) + left_starting_region = true; + if(!left_starting_region) + continue; if(score >= 0.975) From 8373f6dedef89a280f018693a16afef98859feb0 Mon Sep 17 00:00:00 2001 From: David Soto Date: Mon, 5 Dec 2016 21:34:28 -0500 Subject: [PATCH 181/267] PR172: address comments --- ...underwater_shape_identification_vision.cpp | 28 ++----------------- 1 file changed, 2 insertions(+), 26 deletions(-) diff --git a/perception/navigator_vision/nodes/underwater_shape_identification_vision.cpp b/perception/navigator_vision/nodes/underwater_shape_identification_vision.cpp index e3d1bd40..d64054fc 100644 --- a/perception/navigator_vision/nodes/underwater_shape_identification_vision.cpp +++ b/perception/navigator_vision/nodes/underwater_shape_identification_vision.cpp @@ -14,38 +14,14 @@ int main(int argc, char** argv) { cout << "\033[1;31mUnderwater Shape Identification Vision\033[0m" << endl; - // set up image acquisition - string cam_topic {"down/image_raw"}; - size_t history_length{200}; string challenge_name = "underwater_shape_identification"; ros::init(argc, argv, challenge_name + "_vision"); ros::NodeHandle nh; + string name_space{challenge_name + "/"}; int img_buffer_size = 0; - string name_space{challenge_name + "/"}; nh.param(name_space + "buffer_size", img_buffer_size, 5); nav::UnderwaterShapeDetector underwater_shape_detector(nh, img_buffer_size, name_space); - // cv::Vec3b to get color imgs - nav::ROSCameraStream left_cam_stream(nh, history_length); - if(!left_cam_stream.init(cam_topic)) - return -1; - - // Display most recent frame in the buffer - while(left_cam_stream.ok()) - { - size_t size
= left_cam_stream.size(); - if(size > 0) - { - cv::Mat latest = left_cam_stream[0]->image(); - cv::Mat _latest = latest; - // cv::cvtColor(latest, _latest, CV_BGR2RGB); - cv::imshow("newest_frame", _latest); - } - char code = cv::waitKey(100); - cout << "ESC = " << int('\033') << " waitKey --> " << code << endl; - if(code != -1) - break; - } return 0; } From 6ed6e54e6ead11e1fb07843a21b79211136decec Mon Sep 17 00:00:00 2001 From: Tess Date: Fri, 2 Dec 2016 05:11:06 -0500 Subject: [PATCH 182/267] PERCEPTION: looking at images for object detection --- .../navigator_vision/nodes/model_detector.py | 314 ------------------ .../nodes/object_classifier.py | 41 +++ .../navigator_vision/nodes/pickle_maker.py | 104 ++++++ .../nodes/scan_the_code_lib/__init__.py | 0 .../nodes/scan_the_code_lib/model.py | 100 ------ .../nodes/scan_the_code_lib/model_tracker.py | 121 ------- .../object_classification/HOG_descriptor.py | 10 + .../object_classification/SVM_classifier.py | 18 + .../object_classification/__init__.py | 3 + .../object_classification/lidar_to_image.py | 213 ++++++++++++ perception/navigator_vision/setup.py | 2 +- .../navigator_tools/__init__.py | 6 +- .../navigator_tools/bag_crawler.py | 69 ++++ .../navigator_tools/navigator_tools/debug.py | 108 ++++++ 14 files changed, 572 insertions(+), 537 deletions(-) delete mode 100755 perception/navigator_vision/nodes/model_detector.py create mode 100755 perception/navigator_vision/nodes/object_classifier.py create mode 100755 perception/navigator_vision/nodes/pickle_maker.py delete mode 100644 perception/navigator_vision/nodes/scan_the_code_lib/__init__.py delete mode 100644 perception/navigator_vision/nodes/scan_the_code_lib/model.py delete mode 100644 perception/navigator_vision/nodes/scan_the_code_lib/model_tracker.py create mode 100644 perception/navigator_vision/object_classification/HOG_descriptor.py create mode 100644 perception/navigator_vision/object_classification/SVM_classifier.py create mode 100644 perception/navigator_vision/object_classification/__init__.py create mode 100644 perception/navigator_vision/object_classification/lidar_to_image.py create mode 100644 utils/navigator_tools/navigator_tools/bag_crawler.py create mode 100644 utils/navigator_tools/navigator_tools/debug.py diff --git a/perception/navigator_vision/nodes/model_detector.py b/perception/navigator_vision/nodes/model_detector.py deleted file mode 100755 index d5b1b838..00000000 --- a/perception/navigator_vision/nodes/model_detector.py +++ /dev/null @@ -1,314 +0,0 @@ -#!/usr/bin/env python -import roslib -import sys -import rospy -import cv2 -from sensor_msgs.msg import Image -from cv_bridge import CvBridge, CvBridgeError -import numpy as np -from scan_the_code_lib.model_tracker import ModelTracker -from navigator_msgs.srv import ScanTheCodeMission, ScanTheCodeMissionResponse - - -class image_converter: - - def __init__(self): - - self.bridge = CvBridge() - self.image_sub = rospy.Subscriber("/stereo/left/image_raw", Image, self.callback, queue_size=10) - self.fgbg = cv2.BackgroundSubtractorMOG() - self.model_tracker = ModelTracker() - self.mission_complete = False - self.colors = [] - self.on = False - - def get_foreground_mask(self, cv_image): - fgmask = self.fgbg.apply(cv_image) - - # DEBUG - cv2.imshow('mask', fgmask) - cv2.waitKey(33) - - return fgmask - - def get_contours(self, mask): - contours, hierarchy = cv2.findContours(mask, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE) - # moments = [] - # for i, val in enumerate(contours): - # M = cv2.moments(val) - # if M["m00"] != 
0: - # cX = int(M["m10"] / M["m00"]) - # cY = int(M["m01"] / M["m00"]) - # else: - # cX, cY = 0, 0 - # moments.append([cX,cY]) - - return contours, hierarchy - - def get_best_contours(self, contours): - # Calculate the area of each contour - areas = [] - for i, val in enumerate(contours): - a = cv2.contourArea(val, 0) - areas.append([i, a]) - - # Choose the six biggest - areas = sorted(areas, key=lambda x: x[1], reverse=True) - areas = areas[:10] - - # Get the moments that correspond to the biggest areas - # my_moments = [[x[0], moments[x[0]]] for x in areas] - - # # calculate the average of these moment locations - # avgx = sum(i[1][0] for i in my_moments)/len(my_moments) - # avgy = sum(i[1][1] for i in my_moments)/len(my_moments) - # maxs = [] - - # #print avgx, avgy - # for i, m in enumerate(my_moments): - # # Get the euclidian distance of the moment to the average - # xd = m[1][0] - avgx - # yd = m[1][1] - avgy - # maxs.append([m[0],np.linalg.norm([xd,yd])]) - - # # Take the 4 closest to the average - # maxs = sorted(maxs, key=lambda x: x[1]) - # maxs = maxs[:4] - - # Get the contours that correspond to those moments - - my_contours = [contours[x[0]] for x in areas] - - # print len(my_contours) - - return my_contours - - def get_bounding_rect(self, contours): - xmin = 1000 - xmax = 0 - ymin = 1000 - ymax = 0 - for i, cont in enumerate(contours): - for j, _val in enumerate(cont): - if(_val[0][0] < xmin): - xmin = _val[0][0] - if(_val[0][0] > xmax): - xmax = _val[0][0] - if(_val[0][1] < ymin): - ymin = _val[0][1] - if(_val[0][1] > ymax): - ymax = _val[0][1] - # print xmin, ymin, xmax, ymax - - return xmin, ymin, xmax, ymax - - def get_non_furthest_points(self, i, points): - dist_points = [] - for ind, val in enumerate(points): - if(ind != i): - diff = np.subtract(points[i], val) - dist_points.append([np.linalg.norm(diff), points[ind]]) - - dist_points = sorted(dist_points, key=lambda x: x[0]) - - return dist_points[0][1], dist_points[1][1] - - def geometry_test(self, fp): - if(len(fp) < 4): - return False - for i, p in enumerate(fp): - p1, p2 = self.get_non_furthest_points(i, fp) - diff1 = np.subtract(p1, p)[0] - diff2 = np.subtract(p2, p)[0] - diff1 /= np.linalg.norm(diff1) - diff2 /= np.linalg.norm(diff2) - - val = abs(np.dot(diff1, diff2)) - if(val > .05): - return False - - return True - - def get_salient_points(self, img, rect): - if(rect[1] == rect[3] or rect[0] == rect[2]): - return - roi = img[rect[1]:rect[3], rect[0]:rect[2]] - gray = cv2.cvtColor(roi, cv2.COLOR_BGR2GRAY) - fp = cv2.goodFeaturesToTrack(gray, 35, .05, 10.0) # [, corners[, mask[, blockSize[, useHarrisDetector[, k]]]]] - if(fp is None): - return - # DEBUG - img_clone = gray.copy() - for i, val in enumerate(fp): - cv2.circle(img_clone, tuple(val[0]), 3, (255, 255, 255), -1) - - cv2.imshow("salient_points", img_clone) - cv2.waitKey(33) - - def get_rectangle(self, img, img_area, xmin_a, ymin_a): - gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY) - bf = cv2.bilateralFilter(gray, 11, 13, 13) - edged = cv2.Canny(gray, 30, 200) - drawimg = edged.copy() - largest = edged.copy() - blah = edged.copy() - contours, hierarchy = cv2.findContours(blah, 1, 2) - - drawimg_p = gray.copy() - - h, w, c = img.shape - - max_i = -1 - max_ar = 0 - for i, cnt in enumerate(contours): - approx = cv2.approxPolyDP(cnt, 0.01 * cv2.arcLength(cnt, True), True) - if(cv2.contourArea(cnt, 0) > 1): - # if(len(approx) >= 3 and len(approx) <= 5 or len(approx) == 9 or len(approx) == 15): - cv2.drawContours(blah, [cnt], 0, 255, -1) - ar = 
cv2.contourArea(cnt, 0) - # print len(approx), "len" - # print ar, img_area - cv2.imshow("rectangles", blah) - cv2.waitKey(0) - - if(ar > max_ar): - max_ar = ar - max_i = i - - # print max_ar - cv2.drawContours(largest, contours, max_i, (255, 0, 0), -1, 8, hierarchy) - cv2.imshow("largest", largest) - cv2.waitKey(33) - - if(max_ar > 100): - xmin, ymin, xmax, ymax = self.get_bounding_rect([contours[max_i]]) - smear = 5 - xmin -= smear - ymin -= smear - xmax += smear - ymax += smear - if(xmin < 0): - xmin = 0 - if(ymin < 0): - ymin = 0 - if(xmax > w): - xmax = w - if(ymax > h): - ymax = h - fp = cv2.goodFeaturesToTrack(gray[ymin:ymax, xmin:xmax], 4, .05, 10.0) - drawimgpp = gray[ymin:ymax, xmin:xmax].copy() - for i, val in enumerate(fp): - cv2.circle(drawimgpp, tuple(val[0]), 3, (255, 255, 255), -1) - cv2.imshow("salient_points", drawimgpp) - cv2.waitKey(33) - - keep = self.geometry_test(fp) - - if(keep): - for v in fp: - v = v[0] - cv2.waitKey(33) - - v[0] += xmin_a + xmin - - v[1] += ymin_a + ymin - return fp - - return None - - def callback(self, data): - if(not self.on): - return - try: - cv_image = self.bridge.imgmsg_to_cv2(data, "bgr8") - except CvBridgeError as e: - print(e) - - if(self.mission_complete): - return - - # print cv_image.shape - - img_clone = cv_image.copy() - img_clone1 = cv_image.copy() - gray = cv2.cvtColor(cv_image, cv2.COLOR_BGR2GRAY) - - # Get the background mask - mask = self.get_foreground_mask(cv_image) - - # Get the contours of the image - contours, hierarchy = self.get_contours(mask) - - if(len(contours) == 0): - return - - # Pick the good contours from this list (biggest area) - best_contours = self.get_best_contours(contours) - - # Get the bounding box to those contours - xmin, ymin, xmax, ymax = self.get_bounding_rect(best_contours) - # print xmin, ymin, xmax, ymax - - # DEBUG - cv2.rectangle(img_clone, (xmin, ymin), (xmax, ymax), (0, 255, 0), 3) - cv2.imshow("bounding_rect", img_clone) - cv2.waitKey(33) - - # Get rectangle shapes in image - - h, w, r = cv_image.shape - - self.mission_complete = self.model_tracker.track_models(cv_image) - - if(self.mission_complete): - print "MISSION COMPLETE" - - rect = self.get_rectangle(cv_image[ymin:ymax, xmin:xmax], w * h, xmin, ymin) - - if(rect is not None): - self.model_tracker.register_model(rect, cv_image) - - if(rect is not None): - for i, val in enumerate(rect): - cv2.circle(img_clone1, tuple(val[0]), 3, (255, 255, 255), -1) - cv2.imshow("model", img_clone1) - cv2.waitKey(33) - - def mission_status(self, req): - obs = False - colors = None - if(len(self.model_tracker.models) > 0): - obs = True - if(self.mission_complete): - colors = self.model_tracker.colors - found = self.mission_complete - return ScanTheCodeMissionResponse(obs, found, colors) - - def activate(self, req): - if(self.on): - del self.fgbg - del self.model_tracker - self.fgbg = cv2.BackgroundSubtractorMOG() - self.model_tracker = ModelTracker() - self.mission_complete = False - self.colors = [] - self.on = False - if(not self.on): - self.on = True - return ScanTheCodeMissionResponse(None, None, None) - - -def main(args): - ic = image_converter() - rospy.init_node('model_detector', anonymous=True) - status = rospy.Service('/vision/scan_the_code_status', ScanTheCodeMission, ic.mission_status) - activate = rospy.Service('/vision/scan_the_code_activate', ScanTheCodeMission, ic.activate) - - try: - rospy.spin() - except KeyboardInterrupt: - print("Shutting down") - cv2.destroyAllWindows() - -if __name__ == '__main__': - main(sys.argv) diff --git 
a/perception/navigator_vision/nodes/object_classifier.py b/perception/navigator_vision/nodes/object_classifier.py new file mode 100755 index 00000000..aa31fa3d --- /dev/null +++ b/perception/navigator_vision/nodes/object_classifier.py @@ -0,0 +1,41 @@ +#!/usr/bin/env python +import txros +from txros import util +from twisted.internet import reactor, defer +from object_classification import LidarToImage, HOGDescriptor, SVMClassifier + + +class ObjectClassifier(object): + + def __init__(self, nh, descriptor, classifier): + self.nh = nh + self.descriptor = descriptor + # load up the trained svm via pickle + self.classifier = classifier + + @util.cancellableInlineCallbacks + def init_(self): + self.lidar_to_image = yield LidarToImage(self.nh, self.image_cb).init_() + defer.returnValue(self) + + + def image_cb(self, imgs): + for img in imgs: + objid = img["id"] + img = img["img"] + desc = self.descriptor.get_descriptor(img) + classification = self.classifier.classify(desc) + print "id", objid, classification + + +@util.cancellableInlineCallbacks +def main(): + nh = yield txros.NodeHandle.from_argv("object_classifier") + d = HOGDescriptor() + cl = SVMClassifier() + oc = yield ObjectClassifier(nh, d, cl).init_() + + +if __name__ == "__main__": + reactor.callWhenRunning(main) + reactor.run() diff --git a/perception/navigator_vision/nodes/pickle_maker.py b/perception/navigator_vision/nodes/pickle_maker.py new file mode 100755 index 00000000..38127251 --- /dev/null +++ b/perception/navigator_vision/nodes/pickle_maker.py @@ -0,0 +1,104 @@ +#!/usr/bin/env python +from object_classification import LidarToImage, HOGDescriptor, SVMClassifier +from txros import util, NodeHandle +import sys +import cv2 +import time +import numpy as np +import argparse +import pickle +from twisted.internet import reactor, defer, threads +from navigator_tools import Debug +import threading + + +class PickleMaker(object): + + def __init__(self, nh, descriptor, classifier, classes): + self.nh = nh + self.descriptor = descriptor + self.debug = Debug(self.nh) + # load up the trained svm via pickle + self.classifier = classifier + self.descs = [] + self.classify = [] + self.classifier.number += 1 + self.last_time = None + self.classes = classes + self.class_to_id = {} + self.id_to_class = {} + self.first_message = False + for i, clss in enumerate(classes): + self.class_to_id[clss] = i + self.id_to_class[i] = clss + + @util.cancellableInlineCallbacks + def init_(self): + self.lidar_to_image = yield LidarToImage(self.nh, training=True, classes=self.classes).init_() + defer.returnValue(self) + + @util.cancellableInlineCallbacks + def poll(self): + @util.cancellableInlineCallbacks + def _do(): + self.nh.sleep(.1) + img, objs = yield self.lidar_to_image.get_all_object_rois() + if img is None or objs is None or len(objs) == 0: + defer.returnValue(True) + draw = img.copy() + for o in objs: + xmin, ymin, xmax, ymax = o["bbox"] + cv2.rectangle(draw, (xmin, ymin), (xmax, ymax), (255, 0, 0)) + img_name = o["name"] + print img_name + roi = o["img"] + points = o["points"] + cv2.putText(draw, str(o["id"]), (xmin, ymin), 1, 1.0, (255, 255, 255)) + for p in points: + cv2.circle(draw, p, 3, (0, 0, 255), -1) + self.debug.add_image(roi, "roi", topic="roi") + self.last_time = self.nh.get_time() + desc = self.descriptor.get_descriptor(roi) + desc = desc.flatten() + print roi.shape, desc.shape + self.descs.append(desc) + self.classify.append(self.class_to_id[img_name]) + self.debug.add_image(draw, "box", topic="boxes") + + while True: + try: + d = _do() + yield
util.wrap_timeout(d, 1) + except Exception: + break + + def done(self): + self.descs = np.array(self.descs) + self.classify = np.array(self.classify) + print self.descs.shape, self.classify.shape + self.classifier.train(self.descs, self.classify) + print np.max(self.classifier.clf.support_vectors_) + self.classifier.pickle("train.p") + reactor.stop() + + +@util.cancellableInlineCallbacks +def main(): + parser = argparse.ArgumentParser() + parser.add_argument('--pickle', type=str, + help="A pickle file you want to load, if you want to use an old one") + nh = yield NodeHandle.from_argv("pickle_maker") + args = parser.parse_args(sys.argv[1:]) + d = HOGDescriptor() + cl = SVMClassifier() + if args.pickle is not None: + cl = pickle.load(open(args.pickle, "rb")) + classes = ["totem", "buoy", "shooter", "scan_the_code", "unknown"] + oc = yield PickleMaker(nh, d, cl, classes).init_() + yield oc.poll() + oc.done() + + +if __name__ == "__main__": + reactor.callWhenRunning(main) + reactor.run() diff --git a/perception/navigator_vision/nodes/scan_the_code_lib/__init__.py b/perception/navigator_vision/nodes/scan_the_code_lib/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/perception/navigator_vision/nodes/scan_the_code_lib/model.py b/perception/navigator_vision/nodes/scan_the_code_lib/model.py deleted file mode 100644 index 6403e13c..00000000 --- a/perception/navigator_vision/nodes/scan_the_code_lib/model.py +++ /dev/null @@ -1,100 +0,0 @@ -import sys -import cv2 -import numpy as np -import time - - -class Model: - - def __init__(self, prev_points, prev_frame, _id): - self.prev_points = prev_points - self.prev_frame = prev_frame - self.time_started = time.time() - self.turned_black = False - self.colors = [] - self.prev_color = None - self.colors_found = 0 - self.my_id = _id - self.offset = None - self.offset_sum = [0, 0, 0, 0] - self.num_offset = 0 - - def get_bounding_rect(self): - xmin = 1000 - xmax = 0 - ymin = 1000 - ymax = 0 - for i, cont in enumerate(self.prev_points): - for j, _val in enumerate(cont): - if(_val[0] < xmin): - xmin = _val[0] - if(_val[0] > xmax): - xmax = _val[0] - if(_val[1] < ymin): - ymin = _val[1] - if(_val[1] > ymax): - ymax = _val[1] - # print xmin, ymin, xmax, ymax - - return xmin, ymin, xmax, ymax - - def get_color(self, scalar): - blackness1 = scalar[0] - scalar[1] - blackness2 = scalar[1] - scalar[2] - if(abs(blackness1) < 30 and abs(blackness2) < 30): - o = [0, scalar[0] - scalar[1], scalar[0] - scalar[2], 0] - self.num_offset += 1 - self.offset_sum = np.add(o, self.offset_sum) - self.offset = np.append(self.offset_sum / self.num_offset, 0) - return 'k' - - if(self.offset is not None): - scalar = np.add(scalar, self.offset) - - a = scalar[0] - b = scalar[1] - c = scalar[2] - if(abs(a - b) < 10 and (b - c) < 70 and b > c and a > c): - return 'y' - elif(a > b and a > c): - return 'b' - elif(b > a and b > c): - return 'g' - elif(c > a and c > b): - return 'r' - else: - return 'g' - - def check_for_colors(self): - if(self.colors_found == 3): - return True, self.colors - xmin, ymin, xmax, ymax = self.get_bounding_rect() - scalar = cv2.mean(self.prev_frame[ymin:ymax, xmin:xmax]) - cv2.imshow("colros", self.prev_frame[ymin:ymax, xmin:xmax]) - cv2.waitKey(33) - color = self.get_color(scalar) - print "ID", self.my_id - print "color", color - - if(self.prev_color is not None): - changed = False - - if(color != self.prev_color): - changed = True - - if(color == 'k'): - print "turned black" - self.turned_black = True - self.colors_found = 0 - del 
self.colors[:] - - elif (self.turned_black and changed): - self.colors_found += 1 - self.colors.append(color) - print "changed" - print color - - print "cols", self.colors - self.prev_color = color - print "---" - return False, [] diff --git a/perception/navigator_vision/nodes/scan_the_code_lib/model_tracker.py b/perception/navigator_vision/nodes/scan_the_code_lib/model_tracker.py deleted file mode 100644 index 2fce8b44..00000000 --- a/perception/navigator_vision/nodes/scan_the_code_lib/model_tracker.py +++ /dev/null @@ -1,121 +0,0 @@ -import sys -import cv2 -import numpy as np -import time -from model import Model - - -class ModelTracker: - - def __init__(self): - self.model_count = 0 - self.models = [] - self.colors = [] - self.lk_params = dict(winSize=(15, 15), maxLevel=2, criteria=(cv2.TERM_CRITERIA_EPS | cv2.TERM_CRITERIA_COUNT, 10, 0.03)) - - def register_model(self, model, frame): - # Go through all the models we already have - # for i, m in enumerate(self.models): - # old_frame = m.prev_frame - # old_gray = cv2.cvtColor(old_frame, cv2.COLOR_BGR2GRAY) - # gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY) - # prev_points = m.prev_points - # # calculate the point in the new image - # p1, st, err = cv2.calcOpticalFlowPyrLK(old_gray, gray, prev_points, None, **self.lk_params) - - # # get the total distance between the points - # total_dists = 0 - # for i, p in enumerate(p1): - # min_dist = 1000 - # for j, _p in enumerate(model): - # dist = np.linalg.norm(np.subtract(p, _p)) - # if(dist < min_dist): - # min_dist = dist - # total_dists += min_dist - - # # If the total distance is less than 50, you have a repeat model, don't add it - # if(total_dists < 50): - # return - - if(len(self.models) > 0): - return - - model = Model(model, frame, self.model_count) - self.models.append(model) - self.model_count += 1 - - def track_models(self, frame): - draw = frame.copy() - updated_models = [] - for i, m in enumerate(self.models): - - old_frame = m.prev_frame - old_gray = cv2.cvtColor(old_frame, cv2.COLOR_BGR2GRAY) - gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY) - prev_points = m.prev_points - - p1, st, err = cv2.calcOpticalFlowPyrLK(old_gray, gray, prev_points, None, **self.lk_params) - for e in err: - if(e == 1): - continue - print "err" - if(not self.geometry_test(p1, gray.copy())): - print "didn't pass geom" - continue - if(time.time() - m.time_started > 15): - print "timed out" - continue - mission_status, colors = m.check_for_colors() - if(mission_status): - self.colors = colors - return True - - m.prev_points = p1 - m.prev_frame = frame - updated_models.append(m) - - for i, val in enumerate(m.prev_points): - cv2.circle(draw, tuple(val[0]), 3, (255, 255, 255), -1) - - del self.models[:] - self.models = updated_models - - if(len(self.models) > 0): - cv2.imshow("all_models", draw) - cv2.waitKey(33) - - return False - - def get_non_furthest_points(self, i, points): - dist_points = [] - for ind, val in enumerate(points): - if(ind != i): - diff = np.subtract(points[i], val) - dist_points.append([np.linalg.norm(diff), points[ind]]) - - dist_points = sorted(dist_points, key=lambda x: x[0]) - - return dist_points[0][1], dist_points[1][1] - - def geometry_test(self, fp, draw1): - - for i, p in enumerate(fp): - draw = draw1.copy() - p1, p2 = self.get_non_furthest_points(i, fp) - cv2.circle(draw, tuple(p[0]), 3, (255, 255, 255), -1) - cv2.circle(draw, tuple(p1[0]), 3, (255, 255, 255), -1) - # cv2.circle(draw, tuple(p2[0]), 3, (255,255,255), -1) - # cv2.imshow("geom", draw) - # cv2.waitKey(0) - - diff1 = 
np.subtract(p1, p)[0] - diff2 = np.subtract(p2, p)[0] - - diff1 /= np.linalg.norm(diff1) - diff2 /= np.linalg.norm(diff2) - - val = abs(np.dot(diff1, diff2)) - if(val > .1): - return False - - return True diff --git a/perception/navigator_vision/object_classification/HOG_descriptor.py b/perception/navigator_vision/object_classification/HOG_descriptor.py new file mode 100644 index 00000000..74e6f9d7 --- /dev/null +++ b/perception/navigator_vision/object_classification/HOG_descriptor.py @@ -0,0 +1,10 @@ +import cv2 + + +class HOGDescriptor(object): + + def __init__(self): + self.hog = cv2.HOGDescriptor((8, 8), (8, 8), (4, 4), (8, 8), 9) + + def get_descriptor(self, img): + return self.hog.compute(img) diff --git a/perception/navigator_vision/object_classification/SVM_classifier.py b/perception/navigator_vision/object_classification/SVM_classifier.py new file mode 100644 index 00000000..8437a3d0 --- /dev/null +++ b/perception/navigator_vision/object_classification/SVM_classifier.py @@ -0,0 +1,18 @@ +from sklearn import svm +import pickle + + +class SVMClassifier(object): + + def __init__(self): + self.clf = svm.SVC() + self.number = 0 + + def classify(self, desc): + self.clf.predict(desc) + + def train(self, desc, clss): + self.clf.fit(desc, clss) + + def pickle(self, name): + pickle.dump(self, open(name, "wb")) diff --git a/perception/navigator_vision/object_classification/__init__.py b/perception/navigator_vision/object_classification/__init__.py new file mode 100644 index 00000000..9e6ed716 --- /dev/null +++ b/perception/navigator_vision/object_classification/__init__.py @@ -0,0 +1,3 @@ +from lidar_to_image import LidarToImage +from SVM_classifier import SVMClassifier +from HOG_descriptor import HOGDescriptor \ No newline at end of file diff --git a/perception/navigator_vision/object_classification/lidar_to_image.py b/perception/navigator_vision/object_classification/lidar_to_image.py new file mode 100644 index 00000000..b8624671 --- /dev/null +++ b/perception/navigator_vision/object_classification/lidar_to_image.py @@ -0,0 +1,213 @@ +import txros +from twisted.internet import defer +from txros import util, tf +import navigator_tools as nt +from navigator_tools import Debug +import sys +from collections import deque +from cv_bridge import CvBridge +from nav_msgs.msg import Odometry +from sensor_msgs.msg import Image, CameraInfo +import genpy +import cv2 +import navigator_tools as nt +from navigator_msgs.srv import ObjectDBQuery, ObjectDBQueryRequest +import numpy as np + + +class LidarToImage(object): + + def __init__(self, nh, training=False, classes=None, dist=50): + self.MAX_SIZE = 74 + self.IMAGE_SIZE = 100 + self.max_dist = dist + self.bridge = CvBridge() + self.nh = nh + self.image_cache = deque() + self.pose = None + self.training = training + self.image = None + self.classes = classes + self.cam_info = None + self.busy = False + self.c = 0 + + self.debug = Debug(nh) + + @util.cancellableInlineCallbacks + def init_(self, right=True): + image_sub = "/stereo/right/image_rect_color" + self.tf_frame = "/stereo_right_cam" + cam_info = "/stereo/right/camera_info" + if not right: + image_sub = "/stereo/left/image_rect_color" + self.tf_frame = "/stereo_left_cam" + cam_info = "/stereo/left/camera_info" + + yield self.nh.subscribe(image_sub, Image, self._img_cb) + self._database = yield self.nh.get_service_client('/database/requests', ObjectDBQuery) + self._odom_sub = yield self.nh.subscribe('/odom', Odometry, + lambda odom: setattr(self, 'pose', nt.odometry_to_numpy(odom)[0])) + 
self.cam_info_sub = yield self.nh.subscribe(cam_info, CameraInfo, self._info_cb) + self.tf_listener = tf.TransformListener(self.nh) + defer.returnValue(self) + + def _info_cb(self, info): + self.cam_info = info + + def _odom_cb(self, odom): + self.position, self.rot = nt.odometry_to_numpy(odom)[0] + + @util.cancellableInlineCallbacks + def get_all_object_rois(self): + req = ObjectDBQueryRequest() + req.name = 'all' + obj = yield self._database(req) + if obj is None or not obj.found: + defer.returnValue((None, None)) + rois = [] + ros_img = yield self._get_closest_image(obj.objects[0].header.stamp) + if ros_img is None: + defer.returnValue((None, None)) + img = self.bridge.imgmsg_to_cv2(ros_img, "mono8") + for o in obj.objects: + print o.name, len(o.points) + if self.training and o.name not in self.classes: + continue + position = yield self._get_position() + o_pos = nt.rosmsg_to_numpy(o.position) + diff = np.linalg.norm(position - o_pos) + if diff > self.max_dist: + continue + points, bbox = yield self._get_bounding_box_2d(o.points, obj.objects[0].header.stamp) + if bbox is None: + continue + + xmin, ymin, xmax, ymax = bbox + + h, w, r = img.shape + if xmin < 0 or xmax < 0 or xmin > w or xmax > w or xmax - xmin == 0 or ymax - ymin == 0: + continue + if ymin < 0: + ymin = 0 + print "bbox", bbox + roi = img[ymin:ymax, xmin:xmax] + print "preshape", roi.shape + roi = self._resize_image(roi) + print "postshape", roi.shape + ret_obj = {} + ret_obj["id"] = o.id + ret_obj["points"] = points + ret_obj["img"] = roi + ret_obj["time"] = o.header.stamp + ret_obj["name"] = o.name + ret_obj["bbox"] = [xmin, ymin, xmax, ymax] + rois.append(ret_obj) + defer.returnValue((img, rois)) + + def _img_cb(self, image_ros): + """Add an image to the image cache.""" + self.image = image_ros + + if len(self.image_cache) > 100: + self.image_cache.popleft() + + self.image_cache.append(image_ros) + + @util.cancellableInlineCallbacks + def _get_position(self): + last_odom_msg = yield self._odom_sub.get_next_message() + defer.returnValue(nt.odometry_to_numpy(last_odom_msg)[0][0]) + + @txros.util.cancellableInlineCallbacks + def _get_bounding_box_2d(self, points_3d_enu, time): + points_2d = [] + try: + trans = yield self.tf_listener.get_transform(self.tf_frame, "/enu", time) + except Exception: + defer.returnValue((None, None)) + transformation = trans.as_matrix() + + for point in points_3d_enu: + point = nt.rosmsg_to_numpy(point) + point = np.append(point, 1.0) + t_p = transformation.dot(point) + if t_p[3] < 1E-15: + defer.returnValue((None, None)) + t_p[0] /= t_p[3] + t_p[1] /= t_p[3] + t_p[2] /= t_p[3] + t_p = t_p[0:3] + if t_p[2] < 0: + defer.returnValue((None, None)) + + K = self.cam_info.K + K = np.array(K).reshape(3, 3) + point_2d = K.dot(t_p) + if point_2d[2] == 0: + defer.returnValue((None, None)) + point_2d[0] /= point_2d[2] + point_2d[1] /= point_2d[2] + if point_2d[0] < 0: + point_2d[0] = 0 + if point_2d[1] < 0: + point_2d[1] = 0 + points_2d.append((int(point_2d[0]), int(point_2d[1]))) + + xmin, ymin = sys.maxint, sys.maxint + xmax, ymax = -sys.maxint, -sys.maxint + + for i, point in enumerate(points_2d): + if point[0] < xmin: + xmin = point[0] + if point[0] > xmax: + xmax = point[0] + if point[1] < ymin: + ymin = point[1] + if point[1] > ymax: + ymax = point[1] + defer.returnValue((points_2d, (xmin, ymin, xmax, ymax))) + + @util.cancellableInlineCallbacks + def _get_closest_image(self, time): + min_img = None + for i in range(0, 20): + min_diff = genpy.Duration(sys.maxint) + for img in self.image_cache: + diff 
= abs(time - img.header.stamp) + if diff < min_diff: + min_diff = diff + min_img = img + print min_diff.to_sec() + if min_img is not None: + defer.returnValue(min_img) + yield self.nh.sleep(.3) + + def _resize_image(self, img): + h, w, r = img.shape + if h > w: + nh = self.MAX_SIZE + nw = nh * w / h + else: + nw = self.MAX_SIZE + nh = nw * h / w + img = cv2.resize(img, (nw, nh)) + # return img + rep = np.ones(nw, dtype=np.int64) + reph = np.ones(nh, dtype=np.int64) + emtpy_slots = self.IMAGE_SIZE - nw + empty_slots_h = self.IMAGE_SIZE - nh + half_empty_slots = emtpy_slots / 2 + 1 + half_empty_slots_h = empty_slots_h / 2 + 1 + reph[0] = half_empty_slots_h + reph[-1] = half_empty_slots_h + rep[0] = half_empty_slots + rep[-1] = half_empty_slots + if emtpy_slots % 2 == 1: + rep[-1] += 1 + + if empty_slots_h % 2 == 1: + reph[-1] += 1 + + img = np.repeat(img, reph, axis=0) + return np.repeat(img, rep, axis=1) diff --git a/perception/navigator_vision/setup.py b/perception/navigator_vision/setup.py index 29a9ec75..c311a2fd 100644 --- a/perception/navigator_vision/setup.py +++ b/perception/navigator_vision/setup.py @@ -5,7 +5,7 @@ # fetch values from package.xml setup_args = generate_distutils_setup( - packages=[''], + packages=['object_classification'], ) setup(**setup_args) diff --git a/utils/navigator_tools/navigator_tools/__init__.py b/utils/navigator_tools/navigator_tools/__init__.py index 0fa0c06c..6eb57b63 100644 --- a/utils/navigator_tools/navigator_tools/__init__.py +++ b/utils/navigator_tools/navigator_tools/__init__.py @@ -7,6 +7,8 @@ import general_helpers import object_database_helper import missing_perception_object +import debug +import bag_crawler from init_helpers import * from image_helpers import * @@ -16,4 +18,6 @@ from rviz_helpers import * from general_helpers import * from object_database_helper import * -from missing_perception_object import * \ No newline at end of file +from missing_perception_object import * +from debug import Debug +from bag_crawler import BagCrawler \ No newline at end of file diff --git a/utils/navigator_tools/navigator_tools/bag_crawler.py b/utils/navigator_tools/navigator_tools/bag_crawler.py new file mode 100644 index 00000000..662971df --- /dev/null +++ b/utils/navigator_tools/navigator_tools/bag_crawler.py @@ -0,0 +1,69 @@ +#!/usr/bin/python +""" +This file wis written by the team at UF MIL for the 2016 robosub competition. 
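+It provides BagCrawler, a small helper for stepping through a rosbag's messages and listing its image and camera-info topics.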
+ +github.com/uf-mil +""" +import rosbag +from cv_bridge import CvBridge +import progressbar + + +class BagCrawler(object): + + def __init__(self, bag_path): + self.bag_path = bag_path + self.bag = rosbag.Bag(self.bag_path) + self.bridge = CvBridge() + + def convert(self, msg): + img = self.bridge.imgmsg_to_cv2(msg, desired_encoding='bgr8') + return img + + def crawl(self, topic=None, is_image=False, max_msgs=float('inf')): + num_seen = 0 + num_msgs = 0 + bar = progressbar.ProgressBar(max_value=self.bag.get_message_count()) + if is_image: + topic = self.image_topics[0] + for msg_topic, msg, t in self.bag.read_messages(): + bar.update(num_msgs) + num_msgs += 1 + if msg_topic != topic: + continue + if num_seen > max_msgs: + break + num_seen += 1 + yield msg + + @property + def image_topics(self, cam="right"): + all_topics = self.bag.get_type_and_topic_info()[1].keys() + all_types = self.bag.get_type_and_topic_info()[1].values() + topics = [all_topics[k] for k, topic in enumerate(all_topics) if (all_types[k][0] == 'sensor_msgs/Image')] + if cam == "right": + topics = [topics[i] for i, t in enumerate(topics) if "right" in t] + if cam == "left": + topics = [topics[i] for i, t in enumerate(topics) if "left" in t] + return topics + + @property + def image_info_topics(self, cam="right"): + all_topics = self.bag.get_type_and_topic_info()[1].keys() + all_types = self.bag.get_type_and_topic_info()[1].values() + topics = [all_topics[k] for k, topic in enumerate(all_topics) if (all_types[k][0] == 'sensor_msgs/CameraInfo')] + if cam == "right": + topics = [topics[i] for i, t in enumerate(topics) if "right" in t] + if cam == "left": + topics = [topics[i] for i, t in enumerate(topics) if "left" in t] + return topics + +if __name__ == '__main__': + import cv2 + + bag = '/home/jacob/catkin_ws/src/Sub8/gnc/sub8_perception/data/bag_test.bag' + bc = BagCrawler(bag) + + for image in bc.crawl(topic=bc.image_topics[0]): + cv2.imshow('current_image', image) + cv2.waitKey(3) diff --git a/utils/navigator_tools/navigator_tools/debug.py b/utils/navigator_tools/navigator_tools/debug.py new file mode 100644 index 00000000..a7846686 --- /dev/null +++ b/utils/navigator_tools/navigator_tools/debug.py @@ -0,0 +1,108 @@ +"""Shows images for debugging purposes.""" +import cv2 +import numpy as np +from sensor_msgs.msg import Image +from cv_bridge import CvBridge +import rospy +import sys +___author___ = "Tess Bianchi" + + +class Debug(object): + """Class that contains methods that assist with debugging with images.""" + + def __init__(self, nh=None, w=1000, h=800, total=8, win_name="debug", wait=True): + """ + Initialize the Debug class. 
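+        If nh (a txros NodeHandle) is given, images are published through it; otherwise plain rospy publishers are used.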
+ + @param w = The width of the image that smaller images are added to + @param h = The height of the image that smaller images are added to + @param win_name = the name of the window that is shown in opencv + @param wait = whether or not to wait after showing the image + """ + self.width = w + self.height = h + self.img = np.zeros((h, w, 3), np.uint8) + self.total = total + self.hor_num = total / 2 + self.vert_num = 2 + self.max_width = w / self.hor_num + self.max_height = h / self.vert_num + self.wait = wait + self.nh = nh + + self.curr_w = 0 + self.curr_h = 0 + self.num_imgs = 0 + self.win_name = win_name + self.name_to_starting = {} + self.bridge = CvBridge() + self.base_topic = "/debug/scan_the_code/" + self.topic_to_pub = {} + if nh is None: + self.pub = rospy.Publisher("/debug/scan_the_code/image", Image, queue_size=10) + else: + self.pub = nh.advertise("/debug/scan_the_code/image", Image) + + def add_image(self, img, name, wait=33, topic="image"): + """ + Add an image to show to either with a topic or using cv2.imshow. + + @param name = a unique key name for the image, use the same name if you want to switch out this image for another + @param wait = the amount of wait time for the imshow image + """ + color = "bgr8" + print img.shape + if len(img.shape) == 2 or img.shape[2] == 1: + color = "mono8" + + if topic != "image": + self._add_new_topic(img, name, wait, topic) + return + if self.wait: + wait = 0 + h, w = img.shape[0], img.shape[1] + + if w > h: + img = cv2.resize(img, (self.max_width, h * self.max_width / w)) + + if h > w: + img = cv2.resize(img, (w * self.max_height / h, self.max_height)) + h, w = img.shape[0], img.shape[1] + if name not in self.name_to_starting: + if self.num_imgs == self.total: + print "Too many images" + return + self.name_to_starting[name] = (self.curr_w, self.curr_h) + self.num_imgs += 1 + + self.curr_w += w + if self.num_imgs == self.total / 2: + self.curr_w = 0 + self.curr_h = self.max_height + if self.num_imgs > self.total / 2: + self.name_to_starting[name] = (self.curr_w, self.curr_h) + my_w, my_h = self.name_to_starting[name] + self.img[my_h: my_h + h, my_w: my_w + w] = img + if self.nh is None: + cv2.imshow("img", self.img) + if cv2.waitKey(wait) & 0xFF == ord('q'): + cv2.destroyAllWindows() + sys.exit() + + else: + self.pub.publish(self.bridge.cv2_to_imgmsg(self.img, color)) + + def _add_new_topic(self, img, name, wait, topic): + color = "bgr8" + if len(img.shape) == 2 or img.shape[2] == 1: + color = "mono8" + pub = None + if topic in self.topic_to_pub.keys(): + pub = self.topic_to_pub[topic] + elif self.nh is None: + pub = rospy.Publisher("/debug/scan_the_code/" + topic, Image, queue_size=10) + elif self.nh is not None: + pub = self.nh.advertise("/debug/scan_the_code/" + topic, Image) + self.topic_to_pub[topic] = pub + pub.publish(self.bridge.cv2_to_imgmsg(img, color)) From de0035a0fe93002ade0d768fe4822f16fd64735a Mon Sep 17 00:00:00 2001 From: Tess Date: Sun, 4 Dec 2016 16:27:35 -0500 Subject: [PATCH 183/267] OBJECT CLASSIFICATION: Uses HOG Descriptors to act as a final image object classifier. 
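
The pipeline this patch builds up: each candidate ROI is resized and padded to
a fixed size, reduced to a HOG feature vector, and scored by an SVM trained
with probability estimates enabled. A minimal sketch of that flow (the HOG
geometry mirrors HOG_descriptor.py; the function names here are illustrative,
not part of the patch):

    import cv2
    import numpy as np
    from sklearn import svm

    # winSize, blockSize, blockStride, cellSize, nbins - as in HOG_descriptor.py
    hog = cv2.HOGDescriptor((8, 8), (8, 8), (4, 4), (8, 8), 9)
    clf = svm.SVC(probability=True)

    def describe(img):
        # hog.compute returns an Nx1 column; flatten it into one feature row
        return hog.compute(img).flatten()

    def train(rois, labels):
        # rois: list of fixed-size grayscale images, labels: list of class ids
        clf.fit(np.array([describe(r) for r in rois]), np.array(labels))

    def classify(img):
        probs = clf.predict_proba(describe(img).reshape(1, -1)).flatten()
        i = int(np.argmax(probs))
        return i, probs[i]  # most likely class id and its confidence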
--- .../navigator_vision/nodes/pickle_maker.py | 2 +- .../object_classification/SVM_classifier.py | 9 +- .../object_classification/lidar_to_image.py | 27 +-- .../object_classification/median_flow.py | 208 ++++++++++++++++++ .../object_classification.py | 148 +++++++++++++ .../object_classification/roi_generator.py | 188 ++++++++++++++++ 6 files changed, 566 insertions(+), 16 deletions(-) create mode 100644 perception/navigator_vision/object_classification/median_flow.py create mode 100644 perception/navigator_vision/object_classification/object_classification.py create mode 100644 perception/navigator_vision/object_classification/roi_generator.py diff --git a/perception/navigator_vision/nodes/pickle_maker.py b/perception/navigator_vision/nodes/pickle_maker.py index 38127251..41673e93 100755 --- a/perception/navigator_vision/nodes/pickle_maker.py +++ b/perception/navigator_vision/nodes/pickle_maker.py @@ -68,7 +68,7 @@ def _do(): while True: try: d = _do() - yield util.wrap_timeout(d, 1) + yield util.wrap_timeout(d, 10) except Exception: break diff --git a/perception/navigator_vision/object_classification/SVM_classifier.py b/perception/navigator_vision/object_classification/SVM_classifier.py index 8437a3d0..d3e93275 100644 --- a/perception/navigator_vision/object_classification/SVM_classifier.py +++ b/perception/navigator_vision/object_classification/SVM_classifier.py @@ -5,11 +5,16 @@ class SVMClassifier(object): def __init__(self): - self.clf = svm.SVC() + self.clf = svm.SVC(probability=True) self.number = 0 def classify(self, desc): - self.clf.predict(desc) + desc = desc.reshape(1, len(desc)) + probs = self.clf.predict_proba(desc) + probs = list(probs.flatten()) + p = max(probs) + i = probs.index(p) + return i, p def train(self, desc, clss): self.clf.fit(desc, clss) diff --git a/perception/navigator_vision/object_classification/lidar_to_image.py b/perception/navigator_vision/object_classification/lidar_to_image.py index b8624671..53a3b707 100644 --- a/perception/navigator_vision/object_classification/lidar_to_image.py +++ b/perception/navigator_vision/object_classification/lidar_to_image.py @@ -12,6 +12,7 @@ import cv2 import navigator_tools as nt from navigator_msgs.srv import ObjectDBQuery, ObjectDBQueryRequest +from image_geometry import PinholeCameraModel import numpy as np @@ -23,6 +24,7 @@ def __init__(self, nh, training=False, classes=None, dist=50): self.max_dist = dist self.bridge = CvBridge() self.nh = nh + self.id_to_perist = {} self.image_cache = deque() self.pose = None self.training = training @@ -71,7 +73,12 @@ def get_all_object_rois(self): defer.returnValue((None, None)) img = self.bridge.imgmsg_to_cv2(ros_img, "mono8") for o in obj.objects: - print o.name, len(o.points) + if o.id not in self.id_to_perist: + self.id_to_perist[o.id] = [] + ppoints = self.id_to_perist[o.id] + ppoints.extend(o.points) + if len(ppoints) > 500: + ppoints = ppoints[:500] if self.training and o.name not in self.classes: continue position = yield self._get_position() @@ -79,7 +86,7 @@ def get_all_object_rois(self): diff = np.linalg.norm(position - o_pos) if diff > self.max_dist: continue - points, bbox = yield self._get_bounding_box_2d(o.points, obj.objects[0].header.stamp) + points, bbox = yield self._get_bounding_box_2d(ppoints, obj.objects[0].header.stamp) if bbox is None: continue @@ -121,6 +128,10 @@ def _get_position(self): @txros.util.cancellableInlineCallbacks def _get_bounding_box_2d(self, points_3d_enu, time): + if self.cam_info is None: + defer.returnValue((None, None)) + 
self.camera_model = PinholeCameraModel() + self.camera_model.fromCameraInfo(self.cam_info) points_2d = [] try: trans = yield self.tf_listener.get_transform(self.tf_frame, "/enu", time) @@ -141,17 +152,7 @@ def _get_bounding_box_2d(self, points_3d_enu, time): if t_p[2] < 0: defer.returnValue((None, None)) - K = self.cam_info.K - K = np.array(K).reshape(3, 3) - point_2d = K.dot(t_p) - if point_2d[2] == 0: - defer.returnValue((None, None)) - point_2d[0] /= point_2d[2] - point_2d[1] /= point_2d[2] - if point_2d[0] < 0: - point_2d[0] = 0 - if point_2d[1] < 0: - point_2d[1] = 0 + point_2d = self.camera_model.project3dToPixel(t_p) points_2d.append((int(point_2d[0]), int(point_2d[1]))) xmin, ymin = sys.maxint, sys.maxint diff --git a/perception/navigator_vision/object_classification/median_flow.py b/perception/navigator_vision/object_classification/median_flow.py new file mode 100644 index 00000000..713b5bd7 --- /dev/null +++ b/perception/navigator_vision/object_classification/median_flow.py @@ -0,0 +1,208 @@ +import cv2 +import numpy as np +from collections import deque +import itertools + +class MedianFlow(object): + TRACKING_LENGTH = 3 + + def __init__(self, elimination_amount=.6, winSize=(15, 15), maxLevel=2): + self.prev_points = None + self.prev_frame = None + self.lk_params = dict(winSize=winSize, maxLevel=maxLevel, criteria=( + cv2.TERM_CRITERIA_EPS | cv2.TERM_CRITERIA_COUNT, 2, 0.03)) + self.tracking_points = deque() + self.tracking_frames = deque() + self.bboxs = deque() + + self.elimination_amount = elimination_amount + self._is_effective = True + + self.bbox = None + self._curr_scale = 1 + self._amount_mdn_flow_tracked = 0 + + def init(self, frame, bbox, img=None, num_points=6): + ''' + Arguments: num_points is the square root of the number of that will be initially given to your image + ''' + if frame is None: + raise TypeError("The frame is invalid") + + x, y, w, h = bbox + + if(w <= 0 or h <= 0): + raise ValueError("Invalid bounding box") + + self.prev_frame = frame + + self.bbox = bbox + + px = np.linspace(x, x + w, num_points) + py = np.linspace(y, y + h, num_points) + px = px.astype(np.int32) + py = py.astype(np.int32) + p = np.array(list(itertools.product(px, py)), dtype=np.float32) + p = p.reshape(p.shape[0], 2) + + self.prev_points = p + + def stop_use(self): + self.tracking_points.clear() + self.tracking_frames.clear() + self.bboxs.clear() + self._amount_mdn_flow_tracked = 0 + self._is_effective = True + self.curr_scale = 1 + + def is_effective(self): + return self._is_effective + + def get_last_good_frame(self): + best = self.bboxs.popleft() + best_frame = self.tracking_frames.popleft() + x, y, w, h = best + _roi = best_frame[y:y + h, x:x + w] + return _roi, best + + def _calculate_forward_backward_error(self, prev_frame, prev_points): + # Get the last set of frame in the list, calulate optical flow from f(t) to f(t-TRACKING_LENGTH) + # for the number of frame in tracking frame + # Go backwards through the list because we want the most recent element first + for i in range(self.TRACKING_LENGTH - 1, -1, -1): + _frame = self.tracking_frames[i] + _points, status, err = cv2.calcOpticalFlowPyrLK(prev_frame, _frame, prev_points, None, **self.lk_params) + prev_frame = _frame + prev_points = _points + + # We now have f(t-TRACKING_LENGTH), get the euclidean distance from each + # of these points to the oldest points in tracking_points + old_points = self.tracking_points.popleft() + diff = np.subtract(old_points, prev_points) + diff_norm = np.linalg.norm(diff, axis=1) + + return diff, 
diff_norm
+
+    def _eliminate_points(self, points, frame):
+        if frame is None:
+            raise TypeError("The frame is invalid")
+
+        # Make sure there are enough frames in our list
+        if(len(self.tracking_points) < self.TRACKING_LENGTH):
+            self.tracking_points.append(points)
+            self.tracking_frames.append(frame)
+            return points
+
+        diff, diff_norm = self._calculate_forward_backward_error(frame, points)
+        # Eliminate the points based on the fb error
+        msk = None
+
+        # If this is the first time median flow is tracking these points, eliminate the top 60% of the points with the highest
+        # fb tracking error (60% is defined in self.elimination_amount)
+        if(self._amount_mdn_flow_tracked == 0):
+            lrgst_ind = np.argsort(diff_norm)[-int(len(diff_norm) * self.elimination_amount):]
+            msk = np.ones(len(diff_norm), dtype=bool)
+            msk[lrgst_ind] = False
+        else:
+            # Eliminate points with a forward-backward error greater than .8
+            high_val = diff_norm > .8
+            diff = diff.T[0]
+            msk = np.ones(len(diff_norm), dtype=bool)
+            msk[high_val] = False
+
+        # Eliminate these from the current points and from all previous points as well
+        points = points[msk]
+        for i, tp in enumerate(self.tracking_points):
+            self.tracking_points[i] = tp[np.array(msk)]
+
+        self.prev_points = self.tracking_points[self.TRACKING_LENGTH - 2]
+
+        # Update tracking values
+        self.tracking_points.append(points)
+        self.tracking_frames.popleft()
+        self.tracking_frames.append(frame)
+        self._amount_mdn_flow_tracked += 1
+
+        # Determine if this model is still effective
+        # If the mean fb error is > 2 and the amount of frames tracked is > 20, then the model has failed.
+        # Also fail if fewer than 3 points remain in the model.
+        if((np.mean(diff_norm) > 2 and self._amount_mdn_flow_tracked > 20) or len(points) < 3):
+            self._is_effective = False
+
+        return points
+
+    def _update_bbox(self, curr_points):
+        # Update bounding dimensions
+        # Get the median dx and dy, this is how far the bounding box moves
+        mydiff = np.subtract(curr_points, self.prev_points)
+        a = mydiff.transpose()
+        x_d = np.median(a[0])
+        y_d = np.median(a[1])
+        x, y, w, h = self.bbox
+
+        x_n = int(round(x + x_d))
+        y_n = int(round(y + y_d))
+
+        # Scale is a little trickier: get every permutation of two points from previous points and current points
+        prev_comb = list(itertools.permutations(self.prev_points, 2))
+        curr_comb = list(itertools.permutations(curr_points, 2))
+        ratios = []
+
+        for i, val in enumerate(prev_comb):
+            if(i >= len(curr_comb)):
+                # This should never happen
+                raise ValueError('The previous points and current points are not synchronized')
+            prev_point_A = prev_comb[i][0]
+            prev_point_B = prev_comb[i][1]
+            curr_point_A = curr_comb[i][0]
+            curr_point_B = curr_comb[i][1]
+
+            # Get the distance between every corresponding pair of points
+            prev_dist = np.subtract(prev_point_A, prev_point_B)
+            curr_dist = np.subtract(curr_point_A, curr_point_B)
+
+            # Don't want to divide by zero
+            if(np.linalg.norm(prev_dist) == 0):
+                continue
+
+            # Get the ratio of the current distance to the previous distance
+            ratios.append(np.linalg.norm(curr_dist) / np.linalg.norm(prev_dist))
+
+        # Choose the median of these ratios
+        scale = np.median(ratios)
+
+        # Multiply the running scale by this frame's scale
+        self._curr_scale *= scale
+
+        w_n = w
+        h_n = h
+
+        # Only apply the scale once it has drifted far enough from 1 (avoids rounding jitter)
+        if(self._curr_scale > 1.08 or self._curr_scale < .92):
+            w_n = int(round(w * self._curr_scale))
+            h_n = int(round(h * self._curr_scale))
+            self._curr_scale = 1
+
+        if(x_n < 0 or y_n < 0 or w_n < 0 or h_n < 0):
+            raise 
ValueError("The new bounding box has incorrect values") + + self.bbox = (x_n, y_n, w_n, h_n) + + def track(self, frame): + if frame is None: + raise TypeError("The frame is invalid") + + points, status, err = cv2.calcOpticalFlowPyrLK(self.prev_frame, frame, self.prev_points, None, **self.lk_params) + points = self._eliminate_points(points, frame) + try: + self._update_bbox(points) + except: + return None + + self.bboxs.append(self.bbox) + if(len(self.bboxs) > self.TRACKING_LENGTH): + self.bboxs.popleft() + + self.prev_points = points + self. prev_frame = frame + return self.bbox \ No newline at end of file diff --git a/perception/navigator_vision/object_classification/object_classification.py b/perception/navigator_vision/object_classification/object_classification.py new file mode 100644 index 00000000..b21d3dec --- /dev/null +++ b/perception/navigator_vision/object_classification/object_classification.py @@ -0,0 +1,148 @@ +from roi_generator import ROI_Collection +from navigator_tools import Debug, BagCrawler +import numpy as np +from HOG_descriptor import HOGDescriptor +from SVM_classifier import SVMClassifier +import pickle +import cv2 +from cv_bridge import CvBridge + + +class Config(object): + + def __init__(self): + self.classes = ["totem", "scan_the_code", "nothing", "shooter"] + self.classifier = SVMClassifier() + self.descriptor = HOGDescriptor() + self.bridge = CvBridge() + self.MAX_SIZE = 74 + self.IMAGE_SIZE = 100 + + def to_class(self, val): + return self.classes[val] + + def to_val(self, clss): + for i, c in enumerate(self.classes): + if c in clss: + return i + + def get_imgs(self, val): + roi = pickle.load(open(val, 'rb')) + imgs = [] + rois_vals = [] + rois = [] + print roi + + for b in roi.bag_to_rois.keys(): + frames = roi.bag_to_rois[b] + bc = BagCrawler(b) + topic = bc.image_topics[0] + bc_crawl = bc.crawl(topic) + print b + for frame in frames: + img = bc_crawl.next() + img = self.bridge.imgmsg_to_cv2(img, 'bgr8') + imgs.append(img) + a = [] + for clss in frame: + r = frame[clss] + myroi = img[r[1]:r[1] + r[3], r[0]:r[0] + r[2]] + myroi = self._resize_image(myroi) + clss = self.to_val(clss) + rois.append((myroi, clss)) + a.append((r, myroi, clss)) + rois_vals.append(a) + return imgs, rois_vals, rois + + def _resize_image(self, img): + h, w, r = img.shape + if h > w: + nh = self.MAX_SIZE + nw = nh * w / h + else: + nw = self.MAX_SIZE + nh = nw * h / w + img = cv2.resize(img, (nw, nh)) + # return img + rep = np.ones(nw, dtype=np.int64) + reph = np.ones(nh, dtype=np.int64) + emtpy_slots = self.IMAGE_SIZE - nw + empty_slots_h = self.IMAGE_SIZE - nh + half_empty_slots = emtpy_slots / 2 + 1 + half_empty_slots_h = empty_slots_h / 2 + 1 + reph[0] = half_empty_slots_h + reph[-1] = half_empty_slots_h + rep[0] = half_empty_slots + rep[-1] = half_empty_slots + if emtpy_slots % 2 == 1: + rep[-1] += 1 + + if empty_slots_h % 2 == 1: + reph[-1] += 1 + + img = np.repeat(img, reph, axis=0) + return np.repeat(img, rep, axis=1) + + +class Training(object): + + def __init__(self, roi_file, output): + self.config = Config() + self.output = output + self.roi_file = roi_file + + def train(self): + descs = [] + classify = [] + imgs, roi_val, rois = self.config.get_imgs(self.roi_file) + for r in rois: + roi, clss = r + desc = self.config.descriptor.get_descriptor(roi) + desc = desc.flatten() + descs.append(desc) + classify.append(clss) + descs = np.array(descs) + classify = np.array(classify) + counts = dict((x, list(classify).count(x)) for x in set(classify)) + counts = 
dict((self.config.to_class(k), v) for k, v in counts.items()) + print counts + + self.config.classifier.train(descs, classify) + self.config.classifier.pickle("train.p") + +# class Classifier(object): + + +class ClassiferTest(object): + + def __init__(self, roi_file, class_file): + self.config = Config() + self.roi_file = roi_file + self.classifier = pickle.load(open(class_file, 'rb')) + self.debug = Debug() + + def classify(self): + print self.roi_file + imgs, roi_val, rois = self.config.get_imgs(self.roi_file) + print "dfd" + for i, frames in enumerate(roi_val): + img = imgs[i] + draw = img.copy() + for roi in frames: + myroi, roi_img, tru_clss = roi + desc = self.config.descriptor.get_descriptor(roi_img) + desc = desc.flatten() + clss, prob = self.classifier.classify(desc) + clss = self.config.to_class(clss) + cv2.rectangle(draw, (myroi[0], myroi[1]), (myroi[0] + myroi[2], myroi[1] + myroi[3]), (0, 0, 255)) + cv2.putText(draw, clss + ": " + str(prob), (myroi[0], myroi[1]), 1, 1.0, (0, 255, 0)) + # self.debug.add_image(draw, topic="roi") + cv2.imshow("roi", draw) + cv2.waitKey(33) + +if __name__ == "__main__": + # t = Training("train_roi.p", "train.p") + # t.train() + # print "done" + c = ClassiferTest("val_roi.p", "train.p") + c.classify() diff --git a/perception/navigator_vision/object_classification/roi_generator.py b/perception/navigator_vision/object_classification/roi_generator.py new file mode 100644 index 00000000..cbe86bef --- /dev/null +++ b/perception/navigator_vision/object_classification/roi_generator.py @@ -0,0 +1,188 @@ +#!/usr/bin/python +from navigator_tools import BagCrawler +import argparse +from cv_bridge import CvBridge +import cv2 +import sys +import pickle +import os +import numpy as np +from median_flow import MedianFlow + + +class ROI_Collection(): + + def __init__(self): + self.bag_to_rois = {} + + def pickle(self, name): + pickle.dump(self, open(name, "wb")) + + +class ROI_Generator(object): + + def __init__(self): + self.folder = os.path.dirname(os.path.realpath(__file__)) + self.bridge = CvBridge() + self.roi_to_tracker = {} + + def get_roi(self, name): + file = open(self.folder + '/' + name, 'r') + for line in file: + line = line.replace('\n', '') + if len(line) == 0: + continue + x, y, w, h = line.split(" ") + yield x, y, w, h + + def create(self, bag, output, load): + self.rects = {} + self.sel_rect = None + bc = BagCrawler(bag) + self.rclk = False + self.lclk = False + topic = bc.image_topics[0] + self.crawl_bu = bc.crawl(topic=topic) + image = self.crawl_bu.next() + self.image = self.bridge.imgmsg_to_cv2(image, 'bgr8') + self.crawl = bc.crawl(topic=topic) + self.x, self.y = 0, 0 + self.rsel = True + self.output = output + w, h, r = self.image.shape + self.button_pressed = False + if load: + self.collection = pickle.load(open(self.folder + '/' + output, "rb")) + else: + self.collection = ROI_Collection() + + self.collection.bag_to_rois[bag] = [] + self.mycoll = self.collection.bag_to_rois[bag] + + self.window_name = 'segment' + cv2.namedWindow(self.window_name) + cv2.setMouseCallback(self.window_name, self.mouse_roi) + self.last_rec = None + + def setw(x): + if self.sel_rect is not None: + r = self.rects[self.sel_rect] + r[2] = x + + def seth(x): + if self.sel_rect is not None: + r = self.rects[self.sel_rect] + r[3] = x + + cv2.createTrackbar("width", self.window_name, 0, w, setw) + cv2.createTrackbar("height", self.window_name, 0, h, seth) + + def out_range(self, bbox): + h, w, r = self.image.shape + if bbox[0] < 0 or bbox[0] + bbox[2] > w: + return True 
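+        # Likewise reject boxes that run off the top or bottom of the frame.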
+        if bbox[1] < 0 or bbox[1] + bbox[3] > h:
+            return True
+        return False
+
+    def go(self):
+        while self.x is None:
+            cv2.waitKey(33)
+        image = self.crawl.next()
+        self.image = self.bridge.imgmsg_to_cv2(image, 'bgr8')
+        doing = False
+        while True:
+            if doing:
+                continue
+            doing = True
+            k = chr(cv2.waitKey(50) & 0xFF)
+            if k == 'q':
+                break
+            elif k == ' ' and not self.rclk:
+                try:
+                    image = self.crawl.next()
+                    self.image = self.bridge.imgmsg_to_cv2(image, 'bgr8')
+                except StopIteration:
+                    break
+
+                remove = []
+                for name in self.rects:
+                    bbox = self.roi_to_tracker[name].track(self.image)
+                    if bbox is None or self.out_range(bbox):
+                        remove.append(name)
+                    else:
+                        self.rects[name] = [bbox[0], bbox[1], bbox[2], bbox[3]]
+                for name in remove:
+                    self.rects.pop(name)
+                    self.roi_to_tracker.pop(name)
+                r = dict(self.rects)
+                self.mycoll.append(r)
+            clone = self.image.copy()
+            for key in self.rects.keys():
+                r = self.rects[key]
+                color = (255, 0, 0)
+                if key == self.sel_rect:
+                    color = (0, 255, 0)
+
+                cv2.rectangle(clone, (r[0], r[1]), (r[0] + r[2], r[1] + r[3]), color, 2)
+                cv2.putText(clone, key, (r[0], r[1]), 1, 1.0, (255, 0, 0))
+
+            cv2.imshow(self.window_name, clone)
+            doing = False
+        self.collection.pickle(self.output)
+
+    def mouse_roi(self, event, x, y, flags, params):
+        if event == cv2.EVENT_RBUTTONDOWN:
+            self.rclk = not self.rclk
+            self.sel_rect = None
+            for name in self.rects:
+                r = self.rects[name]
+                self.roi_to_tracker[name] = MedianFlow()
+                self.roi_to_tracker[name].init(self.image, r)
+            return
+        if self.rclk:
+            if event == cv2.EVENT_LBUTTONDOWN and flags == 48:  # pressing shift
+                if len(self.rects) > 0:
+                    r = min(self.rects.items(), key=lambda rect: np.linalg.norm(
+                        np.array([rect[1][0], rect[1][1]]) - np.array([x, y])))
+                    r = r[0]
+                    self.rects.pop(r)
+                    self.roi_to_tracker.pop(r)
+                    self.sel_rect = None
+            elif event == cv2.EVENT_LBUTTONDOWN and flags == 40:  # pressing ctrl
+                name = raw_input('Enter name of object: ')
+                if name == "skip":
+                    return
+                r = [20, 20, 20, 20]
+                self.rects[name] = r
+            elif event == cv2.EVENT_LBUTTONDOWN:
+                self.lclk = not self.lclk
+                if not self.lclk:
+                    r = self.rects[self.sel_rect]
+                    self.sel_rect = None
+                else:
+                    if len(self.rects) > 0:
+                        self.sel_rect = min(self.rects.items(), key=lambda rect: np.linalg.norm(
+                            np.array([rect[1][0], rect[1][1]]) - np.array([x, y])))
+                        self.sel_rect = self.sel_rect[0]
+                        r = self.rects[self.sel_rect]
+                        cv2.setTrackbarPos("width", self.window_name, r[2])
+                        cv2.setTrackbarPos("height", self.window_name, r[3])
+
+        elif event == cv2.EVENT_MOUSEMOVE:
+            self.x, self.y = x, y
+            if self.sel_rect is not None:
+                r = self.rects[self.sel_rect]
+                self.rects[self.sel_rect][0:4] = [self.x, self.y, r[2], r[3]]
+
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser()
+    parser.add_argument('bag', type=str, help='The bag you would like to use')
+    parser.add_argument('name', type=str, help='The name of the output file')
+    parser.add_argument('--load', action='store_true', help='Load and extend an existing output file')
+    args = parser.parse_args(sys.argv[1:])
+
+    roi = ROI_Generator()
+    roi.create(args.bag, args.name, args.load)
+    roi.go()
From f35ca3773bb7df4c49887c8cea55fd022fa30feb Mon Sep 17 00:00:00 2001
From: Tess
Date: Sun, 4 Dec 2016 16:40:11 -0500
Subject: [PATCH 184/267] OBJECT CLASSIFICATION: removing unnecessary changes for PR

---
 .../navigator_vision/nodes/pickle_maker.py    | 104 ------------------
 .../navigator_tools/navigator_tools/debug.py  |  10 +-
 2 files changed, 5 insertions(+), 109 deletions(-)
 delete mode 100755 
perception/navigator_vision/nodes/pickle_maker.py diff --git a/perception/navigator_vision/nodes/pickle_maker.py b/perception/navigator_vision/nodes/pickle_maker.py deleted file mode 100755 index 41673e93..00000000 --- a/perception/navigator_vision/nodes/pickle_maker.py +++ /dev/null @@ -1,104 +0,0 @@ -#!/usr/bin/env python -from object_classification import LidarToImage, HOGDescriptor, SVMClassifier -from txros import util, NodeHandle -import sys -import cv2 -import time -import numpy as np -import argparse -import pickle -from twisted.internet import reactor, defer, threads -from navigator_tools import Debug -import threading - - -class PickleMaker(object): - - def __init__(self, nh, descriptor, classifier, classes): - self.nh = nh - self.descriptor = descriptor - self.debug = Debug(self.nh) - # load up the trained svm via pickle - self.classifier = classifier - self.descs = [] - self.classify = [] - self.classifier.number += 1 - self.last_time = None - self.classes = classes - self.class_to_id = {} - self.id_to_class = {} - self.first_message = False - for i, clss in enumerate(classes): - self.class_to_id[clss] = i - self.id_to_class[i] = clss - - @util.cancellableInlineCallbacks - def init_(self): - self.lidar_to_image = yield LidarToImage(self.nh, training=True, classes=self.classes).init_() - defer.returnValue(self) - - @util.cancellableInlineCallbacks - def poll(self): - @util.cancellableInlineCallbacks - def _do(): - self.nh.sleep(.1) - img, objs = yield self.lidar_to_image.get_all_object_rois() - if img is None or objs is None or len(objs) == 0: - defer.returnValue(True) - draw = img.copy() - for o in objs: - xmin, ymin, xmax, ymax = o["bbox"] - cv2.rectangle(draw, (xmin, ymin), (xmax, ymax), (255, 0, 0)) - img_name = o["name"] - print img_name - roi = o["img"] - points = o["points"] - cv2.putText(draw, str(o["id"]), (xmin, ymin), 1, 1.0, (255, 255, 255)) - for p in points: - cv2.circle(draw, p, 3, (0, 0, 255), -1) - self.debug.add_image(roi, "roi", topic="roi") - self.last_time = self.nh.get_time() - desc = self.descriptor.get_descriptor(roi) - desc = desc.flatten() - print roi.shape, desc.shape - self.descs.append(desc) - self.classify.append(self.class_to_id[img_name]) - self.debug.add_image(draw, "box", topic="boxes") - - while True: - try: - d = _do() - yield util.wrap_timeout(d, 10) - except Exception: - break - - def done(self): - self.descs = np.array(self.descs) - self.classify = np.array(self.classify) - print self.descs.shape, self.classify.shape - self.classifier.train(self.descs, self.classify) - print np.max(self.classifier.clf.support_vectors_) - self.classifier.pickle("train.p") - reactor.stop() - - -@util.cancellableInlineCallbacks -def main(): - parser = argparse.ArgumentParser() - parser.add_argument('--pickle', type=str, - help="A pickle file you want to load, if you want to use an old one") - nh = yield NodeHandle.from_argv("pickle_maker") - args = parser.parse_args(sys.argv[1:]) - d = HOGDescriptor() - cl = SVMClassifier() - if args.pickle is not None: - cl = pickle.load(open(args.pickle, "rb")) - classes = ["totem", "buoy", "shooter", "scan_the_code", "unknown"] - oc = yield PickleMaker(nh, d, cl, classes).init_() - yield oc.poll() - oc.done() - - -if __name__ == "__main__": - reactor.callWhenRunning(main) - reactor.run() diff --git a/utils/navigator_tools/navigator_tools/debug.py b/utils/navigator_tools/navigator_tools/debug.py index a7846686..f40b267b 100644 --- a/utils/navigator_tools/navigator_tools/debug.py +++ 
b/utils/navigator_tools/navigator_tools/debug.py @@ -37,12 +37,12 @@ def __init__(self, nh=None, w=1000, h=800, total=8, win_name="debug", wait=True) self.win_name = win_name self.name_to_starting = {} self.bridge = CvBridge() - self.base_topic = "/debug/scan_the_code/" + self.base_topic = "/debug/" self.topic_to_pub = {} if nh is None: - self.pub = rospy.Publisher("/debug/scan_the_code/image", Image, queue_size=10) + self.pub = rospy.Publisher("/debug/image", Image, queue_size=10) else: - self.pub = nh.advertise("/debug/scan_the_code/image", Image) + self.pub = nh.advertise("/debug/image", Image) def add_image(self, img, name, wait=33, topic="image"): """ @@ -101,8 +101,8 @@ def _add_new_topic(self, img, name, wait, topic): if topic in self.topic_to_pub.keys(): pub = self.topic_to_pub[topic] elif self.nh is None: - pub = rospy.Publisher("/debug/scan_the_code/" + topic, Image, queue_size=10) + pub = rospy.Publisher("/debug/" + topic, Image, queue_size=10) elif self.nh is not None: - pub = self.nh.advertise("/debug/scan_the_code/" + topic, Image) + pub = self.nh.advertise("/debug/" + topic, Image) self.topic_to_pub[topic] = pub pub.publish(self.bridge.cv2_to_imgmsg(img, color)) From f304304a3ce4062a75b6a89baed9724ea322e2bd Mon Sep 17 00:00:00 2001 From: Tess Date: Sun, 4 Dec 2016 16:58:32 -0500 Subject: [PATCH 185/267] OBJECT CLASSIFICATION: adds authorship --- perception/navigator_vision/object_classification/median_flow.py | 1 + .../object_classification/object_classification.py | 1 + .../navigator_vision/object_classification/roi_generator.py | 1 + 3 files changed, 3 insertions(+) diff --git a/perception/navigator_vision/object_classification/median_flow.py b/perception/navigator_vision/object_classification/median_flow.py index 713b5bd7..7d9ed1ca 100644 --- a/perception/navigator_vision/object_classification/median_flow.py +++ b/perception/navigator_vision/object_classification/median_flow.py @@ -2,6 +2,7 @@ import numpy as np from collections import deque import itertools +___author___ = "Tess Bianchi" class MedianFlow(object): TRACKING_LENGTH = 3 diff --git a/perception/navigator_vision/object_classification/object_classification.py b/perception/navigator_vision/object_classification/object_classification.py index b21d3dec..3fe38a36 100644 --- a/perception/navigator_vision/object_classification/object_classification.py +++ b/perception/navigator_vision/object_classification/object_classification.py @@ -6,6 +6,7 @@ import pickle import cv2 from cv_bridge import CvBridge +___author___ = "Tess Bianchi" class Config(object): diff --git a/perception/navigator_vision/object_classification/roi_generator.py b/perception/navigator_vision/object_classification/roi_generator.py index cbe86bef..705f9429 100644 --- a/perception/navigator_vision/object_classification/roi_generator.py +++ b/perception/navigator_vision/object_classification/roi_generator.py @@ -8,6 +8,7 @@ import os import numpy as np from median_flow import MedianFlow +___author___ = "Tess Bianchi" class ROI_Collection(): From d44fb724074abc295ef8e6515ae62e88b7b7d84f Mon Sep 17 00:00:00 2001 From: Tess Date: Tue, 6 Dec 2016 22:59:19 -0500 Subject: [PATCH 186/267] OBJECT CLASSIFICATION: changes for PR --- .../object_classification/lidar_to_image.py | 13 ++++++++----- .../object_classification/object_classification.py | 5 ++--- utils/navigator_tools/navigator_tools/__init__.py | 4 ++-- .../navigator_tools/{debug.py => cv_debug.py} | 2 +- 4 files changed, 13 insertions(+), 11 deletions(-) rename 
utils/navigator_tools/navigator_tools/{debug.py => cv_debug.py} (99%) diff --git a/perception/navigator_vision/object_classification/lidar_to_image.py b/perception/navigator_vision/object_classification/lidar_to_image.py index 53a3b707..70833357 100644 --- a/perception/navigator_vision/object_classification/lidar_to_image.py +++ b/perception/navigator_vision/object_classification/lidar_to_image.py @@ -14,6 +14,7 @@ from navigator_msgs.srv import ObjectDBQuery, ObjectDBQueryRequest from image_geometry import PinholeCameraModel import numpy as np +___author___ = "Tess Bianchi" class LidarToImage(object): @@ -37,15 +38,20 @@ def __init__(self, nh, training=False, classes=None, dist=50): self.debug = Debug(nh) @util.cancellableInlineCallbacks - def init_(self, right=True): + def init_(self, cam="r"): image_sub = "/stereo/right/image_rect_color" self.tf_frame = "/stereo_right_cam" cam_info = "/stereo/right/camera_info" - if not right: + if cam == 'l': image_sub = "/stereo/left/image_rect_color" self.tf_frame = "/stereo_left_cam" cam_info = "/stereo/left/camera_info" + if cam == 'rr': + image_sub = "/right/right/image_rect_color" + self.tf_frame = "/right_right_cam" + cam_info = "/right/right/camera_info" + yield self.nh.subscribe(image_sub, Image, self._img_cb) self._database = yield self.nh.get_service_client('/database/requests', ObjectDBQuery) self._odom_sub = yield self.nh.subscribe('/odom', Odometry, @@ -57,9 +63,6 @@ def init_(self, right=True): def _info_cb(self, info): self.cam_info = info - def _odom_cb(self, odom): - self.position, self.rot = nt.odometry_to_numpy(odom)[0] - @util.cancellableInlineCallbacks def get_all_object_rois(self): req = ObjectDBQueryRequest() diff --git a/perception/navigator_vision/object_classification/object_classification.py b/perception/navigator_vision/object_classification/object_classification.py index 3fe38a36..7e4ae634 100644 --- a/perception/navigator_vision/object_classification/object_classification.py +++ b/perception/navigator_vision/object_classification/object_classification.py @@ -1,5 +1,5 @@ from roi_generator import ROI_Collection -from navigator_tools import Debug, BagCrawler +from navigator_tools import CvDebug, BagCrawler import numpy as np from HOG_descriptor import HOGDescriptor from SVM_classifier import SVMClassifier @@ -120,12 +120,11 @@ def __init__(self, roi_file, class_file): self.config = Config() self.roi_file = roi_file self.classifier = pickle.load(open(class_file, 'rb')) - self.debug = Debug() + self.debug = CvDebug() def classify(self): print self.roi_file imgs, roi_val, rois = self.config.get_imgs(self.roi_file) - print "dfd" for i, frames in enumerate(roi_val): img = imgs[i] draw = img.copy() diff --git a/utils/navigator_tools/navigator_tools/__init__.py b/utils/navigator_tools/navigator_tools/__init__.py index 6eb57b63..ee89f36a 100644 --- a/utils/navigator_tools/navigator_tools/__init__.py +++ b/utils/navigator_tools/navigator_tools/__init__.py @@ -7,7 +7,7 @@ import general_helpers import object_database_helper import missing_perception_object -import debug +import cv_debug import bag_crawler from init_helpers import * @@ -19,5 +19,5 @@ from general_helpers import * from object_database_helper import * from missing_perception_object import * -from debug import Debug +from debug import CvDebug from bag_crawler import BagCrawler \ No newline at end of file diff --git a/utils/navigator_tools/navigator_tools/debug.py b/utils/navigator_tools/navigator_tools/cv_debug.py similarity index 99% rename from 
utils/navigator_tools/navigator_tools/debug.py rename to utils/navigator_tools/navigator_tools/cv_debug.py index f40b267b..56962e5f 100644 --- a/utils/navigator_tools/navigator_tools/debug.py +++ b/utils/navigator_tools/navigator_tools/cv_debug.py @@ -8,7 +8,7 @@ ___author___ = "Tess Bianchi" -class Debug(object): +class CvDebug(object): """Class that contains methods that assist with debugging with images.""" def __init__(self, nh=None, w=1000, h=800, total=8, win_name="debug", wait=True): From ad10f85f07761eb3c0aeb7f5d62ea9d9f936ffd1 Mon Sep 17 00:00:00 2001 From: Tess Date: Wed, 7 Dec 2016 11:34:44 -0500 Subject: [PATCH 187/267] OBJECT CLASSIFICATION: file name change --- utils/navigator_tools/navigator_tools/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/utils/navigator_tools/navigator_tools/__init__.py b/utils/navigator_tools/navigator_tools/__init__.py index ee89f36a..94d21ece 100644 --- a/utils/navigator_tools/navigator_tools/__init__.py +++ b/utils/navigator_tools/navigator_tools/__init__.py @@ -19,5 +19,5 @@ from general_helpers import * from object_database_helper import * from missing_perception_object import * -from debug import CvDebug +from cv_debug import CvDebug from bag_crawler import BagCrawler \ No newline at end of file From 7608e6e93087bf7dcb2ff3143743918fad55274d Mon Sep 17 00:00:00 2001 From: Matt Langford Date: Fri, 2 Dec 2016 00:49:57 -0500 Subject: [PATCH 188/267] COLORAMA: Update how confidence is calucated and updated weights --- .../navigator_vision/nodes/database_color.py | 477 ++++++++++++------ 1 file changed, 316 insertions(+), 161 deletions(-) diff --git a/perception/navigator_vision/nodes/database_color.py b/perception/navigator_vision/nodes/database_color.py index 281f7c39..f1b02581 100755 --- a/perception/navigator_vision/nodes/database_color.py +++ b/perception/navigator_vision/nodes/database_color.py @@ -1,12 +1,19 @@ #!/usr/bin/env python +from __future__ import division + import cv2 import numpy as np +from collections import deque +from copy import deepcopy import rospy import tf +import tf.transformations as trns + import sensor_msgs.point_cloud2 as pc2 -from navigator_msgs.srv import ObjectDBQuery, ObjectDBQueryRequest, VisionRequest, VisionRequestResponse +from navigator_msgs.srv import ObjectDBQuery, ObjectDBQueryRequest, ColorRequest, ColorRequestResponse from sensor_msgs.msg import CameraInfo, Image +from nav_msgs.msg import Odometry from cv_bridge import CvBridge, CvBridgeError from image_geometry import PinholeCameraModel @@ -16,15 +23,16 @@ camera_root = "/stereo/right" # /camera_root/root -rospy.init_node("database_colorer") ros_t = lambda t: rospy.Duration(t) fprint = lambda *args, **kwargs: _fprint(title="COLORAMA", time="", *args, **kwargs) +p2np = navigator_tools.point_to_numpy class ImageHolder(object): @classmethod def from_msg(cls, msg): time = msg.header.stamp + fprint("Got image! {}".format(time.to_sec())) try: image = CvBridge().imgmsg_to_cv2(msg) return cls(image, time) @@ -75,19 +83,23 @@ def _push(self, data): self._images.append(data) self._images = self._images[-self._history_length:] - def get_around_time(self, time, margin=.05): + def get_around_time(self, time, margin=.5, timeout=.5): ''' Returns the image that is closest to the specified time. Returns `None` if closest image is more than `margin` seconds away. 
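+            Polls for up to `timeout` seconds; an empty ImageHolder is returned if nothing suitable arrives.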
''' - if len(self._images) == 0: - return ImageHolder() - closest = min(self._images, key=lambda image: abs(image.time - time)) - if abs(closest.time - time) > ros_t(margin): - return ImageHolder() # Return invalid image + start_time = rospy.Time.now() + while rospy.Time.now() - start_time <= ros_t(timeout): + if len(self._images) == 0: + continue - return closest + closest = min(self._images, key=lambda image: abs(image.time - time)) + if abs(closest.time - time) > ros_t(margin): + continue # Return invalid image + return closest + + return ImageHolder() class DebugImage(object): def __init__(self, topic, encoding="bgr8", queue_size=1, prd=1): @@ -97,7 +109,6 @@ def __init__(self, topic, encoding="bgr8", queue_size=1, prd=1): self.image = None rospy.Timer(ros_t(prd), self.publish) - def publish(self, *args): if self.image is not None: try: @@ -108,21 +119,90 @@ def publish(self, *args): rospy.logerr(e) +class Observation(object): + history_length = 100 # Default to 100 + + def __init__(self): + self.hues = deque([], maxlen=self.history_length) + self.values = deque([], maxlen=self.history_length) + self.q_errs = deque([], maxlen=self.history_length) + self.dists = deque([], maxlen=self.history_length) + + def __len__(self): + return len(self.hues) + + def __iadd__(self, (hue, value, dist, q_diff)): + """ + Add observations like: + >>> co += [0.65, 0.4, ..., 0.1] + """ + self.hues.append(hue) + self.values.append(value) + self.dists.append(dist) + self.q_errs.append(q_diff) + + return self + + def __repr__(self): + _str = 'hues: {}\nvalues: {}\ndists: {}\nq_errs: {}' + return _str.format(*np.round(map(np.array, [self.hues, self.values, self.dists, self.q_errs]), 3)) + + def extend(self, (hues, values, dists, q_diffs)): + """Add lists of data in at once""" + self.hues.extend(hues) + self.values.extend(values) + self.dists.extend(dists) + self.q_errs.extend(q_diffs) + + def compute_confidence(self, (value_w, dist_w, q_diff_w), **kwargs): + """Don't try to compute weights with bad data (too few samples)""" + + # Get some value information + v_u = kwargs.get('v_u', 230) + v_sig = kwargs.get('v_sig', 30) + dist_sig = kwargs.get('dist_sig', 70) + q_sig = kwargs.get('q_sig', 1.3) + + # Compute data required before applying weights + value_errs = self._guass(self.values, v_u, v_sig) + dists = self._guass(self.dists, 5, dist_sig) + q_diffs = self._guass(self.q_errs, 0, q_sig) + + # Normalize weights + w_norm = value_w + dist_w + q_diff_w + value_w /= w_norm + dist_w /= w_norm + q_diff_w /= w_norm + + #print "1", hue_std_w, np.round(hue_std, 2), hue_std_w * hue_std + #print "2", value_w, np.round(value_errs, 2), value_w * value_errs + #print "3", dist_w, np.round(dists, 2), dist_w * dists + #print "4", q_diff_w, np.round(q_diffs, 2), q_diff_w * q_diffs + + # Compute normalized confidence + c = value_w * value_errs + dist_w * dists + q_diff_w * q_diffs + return c + + def _guass(self, data, mean, sig): + return np.exp(-np.power(np.array(data).astype(np.float64) - mean, 2) / (2 * np.power(sig, 2))) + + class Colorama(object): def __init__(self): info_topic = camera_root + "/camera_info" - image_topic = camera_root + "/image_raw" # Change this to rect on boat + image_topic = camera_root + "/image_rect_color" self.tf_listener = tf.TransformListener() - - # Map id => [{color, error}, ...] 
for determining when a color is good - self.colored = {} + + set_odom = lambda msg: setattr(self, "odom", navigator_tools.pose_to_numpy(msg.pose.pose)) + rospy.Subscriber("/odom", Odometry, set_odom) + fprint("Waiting for odom...") + self.odom = rospy.wait_for_message("/odom", Odometry, timeout=3) + fprint("Odom found!", msg_color='green') db_request = rospy.ServiceProxy("/database/requests", ObjectDBQuery) self.make_request = lambda **kwargs: db_request(ObjectDBQueryRequest(**kwargs)) - rospy.Service("/vision/buoy_colorizer", VisionRequest, self.got_request) - self.image_history = ImageHistory(image_topic) # Wait for camera info, and exit if not found @@ -142,144 +222,230 @@ def __init__(self): # These are color mappings from Hue values [0, 360] self.color_map = {'red': np.radians(0), 'yellow': np.radians(60), - 'green': np.radians(120), 'blue': np.radians(240)} + 'green': np.radians(120), 'blue': np.radians(200)} + + # RGB map for database setting + self.database_color_map = {'red': (255, 0, 0), 'yellow': (255, 255, 0), 'green': (0, 255, 0), 'blue': (0, 0, 255)} + + # ========= Some tunable parameters =================================== + self.update_time = 1 # s + + # Observation parameters + self.saturation_reject = 20 # Reject color obs with below this saturation + self.value_reject = 50 # Reject color obs with below this value + self.height_remove = 0.4 # Remove points that are this percent down on the object (%) + # 1 keeps all, .4 removes the bottom 40% + # Update parameters + self.history_length = 100 # How many of each color to keep + self.min_obs = 5 # Need atleast this many observations before making a determination + self.conf_reject = .5 # When to reject an observation based on it's confidence + + # Confidence weights + self.v_factor = 0.6 # Favor value values close to the nominal value + self.v_u = 200 # Mean of norm for variance error + self.v_sig = 60 # Sigma of norm for variance error + self.dist_factor = 0.3 # Favor being closer to the totem + self.dist_sig = 30 # Sigma of distance (m) + self.q_factor = 0.4 # Favor not looking into the sun + self.q_sig = 1.2 # Sigma of norm for quaternion error (rads) + + # Maps id => observations + self.colored = {} - # Some tunable parameters - self.update_time = .5 # s - self.saturation_reject = 50 - self.value_reject = 50 - self.hue_error_reject = .4 # rads + rospy.Timer(ros_t(self.update_time), self.do_observe) - # To decide when to accept color observations - self.error_tolerance = .4 # rads - self.spottings_req = 4 - self.similarity_reject = .1 # If two colors are within this amount of error, reject + def _compute_average_angle(self, angles, weights): + """ + Returns weighted average of angles. + Tends to break down with too many angles 180 degrees of each other - try not to do that. 
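+        Implementation: weights are normalized, then the average is atan2(sum(w * sin), sum(w * cos)).
+        E.g. with equal weights, 350 and 10 degrees average to 0 degrees rather than 180.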
+        """
+        angles = np.array(angles)
+        if np.linalg.norm(weights) == 0:
+            return None
 
-        rospy.Timer(ros_t(self.update_time), self.do_update)
+        w = weights / np.linalg.norm(weights)
+        s = np.sum(w * np.sin(angles))
+        c = np.sum(w * np.cos(angles))
+        avg_angle = np.arctan2(s, c)
+        return avg_angle
+
+    def _get_quaternion_error(self, q, target_q):
+        """
+        Returns the angular difference between q and target_q in radians
+        """
+        dq = trns.quaternion_multiply(np.array(target_q), trns.quaternion_inverse(np.array(q)))
+        return 2 * np.arccos(dq[3])
 
-    def valid_color(self, _id):
+    def _get_closest_color(self, hue_angle):
         """
-        Either returns valid color or None depending if the color is valid or not
+        Returns a pair of the most likely color and the radian error associated with that prediction
+            `hue_angle` := The radian value associated with hue [0, 2*pi]
+        Colors are found from `self.color_map`
         """
-        if _id not in self.colored:
-            return None
+        c = np.cos(hue_angle)
+        s = np.sin(hue_angle)
+        error = np.inf
+        likely_color = 'undef'
+        for color, h_val in self.color_map.iteritems():
+            cg = np.cos(h_val)
+            sg = np.sin(h_val)
+            # We need a signed error for some math later on so no absolute value
+            this_error = np.arctan2(sg*c - cg*s, cg*c + sg*s)
+            if np.abs(this_error) < np.abs(error):
+                error = this_error
+                likely_color = color
 
-        object_data = self.colored[_id]
-        print "object_data", object_data
-
-        # Maps color => [err1, err2, ..]
-        potential_colors = {}
-        for data in object_data:
-            color, err = data['color'], data['err']
-            if color not in potential_colors:
-                potential_colors[color] = []
-
-            potential_colors[color].append(err)
-
-        potential_colors_avg = {}
-        for color, errs in potential_colors.iteritems():
-            if len(errs) > self.spottings_req:
-                potential_colors_avg[color] = np.average(errs)
-
-        print "potential_colors_avg", potential_colors_avg
-        if len(potential_colors_avg) == 1:
-            # No debate about the color
-            color = potential_colors_avg.keys()[0]
-            err = potential_colors_avg[color]
-            fprint("Only one color found, error: {} (/ {})".format(err, self.error_tolerance))
-            if len(potential_colors[color]) > self.spottings_req and err <= self.error_tolerance:
-                return color
-
-        elif len(potential_colors_avg) > 1:
-            # More than one color, see if one of them is still valid
-            fprint("More than one color detected. Removing all.")
-            self.colored[_id] = []
+        fprint("Likely color: {} with a hue error of {} rads.".format(likely_color, np.round(error, 3)))
+        return [likely_color, error]
+
+    def do_estimate(self, totem_id):
+        """Calculates the best color estimate for a given totem id"""
+        fprint("DOING ESTIMATE", msg_color='blue')
+        if totem_id not in self.colored:
+            fprint("Totem ID {} not found!".format(totem_id), msg_color='red')
+            return None
+
+        t_color = self.colored[totem_id]
+
+        if len(t_color) < self.min_obs:
+            fprint("Only {} observations. {} required.".format(len(t_color), self.min_obs), msg_color='red')
+            return None
+
+        kwargs = {'v_u': self.v_u, 'v_sig': self.v_sig, 'dist_sig': self.dist_sig,
+                  'q_factor': self.q_factor, 'q_sig': self.q_sig}
+        w = t_color.compute_confidence([self.v_factor, self.dist_factor, self.q_factor], **kwargs)
+        fprint("CONF: {}".format(w))
+        if np.mean(w) < self.conf_reject:
+            return None
+
+        hue_angles = np.radians(np.array(t_color.hues) * 2)
+        angle = self._compute_average_angle(hue_angles, w)
+        color = self._get_closest_color(angle)
+
+        fprint("Color: {}".format(color[0]))
+        return color
+
     def got_request(self, req):
         # Threading blah blah something unsafe
-
-        color = self.valid_color(req.target_id)
-        if color is None:
-            fprint("ID {} is NOT a valid color".format(req.target_id), msg_color='red')
-            return VisionRequestResponse(found=False)
-
-        return VisionRequestResponse(found=True, color=color)
-
-    def do_update(self, *args):
-        resp = self.make_request(name='all')
-
+        colored_ids = []
+        for _id in self.colored:
+            est = self.do_estimate(_id)
+            if est is not None and est[0] == req.color:
+                colored_ids.append(_id)
+
+        fprint("Colored IDs: {}".format(colored_ids), msg_color='blue')
+        print '\n' * 50
+        if len(colored_ids) == 0:
+            return ColorRequestResponse(found=False)
+
+        return ColorRequestResponse(found=True, ids=colored_ids)
+
+    def do_observe(self, *args):
+        resp = self.make_request(name='totem')
+
+        # If at least one totem was found, start observing
         if resp.found:
-            # temp
-            time_of_marker = rospy.Time.now() - ros_t(.2)  # header.stamp not filled out
+            # Time of the database request
+            time_of_marker = resp.objects[0].header.stamp
+            fprint("Looking for image at {}".format(time_of_marker.to_sec()), msg_color='yellow')
             image_holder = self.image_history.get_around_time(time_of_marker)
             if not image_holder.contains_valid_image:
+                t = self.image_history.newest_image.time
+                if t is None:
+                    fprint("No images found.")
+                    return
+
+                fprint("No valid image found for t={} ({}) dt: {}".format(time_of_marker.to_sec(), t.to_sec(), (rospy.Time.now() - t).to_sec()), msg_color='red')
                 return
-
-            header = navigator_tools.make_header(frame="/enu", stamp=image_holder.time)  #resp.objects[0].header
+            header = navigator_tools.make_header(frame='/enu', stamp=image_holder.time)
             image = image_holder.image
             self.debug.image = np.copy(image)
 
             cam_tf = self.camera_model.tfFrame()
-            fprint("Getting transform between /enu and {}...".format(cam_tf))
-            self.tf_listener.waitForTransform("/enu", cam_tf, time_of_marker, ros_t(1))
-            t_mat44 = self.tf_listener.asMatrix(cam_tf, header)  # homogenous 4x4 transformation matrix
+            try:
+                fprint("Getting transform between /enu and {}...".format(cam_tf))
+                self.tf_listener.waitForTransform("/enu", cam_tf, time_of_marker, ros_t(1))
+                t_mat44 = self.tf_listener.asMatrix(cam_tf, header)
+            except tf.ExtrapolationException as e:
+                fprint("TF error found and excepted: {}".format(e))
+                return
 
             for obj in resp.objects:
-                if len(obj.points) > 0 and obj.name == "totem":
-                    fprint("{} {}".format(obj.id, "=" * 50))
-
-                    print obj.position
-                    # Get objects position in camera frame and make sure it's in view
-                    object_cam = t_mat44.dot(np.append(navigator_tools.point_to_numpy(obj.position), 1))
-                    object_px = map(int, self.camera_model.project3dToPixel(object_cam[:3]))
-                    if not self._object_in_frame(object_cam):
-                        continue
-
-                    #print object_px
-
-                    points_np = np.array(map(navigator_tools.point_to_numpy, obj.points))
-                    # We dont want points below a certain level
-                    points_np = points_np[points_np[:, 2] > -2.5]
-                    # Shove ones in there to make
homogenous points - points_np_homo = np.hstack((points_np, np.ones((points_np.shape[0], 1)))).T - points_cam = t_mat44.dot(points_np_homo).T - points_px = map(self.camera_model.project3dToPixel, points_cam[:, :3]) - #print points_px - - roi = self._get_ROI_from_points(points_px) - color_info = self._get_color_from_ROI(roi, image) # hue, string_color, error - bgr = (0, 0, 0) - - if color_info is not None: - hue, color, error = color_info - c = (int(hue), 255, 255) - hsv = np.array([[c]])[:, :3].astype(np.uint8) - bgr = cv2.cvtColor(hsv, cv2.COLOR_HSV2BGR)[0, 0] - bgr = tuple(bgr.astype(np.uint8).tolist()) - - if hue is not None: - if obj.id not in self.colored: - self.colored[obj.id] = [] - - self.colored[obj.id].append({'color':color, 'err':error}) - - if self.valid_color(obj.id): - fprint("COLOR IS VALID", msg_color='green') - self.make_request(cmd='{name}={bgr[2]},{bgr[1]},{bgr[0]},{_id}'.format(name=obj.name,_id= obj.id, bgr=bgr)) - - [cv2.circle(self.debug.image, tuple(map(int, p)), 2, bgr, -1) for p in points_px] - cv2.circle(self.debug.image, tuple(object_px), 10, bgr, -1) - font = cv2.FONT_HERSHEY_SIMPLEX - cv2.putText(self.debug.image, str(obj.id), tuple(object_px), font, 1, bgr, 2) - - print '\n' * 2 + if len(obj.points) <= 1: + fprint("No points in object") + continue + + fprint("{} {}".format(obj.id, "=" * 50)) + + # Get object position in px coordinates to determine if it's in frame + object_cam = t_mat44.dot(np.append(p2np(obj.position), 1)) + object_px = map(int, self.camera_model.project3dToPixel(object_cam[:3])) + if not self._object_in_frame(object_cam): + fprint("Object not in frame") + continue + + # Get enu points associated with this totem and remove ones that are too low + points_np = np.array(map(p2np, obj.points)) + height = np.max(points_np[:, 2]) - np.min(points_np[:, 2]) + if height < .1: + # If the height of the object is too small, skip (units are meters) + fprint("Object too small") + continue + + threshold = np.min(points_np[:, 2]) + self.height_remove * height + points_np = points_np[points_np[:, 2] > threshold] + + # Shove ones in there to make homogenous points to get points in image frame + points_np_homo = np.hstack((points_np, np.ones((points_np.shape[0], 1)))).T + points_cam = t_mat44.dot(points_np_homo).T + points_px = map(self.camera_model.project3dToPixel, points_cam[:, :3]) + + [cv2.circle(self.debug.image, tuple(map(int, p)), 2, (255, 255, 255), -1) for p in points_px] + + # Get color information from the points + roi = self._get_ROI_from_points(points_px) + h, s, v = self._get_hsv_from_ROI(roi, image) + + # Compute parameters that go into the confidence + boat_q = self.odom[1] + target_q = self._get_solar_angle() + q_err = self._get_quaternion_error(boat_q, target_q) + + dist = np.linalg.norm(self.odom[0] - p2np(obj.position)) + + fprint("H: {}, S: {}, V: {}".format(h, s, v)) + fprint("q_err: {}, dist: {}".format(q_err, dist)) + + # Add to database and setup debug image + if s < self.saturation_reject or v < self.value_reject: + err_msg = "The colors aren't expressive enough s: {} ({}) v: {} ({}). Rejecting." 
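+                    # Hue is only meaningful for strongly colored pixels: a grey or
+                    # very dark patch has an essentially arbitrary hue, so low-saturation
+                    # and low-value readings are dropped instead of polluting the average.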
+                    fprint(err_msg.format(s, self.saturation_reject, v, self.value_reject), msg_color='red')
+
+                else:
+                    if obj.id not in self.colored:
+                        self.colored[obj.id] = Observation()
+
+                    # Add this observation in
+                    self.colored[obj.id] += h, v, dist, q_err
+                    print self.colored[obj.id]
+
+                rgb = (0, 0, 0)
+                color = self.do_estimate(obj.id)
+                # Do we have a valid color estimate?
+                if color:
+                    fprint("COLOR IS VALID", msg_color='green')
+                    rgb = self.database_color_map[color[0]]
+
+                    cmd = '{name}={rgb[0]},{rgb[1]},{rgb[2]},{_id}'
+                    self.make_request(cmd=cmd.format(name=obj.name, _id=obj.id, rgb=rgb))
+
+                bgr = rgb[::-1]
+                cv2.circle(self.debug.image, tuple(object_px), 10, bgr, -1)
+                font = cv2.FONT_HERSHEY_SIMPLEX
+                cv2.putText(self.debug.image, str(obj.id), tuple(object_px), font, 1, bgr, 2)
+
+    def _get_solar_angle(self):
+        return [0, 0, 0, 1]
 
     def _get_ROI_from_points(self, image_points):
-        # Probably will return a set of contours
-        cnt = np.array(image_points).astype(np.float32)
-
+        cnt = np.array([image_points]).astype(np.float32)
         rect = cv2.minAreaRect(cnt)
         box = cv2.cv.BoxPoints(rect)
         box = np.int0(box)
@@ -287,65 +453,54 @@ def _get_ROI_from_points(self, image_points):
         cv2.drawContours(self.debug.image, [box], 0, (0, 0, 255), 2)
         return box
 
-    def _get_color_from_ROI(self, roi, img):
+    def _get_hsv_from_ROI(self, roi, img):
         mask = np.zeros(img.shape[:2], np.uint8)
         cv2.drawContours(mask, [roi], 0, 255, -1)
         bgr = cv2.mean(img, mask)
         # We have to treat that bgr value as a 3d array to get cvtColor to work
         bgr = np.array([[bgr]])[:, :3].astype(np.uint8)
         h, s, v = cv2.cvtColor(bgr, cv2.COLOR_BGR2HSV)[0, 0]
+
+        return h, s, v
+        # NOTE: everything below this return is unreachable; classification is now
+        # handled by do_estimate().
 
         # Now check that s and v are in a good range
         if s < self.saturation_reject or v < self.value_reject:
-            fprint("The colors aren't expressive enough. Rejecting.", msg_color='red')
+            err_msg = "The colors aren't expressive enough s: {} ({}) v: {} ({}). Rejecting."
+            fprint(err_msg.format(s, self.saturation_reject, v, self.value_reject), msg_color='red')
             return None
 
         # Compute hue error in SO2
         hue_angle = np.radians(h * 2)
-        c = np.cos(hue_angle)
-        s = np.sin(hue_angle)
-        error = np.inf
-        likely_color = 'bazinga'
-        for color, h_val in self.color_map.iteritems():
-            cg = np.cos(h_val)
-            sg = np.sin(h_val)
-            this_error = np.abs(np.arctan2(sg*c - cg*s, cg*c + sg*s))
-            if this_error < error:
-                error = this_error
-                likely_color = color
+
+        # Display the current values
+        font = cv2.FONT_HERSHEY_SIMPLEX
+        cv2.putText(self.debug.image, "h_val: {}".format(np.degrees(hue_angle)),
+                    tuple(roi[1]), font, 1, (255, 255, 255), 2)
+
+        likely_color, error = self._get_closest_color(hue_angle)
         if error > self.hue_error_reject:
-            fprint("Closest color was {} with an error of {} rads (> {}) Rejecting.".format(likely_color, np.round(error, 3),
+            fprint("Closest color was {} with an error of {} rads (> {}). Rejecting.".format(likely_color, np.round(error, 3),
                                                                                             self.hue_error_reject), msg_color='red')
             return None
 
         fprint("Likely color: {} with a hue error of {} rads.".format(likely_color, np.round(error, 3)))
-        return [np.degrees(self.color_map[likely_color]) / 2.0, likely_color, error]
+        return [likely_color, error]
 
     def _object_in_frame(self, object_point):
         """
         Returns if the object is in frame given the centroid of the object.
         `object_point` should be in the camera_model's frame.
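         For instance, a point at (0, 0, 1) in a conventional optical frame (one meter
         straight ahead, +z forward) projects to the principal point and passes the
         check, while any point with negative z is behind the camera and is rejected.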
""" - print object_point if object_point[2] < 0: return False px = np.array(self.camera_model.project3dToPixel(object_point)) resoultion = self.camera_model.fullResolution() - if np.any([0,0] > px) or np.any(px > resoultion): - return False - - return True - + return not (np.any([0, 0] > px) or np.any(px > resoultion)) -#rospy.Subscriber("/velodyne_points", pc2.PointCloud2, cb) -# req = rospy.ServiceProxy("/database/requests", ObjectDBQuery) -# resp = req(ObjectDBQueryRequest(name="all")) -# if resp.found: -# for db_object in resp.objects: -# if len(db_object.points) > 0: -# print db_object.points -# print -c = Colorama() -rospy.spin() +if __name__ == "__main__": + rospy.init_node("database_colorer") + c = Colorama() + rospy.spin() From 2076c44ca13ce5c031736850ffb231b5e90c0f34 Mon Sep 17 00:00:00 2001 From: Kevin Allen Date: Tue, 29 Nov 2016 00:33:13 -0500 Subject: [PATCH 189/267] DETECT DELIVER: Catch more pottential errors in camera_to_lidar --- .../nodes/camera_to_lidar/CameraLidarTransformer.cpp | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/perception/navigator_vision/nodes/camera_to_lidar/CameraLidarTransformer.cpp b/perception/navigator_vision/nodes/camera_to_lidar/CameraLidarTransformer.cpp index 102fe7ef..e21ea6c9 100644 --- a/perception/navigator_vision/nodes/camera_to_lidar/CameraLidarTransformer.cpp +++ b/perception/navigator_vision/nodes/camera_to_lidar/CameraLidarTransformer.cpp @@ -46,6 +46,12 @@ bool CameraLidarTransformer::transformServiceCallback(navigator_msgs::CameraToLi res.error = "NO CAMERA INFO"; return true; } + if (camera_info.header.frame_id != req.header.frame_id) + { + res.success = false; + res.error = "DIFFERENT FRAME ID THAN SUBSCRIBED CAMERA"; + return true; + } visualization_msgs::MarkerArray markers; sensor_msgs::PointCloud2ConstPtr scloud = lidarCache.getElemAfterTime(req.header.stamp); if (!scloud) { @@ -53,6 +59,12 @@ bool CameraLidarTransformer::transformServiceCallback(navigator_msgs::CameraToLi res.error = navigator_msgs::CameraToLidarTransform::Response::CLOUD_NOT_FOUND; return true; } + if (!tfBuffer.canTransform(req.header.frame_id, "velodyne", ros::Time(0), ros::Duration(1))) + { + res.success = false; + res.error = "NO TRANSFORM"; + return true; + } geometry_msgs::TransformStamped transform = tfBuffer.lookupTransform(req.header.frame_id, "velodyne", ros::Time(0)); sensor_msgs::PointCloud2 cloud_transformed; tf2::doTransform(*scloud, cloud_transformed, transform); From 9aa96ff8945d92e5caa44fa1145a58bc947e2680 Mon Sep 17 00:00:00 2001 From: Tess Date: Sun, 11 Dec 2016 14:05:24 -1000 Subject: [PATCH 190/267] EXPLORE_MISSION: - adds the new explore mission that uses the camera object classification - changes the roi tool for pause and unpause, instead of having to hold down the space bar --- .../nodes/object_classifier.py | 52 +++++++++++++------ .../object_classification/lidar_to_image.py | 20 ++++--- .../object_classification/roi_generator.py | 14 +++-- 3 files changed, 57 insertions(+), 29 deletions(-) diff --git a/perception/navigator_vision/nodes/object_classifier.py b/perception/navigator_vision/nodes/object_classifier.py index aa31fa3d..8312282f 100755 --- a/perception/navigator_vision/nodes/object_classifier.py +++ b/perception/navigator_vision/nodes/object_classifier.py @@ -2,39 +2,59 @@ import txros from txros import util from twisted.internet import reactor, defer -from object_classification import LidarToImage, HOGDescriptor, SVMClassifier +from object_classification import LidarToImage +from navigator_msgs.srv import 
CameraDBQuery, CameraDBQueryResponse +from object_classification import Config +import pickle +import os.path class ObjectClassifier(object): - def __init__(self, nh, descriptor, classifier): + def __init__(self, nh, classifier, config): self.nh = nh - self.descriptor = descriptor # load up the trained svm via pickle self.classifier = classifier + self.config = config @util.cancellableInlineCallbacks def init_(self): - self.lidar_to_image = yield LidarToImage(self.nh, self.image_cb).init_() + yield self.nh.advertise_service("/camera_database/requests", CameraDBQuery, self.database_request) + self.lidar_to_image = yield LidarToImage(self.nh).init_() defer.returnValue(self) - - def image_cb(self, imgs): - for img in imgs: - objid = img["id"] - img = img["img"] - desc = self.descriptor.get_descriptor(img) - classification = self.classify(desc) - print "id", objid, classification + @util.cancellableInlineCallbacks + def database_request(self, req): + id_ = req.id + name = req.name + img, rois = yield self.lidar_to_image.get_object_rois(name=id_) + resp = CameraDBQueryResponse() + if img is None or len(rois) == 0: + resp.found = False + defer.returnValue(resp) + + desc = self.config.descriptor.get_descriptor(rois[0]) + clss, prob = self.classifier.classify(desc) + clss = self.config.to_class(clss) + + if name == clss and prob < .8: + resp.found = True + else: + resp.found = False + defer.returnValue(resp) @util.cancellableInlineCallbacks def main(): nh = yield txros.NodeHandle.from_argv("object_classifier") - d = HOGDescriptor() - cl = SVMClassifier() - oc = yield ObjectClassifier(nh, d, cl).init_() - + config = Config() + class_file = os.path.abspath(__file__) + class_file = class_file.split("nodes")[0] + class_file += class_file + "/object_classification/train.p" + print class_file + + cl = pickle.load(open(class_file, 'rb')) + oc = yield ObjectClassifier(nh, cl, config).init_() if __name__ == "__main__": reactor.callWhenRunning(main) diff --git a/perception/navigator_vision/object_classification/lidar_to_image.py b/perception/navigator_vision/object_classification/lidar_to_image.py index 70833357..1fbdfd26 100644 --- a/perception/navigator_vision/object_classification/lidar_to_image.py +++ b/perception/navigator_vision/object_classification/lidar_to_image.py @@ -2,7 +2,7 @@ from twisted.internet import defer from txros import util, tf import navigator_tools as nt -from navigator_tools import Debug +from navigator_tools import CvDebug import sys from collections import deque from cv_bridge import CvBridge @@ -35,7 +35,7 @@ def __init__(self, nh, training=False, classes=None, dist=50): self.busy = False self.c = 0 - self.debug = Debug(nh) + self.debug = CvDebug(nh) @util.cancellableInlineCallbacks def init_(self, cam="r"): @@ -64,9 +64,12 @@ def _info_cb(self, info): self.cam_info = info @util.cancellableInlineCallbacks - def get_all_object_rois(self): + def get_object_rois(self, name=None): req = ObjectDBQueryRequest() - req.name = 'all' + if name is None: + req.name = 'all' + else: + req.name = name obj = yield self._database(req) if obj is None or not obj.found: defer.returnValue((None, None)) @@ -75,7 +78,11 @@ def get_all_object_rois(self): if ros_img is None: defer.returnValue((None, None)) img = self.bridge.imgmsg_to_cv2(ros_img, "mono8") - for o in obj.objects: + objects = obj.objects + if name is not None: + objects = obj.objects[0] + + for o in objects: if o.id not in self.id_to_perist: self.id_to_perist[o.id] = [] ppoints = self.id_to_perist[o.id] @@ -100,11 +107,8 @@ def 
get_all_object_rois(self): continue if ymin < 0: ymin = 0 - print "bbox", bbox roi = img[ymin:ymax, xmin:xmax] - print "preshape", roi.shape roi = self._resize_image(roi) - print "postshape", roi.shape ret_obj = {} ret_obj["id"] = o.id ret_obj["points"] = points diff --git a/perception/navigator_vision/object_classification/roi_generator.py b/perception/navigator_vision/object_classification/roi_generator.py index 705f9429..713fbfed 100644 --- a/perception/navigator_vision/object_classification/roi_generator.py +++ b/perception/navigator_vision/object_classification/roi_generator.py @@ -92,6 +92,7 @@ def go(self): image = self.crawl.next() self.image = self.bridge.imgmsg_to_cv2(image, 'bgr8') doing = False + pause = True while True: if doing: continue @@ -99,7 +100,9 @@ def go(self): k = chr(cv2.waitKey(50) & 0xFF) if k == 'q': break - elif k == ' ' and not self.rclk: + elif k == ' ': + pause = not pause + elif not pause and not self.rclk: try: image = self.crawl.next() self.image = self.bridge.imgmsg_to_cv2(image, 'bgr8') @@ -142,7 +145,7 @@ def mouse_roi(self, event, x, y, flags, params): self.roi_to_tracker[name].init(self.image, r) return if self.rclk: - if event == cv2.EVENT_LBUTTONDOWN and flags == 48: # pressing shift + if event == cv2.EVENT_LBUTTONDOWN and flags == 16: # pressing shift, remove box if len(self.rects) > 0: r = min(self.rects.items(), key=lambda rect: np.linalg.norm( np.array([rect[1][0], rect[1][1]]) - np.array([x, y]))) @@ -150,7 +153,7 @@ def mouse_roi(self, event, x, y, flags, params): self.rects.pop(r) self.roi_to_tracker.pop(r) self.sel_rect = None - elif event == cv2.EVENT_LBUTTONDOWN and flags == 40: # pressing cntrl + elif event == cv2.EVENT_LBUTTONDOWN and flags == 8: # pressing cntrl, add box name = raw_input('Enter name of object: ') if name == "skip": return @@ -159,8 +162,9 @@ def mouse_roi(self, event, x, y, flags, params): elif event == cv2.EVENT_LBUTTONDOWN: self.lclk = not self.lclk if not self.lclk: - r = self.rects[self.sel_rect] - self.sel_rect = None + if self.sel_rect is not None: + r = self.rects[self.sel_rect] + self.sel_rect = None else: if len(self.rects) > 0: self.sel_rect = min(self.rects.items(), key=lambda rect: np.linalg.norm( From 87076217848349ce505631deadea5324330c8469 Mon Sep 17 00:00:00 2001 From: Tess Date: Sun, 11 Dec 2016 22:31:28 -1000 Subject: [PATCH 191/267] EXPLORE_MISSION: - creates an explore mission that confirms the identity of the object before beginning the mission, and locks that object into the database --- .../nodes/object_classifier.py | 30 ++++++++++++++----- .../object_classification/__init__.py | 4 ++- .../object_classification/depickler.py | 10 +++++++ .../object_classification/lidar_to_image.py | 8 +++-- .../object_classification.py | 2 +- 5 files changed, 42 insertions(+), 12 deletions(-) create mode 100644 perception/navigator_vision/object_classification/depickler.py diff --git a/perception/navigator_vision/nodes/object_classifier.py b/perception/navigator_vision/nodes/object_classifier.py index 8312282f..83c19b31 100755 --- a/perception/navigator_vision/nodes/object_classifier.py +++ b/perception/navigator_vision/nodes/object_classifier.py @@ -4,9 +4,11 @@ from twisted.internet import reactor, defer from object_classification import LidarToImage from navigator_msgs.srv import CameraDBQuery, CameraDBQueryResponse -from object_classification import Config -import pickle +from object_classification import Config, depicklify +from navigator_tools import CvDebug, fprint import os.path +import numpy as np 
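+# Editorial note: train.p stores the classifier pickled when SVM_classifier was
+# importable as a top-level module, so a plain pickle.load() here would raise
+# ImportError. depicklify() (added in this patch) aliases the module into
+# sys.modules around the load so those pickled references resolve.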
+import cv2 class ObjectClassifier(object): @@ -16,6 +18,7 @@ def __init__(self, nh, classifier, config): # load up the trained svm via pickle self.classifier = classifier self.config = config + self.debug = CvDebug(nh=nh) @util.cancellableInlineCallbacks def init_(self): @@ -27,19 +30,31 @@ def init_(self): def database_request(self, req): id_ = req.id name = req.name - img, rois = yield self.lidar_to_image.get_object_rois(name=id_) + img, rois = yield self.lidar_to_image.get_object_rois(name=str(id_)) resp = CameraDBQueryResponse() if img is None or len(rois) == 0: + fprint("Incorrect from reading from the lidar", msg_color='red') resp.found = False defer.returnValue(resp) - desc = self.config.descriptor.get_descriptor(rois[0]) + r = rois[0] + roi = r["img"] + bbox = r["bbox"] + draw = img.copy() + + desc = self.config.descriptor.get_descriptor(roi) clss, prob = self.classifier.classify(desc) clss = self.config.to_class(clss) - if name == clss and prob < .8: + cv2.rectangle(draw, (bbox[0], bbox[1]), (bbox[0] + bbox[2], bbox[1] + bbox[3]), (0, 0, 255)) + cv2.putText(draw, clss + ": " + str(np.round(prob)), (bbox[0], bbox[1]), 1, 1.0, (0, 255, 0)) + self.debug.add_image(draw, "stuff", topic="panda") + + if name == clss and prob > .8: + fprint("Object Found", msg_color='green') resp.found = True else: + fprint("Object missclassified with class {}, prob {}".format(clss, prob), msg_color='red') resp.found = False defer.returnValue(resp) @@ -50,10 +65,9 @@ def main(): config = Config() class_file = os.path.abspath(__file__) class_file = class_file.split("nodes")[0] - class_file += class_file + "/object_classification/train.p" - print class_file + class_file = class_file + "object_classification/train.p" - cl = pickle.load(open(class_file, 'rb')) + cl = depicklify(class_file) oc = yield ObjectClassifier(nh, cl, config).init_() if __name__ == "__main__": diff --git a/perception/navigator_vision/object_classification/__init__.py b/perception/navigator_vision/object_classification/__init__.py index 9e6ed716..af106f01 100644 --- a/perception/navigator_vision/object_classification/__init__.py +++ b/perception/navigator_vision/object_classification/__init__.py @@ -1,3 +1,5 @@ from lidar_to_image import LidarToImage from SVM_classifier import SVMClassifier -from HOG_descriptor import HOGDescriptor \ No newline at end of file +from HOG_descriptor import HOGDescriptor +from object_classification import * +from depickler import depicklify \ No newline at end of file diff --git a/perception/navigator_vision/object_classification/depickler.py b/perception/navigator_vision/object_classification/depickler.py new file mode 100644 index 00000000..e7245858 --- /dev/null +++ b/perception/navigator_vision/object_classification/depickler.py @@ -0,0 +1,10 @@ +import pickle +import sys +import SVM_classifier + + +def depicklify(filename): + sys.modules['SVM_classifier'] = SVM_classifier + cl = pickle.load(open(filename, 'rb')) + del sys.modules['SVM_classifier'] + return cl diff --git a/perception/navigator_vision/object_classification/lidar_to_image.py b/perception/navigator_vision/object_classification/lidar_to_image.py index 1fbdfd26..c09706c1 100644 --- a/perception/navigator_vision/object_classification/lidar_to_image.py +++ b/perception/navigator_vision/object_classification/lidar_to_image.py @@ -71,18 +71,23 @@ def get_object_rois(self, name=None): else: req.name = name obj = yield self._database(req) + + print name + print obj.found if obj is None or not obj.found: defer.returnValue((None, None)) rois = [] 
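         # The database stamp rarely matches a camera frame exactly, so the closest
         # buffered image by stamp difference is used; _get_closest_image() below
         # keeps polling (sleeping 0.3 s between attempts) until one is available.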
ros_img = yield self._get_closest_image(obj.objects[0].header.stamp) if ros_img is None: + print "gahh" defer.returnValue((None, None)) img = self.bridge.imgmsg_to_cv2(ros_img, "mono8") objects = obj.objects if name is not None: - objects = obj.objects[0] + objects = [obj.objects[0]] for o in objects: + print o.name if o.id not in self.id_to_perist: self.id_to_perist[o.id] = [] ppoints = self.id_to_perist[o.id] @@ -186,7 +191,6 @@ def _get_closest_image(self, time): if diff < min_diff: min_diff = diff min_img = img - print min_diff.to_sec() if min_img is not None: defer.returnValue(min_img) yield self.nh.sleep(.3) diff --git a/perception/navigator_vision/object_classification/object_classification.py b/perception/navigator_vision/object_classification/object_classification.py index 7e4ae634..3382e109 100644 --- a/perception/navigator_vision/object_classification/object_classification.py +++ b/perception/navigator_vision/object_classification/object_classification.py @@ -95,7 +95,7 @@ def __init__(self, roi_file, output): def train(self): descs = [] classify = [] - imgs, roi_val, rois = self.config.get_imgs(self.roi_file) + img, roi_val, rois = self.config.get_imgs(self.roi_file) for r in rois: roi, clss = r desc = self.config.descriptor.get_descriptor(roi) From c9a31c748ae93213e0b15552d3ff612ba79b9b1d Mon Sep 17 00:00:00 2001 From: Tess Date: Mon, 12 Dec 2016 22:35:49 -1000 Subject: [PATCH 192/267] EXPLORE MISSION: updates for pr --- .../navigator_vision/object_classification/lidar_to_image.py | 1 - .../object_classification/object_classification.py | 2 +- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/perception/navigator_vision/object_classification/lidar_to_image.py b/perception/navigator_vision/object_classification/lidar_to_image.py index c09706c1..c1ce1b30 100644 --- a/perception/navigator_vision/object_classification/lidar_to_image.py +++ b/perception/navigator_vision/object_classification/lidar_to_image.py @@ -79,7 +79,6 @@ def get_object_rois(self, name=None): rois = [] ros_img = yield self._get_closest_image(obj.objects[0].header.stamp) if ros_img is None: - print "gahh" defer.returnValue((None, None)) img = self.bridge.imgmsg_to_cv2(ros_img, "mono8") objects = obj.objects diff --git a/perception/navigator_vision/object_classification/object_classification.py b/perception/navigator_vision/object_classification/object_classification.py index 3382e109..7e4ae634 100644 --- a/perception/navigator_vision/object_classification/object_classification.py +++ b/perception/navigator_vision/object_classification/object_classification.py @@ -95,7 +95,7 @@ def __init__(self, roi_file, output): def train(self): descs = [] classify = [] - img, roi_val, rois = self.config.get_imgs(self.roi_file) + imgs, roi_val, rois = self.config.get_imgs(self.roi_file) for r in rois: roi, clss = r desc = self.config.descriptor.get_descriptor(roi) From 0b39e50cfc6e1dd467ceafacbcf533d1691769d8 Mon Sep 17 00:00:00 2001 From: Kevin Allen Date: Mon, 12 Dec 2016 04:06:47 -0500 Subject: [PATCH 193/267] IDENTIFY DOCK: untested mission for docking using dd vision service has two mission options, one relying on a perception node that hasn't been written to identify the location of bays, and another simply moving in front of symbols like in detect deliver, assuming they are in the center dock progress rebase docking bug fixes --- .../navigator_vision/nodes/shape_identification/main.cpp | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git 
a/perception/navigator_vision/nodes/shape_identification/main.cpp b/perception/navigator_vision/nodes/shape_identification/main.cpp index fb8d68e0..f48b75e2 100644 --- a/perception/navigator_vision/nodes/shape_identification/main.cpp +++ b/perception/navigator_vision/nodes/shape_identification/main.cpp @@ -45,7 +45,9 @@ class ShooterVision { vision->init(); nh_.param("symbol_camera", camera_topic, "/right/right/image_raw"); - runService = nh_.advertiseService("/vision/get_shapes/switch", &ShooterVision::runCallback, this); + std::string get_shapes_topic; + nh_.param("get_shapes_topic",get_shapes_topic,"get_shapes"); + runService = nh_.advertiseService(get_shapes_topic+"/switch", &ShooterVision::runCallback, this); roiService = nh_.advertiseService("setROI", &ShooterVision::roiServiceCallback, this); //#ifdef DO_DEBUG From cf350c07d7d2d8cc55dd4a7266fb087f51550f53 Mon Sep 17 00:00:00 2001 From: Kevin Allen Date: Tue, 13 Dec 2016 00:12:46 -0500 Subject: [PATCH 194/267] PINGER: exit through gate after circling docking last minute yaml --- .../nodes/shape_identification/ShapeTracker.cpp | 5 ++++- .../nodes/shape_identification/TrackedShape.cpp | 9 ++++++--- 2 files changed, 10 insertions(+), 4 deletions(-) diff --git a/perception/navigator_vision/nodes/shape_identification/ShapeTracker.cpp b/perception/navigator_vision/nodes/shape_identification/ShapeTracker.cpp index e5622573..2f18cac1 100644 --- a/perception/navigator_vision/nodes/shape_identification/ShapeTracker.cpp +++ b/perception/navigator_vision/nodes/shape_identification/ShapeTracker.cpp @@ -30,7 +30,10 @@ void ShapeTracker::addShapes(navigator_msgs::DockShapes& newShapes) { addShape(shape); } - shapes.erase(std::remove_if(shapes.begin(), shapes.end(), [] (TrackedShape& s) {return s.isStale();}),shapes.end()); + shapes.erase(std::remove_if(shapes.begin(), shapes.end(), [] (TrackedShape& s) { + // if (s.isStale()) printf("Shape is stale, removing\n"); + return s.isStale(); + }),shapes.end()); // ~printf("Shapes size %d\n",shapes.size()); navigator_msgs::DockShapes found_shapes; for (auto &tracked : shapes) diff --git a/perception/navigator_vision/nodes/shape_identification/TrackedShape.cpp b/perception/navigator_vision/nodes/shape_identification/TrackedShape.cpp index 4c71d345..12c52847 100644 --- a/perception/navigator_vision/nodes/shape_identification/TrackedShape.cpp +++ b/perception/navigator_vision/nodes/shape_identification/TrackedShape.cpp @@ -19,7 +19,9 @@ void TrackedShape::init(ros::NodeHandle& nh) nh.param("tracking/max_distance_gap",MAX_DISTANCE,15); double seconds; nh.param("tracking/max_seen_gap_seconds", seconds, 0.5); - MAX_TIME_GAP = ros::Duration(0, seconds * 1E9); + double partial_secs = fmod(seconds, 1); + seconds -= partial_secs; + MAX_TIME_GAP = ros::Duration(seconds, partial_secs * 1E9); } double TrackedShape::centerDistance(navigator_msgs::DockShape& a, navigator_msgs::DockShape& b) { @@ -31,12 +33,12 @@ bool TrackedShape::update(navigator_msgs::DockShape& s) double distance = centerDistance(latest,s); if (distance > MAX_DISTANCE) { - // ~printf(" %s %s distance too big to %s %s DISTANCE = %f \n",latest.Color.c_str(),latest.Shape.c_str(),s.Color.c_str(),s.Shape.c_str(),distance); + // printf(" %s %s distance too big to %s %s DISTANCE = %f \n",latest.Color.c_str(),latest.Shape.c_str(),s.Color.c_str(),s.Shape.c_str(),distance); return false; } if (tooOld(s)) { - // ~printf(" %s %s too old from %s %s\n",latest.Color.c_str(),latest.Shape.c_str(),s.Color.c_str(),s.Shape.c_str()); + // printf(" %s %s too old from %s 
%s\n",latest.Color.c_str(),latest.Shape.c_str(),s.Color.c_str(),s.Shape.c_str()); return false; } if (latest.Color != s.Color) @@ -69,6 +71,7 @@ bool TrackedShape::isStale() } bool TrackedShape::isReady() { + // printf("Count=%d, TooOld=%d\n", count, isStale()); return count >= MIN_COUNT && !isStale(); } navigator_msgs::DockShape TrackedShape::get() From 140c0b3f4b0bf74afe56b0952fa6ffd0370fb5a5 Mon Sep 17 00:00:00 2001 From: Kevin Allen Date: Thu, 15 Dec 2016 03:27:05 -0500 Subject: [PATCH 195/267] PERCEPTION: Improve normal approx in shapefinder --- .../camera_to_lidar/CameraLidarTransformer.cpp | 14 +++++++++++++- .../camera_to_lidar/CameraLidarTransformer.hpp | 1 + 2 files changed, 14 insertions(+), 1 deletion(-) diff --git a/perception/navigator_vision/nodes/camera_to_lidar/CameraLidarTransformer.cpp b/perception/navigator_vision/nodes/camera_to_lidar/CameraLidarTransformer.cpp index e21ea6c9..ceba6808 100644 --- a/perception/navigator_vision/nodes/camera_to_lidar/CameraLidarTransformer.cpp +++ b/perception/navigator_vision/nodes/camera_to_lidar/CameraLidarTransformer.cpp @@ -1,5 +1,6 @@ #include "CameraLidarTransformer.hpp" +ros::Duration CameraLidarTransformer::MAX_TIME_ERR = ros::Duration(0,6E7); CameraLidarTransformer::CameraLidarTransformer() : nh(ros::this_node::getName()), tfBuffer(), @@ -53,7 +54,18 @@ bool CameraLidarTransformer::transformServiceCallback(navigator_msgs::CameraToLi return true; } visualization_msgs::MarkerArray markers; - sensor_msgs::PointCloud2ConstPtr scloud = lidarCache.getElemAfterTime(req.header.stamp); + std::vector nearbyLidar = lidarCache.getInterval(req.header.stamp-MAX_TIME_ERR, req.header.stamp+MAX_TIME_ERR); + sensor_msgs::PointCloud2ConstPtr scloud; + ros::Duration minErr = ros::Duration(5000,0); + for (auto& pc : nearbyLidar) + { + ros::Duration thisErr = pc->header.stamp - req.header.stamp; + if (thisErr < minErr) + { + scloud = pc; + minErr = thisErr; + } + } if (!scloud) { res.success = false; res.error = navigator_msgs::CameraToLidarTransform::Response::CLOUD_NOT_FOUND; diff --git a/perception/navigator_vision/nodes/camera_to_lidar/CameraLidarTransformer.hpp b/perception/navigator_vision/nodes/camera_to_lidar/CameraLidarTransformer.hpp index 97eb8f9e..2f8e2da9 100644 --- a/perception/navigator_vision/nodes/camera_to_lidar/CameraLidarTransformer.hpp +++ b/perception/navigator_vision/nodes/camera_to_lidar/CameraLidarTransformer.hpp @@ -54,6 +54,7 @@ class CameraLidarTransformer void cameraInfoCallback(const sensor_msgs::CameraInfo info); void drawPoint(cv::Mat& mat, cv::Point2d& p, cv::Scalar color=cv::Scalar(0, 0, 255)); bool transformServiceCallback(navigator_msgs::CameraToLidarTransform::Request &req,navigator_msgs::CameraToLidarTransform::Response &res); + static ros::Duration MAX_TIME_ERR; #ifdef DO_ROS_DEBUG ros::Publisher pubMarkers; image_transport::ImageTransport image_transport; From d2f63d04b119a336898771d4cdbc7d4f7fbda17e Mon Sep 17 00:00:00 2001 From: mattlangford Date: Tue, 13 Dec 2016 03:37:34 -0500 Subject: [PATCH 196/267] BOUNDS: Add gps coordinates for the channel we compete in --- utils/navigator_tools/cfg/channel.yaml | 25 +++++++++++++++++++++++++ 1 file changed, 25 insertions(+) create mode 100644 utils/navigator_tools/cfg/channel.yaml diff --git a/utils/navigator_tools/cfg/channel.yaml b/utils/navigator_tools/cfg/channel.yaml new file mode 100644 index 00000000..aad9510c --- /dev/null +++ b/utils/navigator_tools/cfg/channel.yaml @@ -0,0 +1,25 @@ +!!python/object/new:dynamic_reconfigure.encoding.Config +dictitems: + 
enforce: true + groups: !!python/object/new:dynamic_reconfigure.encoding.Config + dictitems: + enforce: true + groups: !!python/object/new:dynamic_reconfigure.encoding.Config + state: [] + id: 0 + lla_1: (21.312393, -157.889992) + lla_2: (21.310304, -157.891569) + lla_3: (21.303058, -157.888007) + lla_4: (21.304017, -157.885561) + name: Default + parameters: !!python/object/new:dynamic_reconfigure.encoding.Config + state: [] + parent: 0 + state: true + type: '' + state: [] + lla_1: (21.312393, -157.889992) + lla_2: (21.310304, -157.891569) + lla_3: (21.303058, -157.888007) + lla_4: (21.304017, -157.885561) +state: [] From fb1c9bcd25762efad1b45c7a5b621639200a9027 Mon Sep 17 00:00:00 2001 From: mattlangford Date: Wed, 14 Dec 2016 15:00:49 -0500 Subject: [PATCH 197/267] COLORAMA: Add debug message --- .../navigator_vision/nodes/database_color.py | 59 ++++++++++++++----- 1 file changed, 43 insertions(+), 16 deletions(-) diff --git a/perception/navigator_vision/nodes/database_color.py b/perception/navigator_vision/nodes/database_color.py index f1b02581..7052a20a 100755 --- a/perception/navigator_vision/nodes/database_color.py +++ b/perception/navigator_vision/nodes/database_color.py @@ -12,6 +12,7 @@ import sensor_msgs.point_cloud2 as pc2 from navigator_msgs.srv import ObjectDBQuery, ObjectDBQueryRequest, ColorRequest, ColorRequestResponse +from navigator_msgs.msg import ColoramaDebug from sensor_msgs.msg import CameraInfo, Image from nav_msgs.msg import Odometry @@ -121,6 +122,14 @@ def publish(self, *args): class Observation(object): history_length = 100 # Default to 100 + + @classmethod + def as_message(self): + msg = ColoramaDebug() + msg.num_observations = len(self.hues) + + msg.mean_value = np.mean(self.values) + msg.hues = self.hues def __init__(self): self.hues = deque([], maxlen=self.history_length) @@ -154,7 +163,7 @@ def extend(self, (hues, values, dists, q_diffs)): self.dists.extend(dists) self.q_errs.extend(q_diffs) - def compute_confidence(self, (value_w, dist_w, q_diff_w), **kwargs): + def compute_confidence(self, (value_w, dist_w, q_diff_w), get_conf=False, **kwargs): """Don't try to compute weights with bad data (too few samples)""" # Get some value information @@ -164,9 +173,10 @@ def compute_confidence(self, (value_w, dist_w, q_diff_w), **kwargs): q_sig = kwargs.get('q_sig', 1.3) # Compute data required before applying weights - value_errs = self._guass(self.values, v_u, v_sig) - dists = self._guass(self.dists, 5, dist_sig) - q_diffs = self._guass(self.q_errs, 0, q_sig) + guass = self._guass + value_errs = guass(self.values, v_u, v_sig) + dists = guass(self.dists, 5, dist_sig) + q_diffs = guass(self.q_errs, 0, q_sig) # Normalize weights w_norm = value_w + dist_w + q_diff_w @@ -181,6 +191,9 @@ def compute_confidence(self, (value_w, dist_w, q_diff_w), **kwargs): # Compute normalized confidence c = value_w * value_errs + dist_w * dists + q_diff_w * q_diffs + if get_conf: + return c, [value_errs, dists, q_diffs] + return c def _guass(self, data, mean, sig): @@ -193,11 +206,14 @@ def __init__(self): image_topic = camera_root + "/image_rect_color" self.tf_listener = tf.TransformListener() - + self.status_pub = rospy.Publisher("/database_color_status", ColoramaDebug, queue_size=1) + + self.odom = None set_odom = lambda msg: setattr(self, "odom", navigator_tools.pose_to_numpy(msg.pose.pose)) rospy.Subscriber("/odom", Odometry, set_odom) fprint("Waiting for odom...") - self.odom = rospy.wait_for_message("/odom", Odometry, timeout=3) + while self.odom is None and not 
rospy.is_shutdown(): + rospy.sleep(1) fprint("Odom found!", msg_color='green') db_request = rospy.ServiceProxy("/database/requests", ObjectDBQuery) @@ -206,13 +222,14 @@ def __init__(self): self.image_history = ImageHistory(image_topic) # Wait for camera info, and exit if not found - try: - fprint("Waiting for camera info on: '{}'".format(info_topic)) - camera_info_msg = rospy.wait_for_message(info_topic, CameraInfo, timeout=3) - except rospy.exceptions.ROSException: - fprint("Camera info not found! Terminating.", msg_color="red") - rospy.signal_shutdown("Camera not found!") - return + fprint("Waiting for camera info on: '{}'".format(info_topic)) + while not rospy.is_shutdown(): + try: + camera_info_msg = rospy.wait_for_message(info_topic, CameraInfo, timeout=3) + except rospy.exceptions.ROSException: + rospy.sleep(1) + continue + break fprint("Camera info found!", msg_color="green") self.camera_model = PinholeCameraModel() @@ -246,7 +263,7 @@ def __init__(self): self.v_sig = 60 # Sigma of norm for variance error self.dist_factor = 0.3 # Favor being closer to the totem self.dist_sig = 30 # Sigma of distance (m) - self.q_factor = 0.4 # Favor not looking into the sun + self.q_factor = 0 # Favor not looking into the sun self.q_sig = 1.2 # Sigma of norm for quaternion error (rads) # Maps id => observations @@ -314,7 +331,7 @@ def do_estimate(self, totem_id): kwargs = {'v_u': self.v_u, 'v_sig': self.v_sig, 'dist_sig': self.dist_sig, 'q_factor': self.q_factor, 'q_sig': self.q_sig} - w = t_color.compute_confidence([self.v_factor, self.dist_factor, self.q_factor], **kwargs) + w, weights = t_color.compute_confidence([self.v_factor, self.dist_factor, self.q_factor], True, **kwargs) fprint("CONF: {}".format(w)) if np.mean(w) < self.conf_reject: return None @@ -322,7 +339,17 @@ def do_estimate(self, totem_id): hue_angles = np.radians(np.array(t_color.hues) * 2) angle = self._compute_average_angle(hue_angles, w) color = self._get_closest_color(angle) - + + msg = t_color.as_message + msg.id = totem_id + msg.confidence = w + msg.labels = ["value_errs", "dists", "q_diffs"] + msg.weights = weights + msg.color = colors[0] + msg.est_hues = angle * 2 + msg.hues = np.array(t_color.hues) * 2 + self.status_pub.publish(msg) + fprint("Color: {}".format(color[0])) return color From b3574dcf4d4db693578f6d84a6b0c24602809715 Mon Sep 17 00:00:00 2001 From: mattlangford Date: Sat, 17 Dec 2016 02:58:37 -0500 Subject: [PATCH 198/267] BOUNDS: Add updated bounds files --- utils/navigator_tools/cfg/course_a.yaml | 24 ++++++++++++++++++++++++ utils/navigator_tools/cfg/course_b.yaml | 24 ++++++++++++++++++++++++ 2 files changed, 48 insertions(+) create mode 100644 utils/navigator_tools/cfg/course_a.yaml create mode 100644 utils/navigator_tools/cfg/course_b.yaml diff --git a/utils/navigator_tools/cfg/course_a.yaml b/utils/navigator_tools/cfg/course_a.yaml new file mode 100644 index 00000000..efcc6894 --- /dev/null +++ b/utils/navigator_tools/cfg/course_a.yaml @@ -0,0 +1,24 @@ +!!python/object/new:dynamic_reconfigure.encoding.Config +dictitems: + groups: !!python/object/new:dynamic_reconfigure.encoding.Config + dictitems: + enforce: true + groups: !!python/object/new:dynamic_reconfigure.encoding.Config + state: [] + id: 0 + lla_1: (21.31139511990995, -157.88932101141714) + lla_2: (21.310799034961416, -157.8908255966243) + lla_3: (21.30948497606932, -157.89037273981896) + lla_4: (21.310017869308005, -157.88873371841683) + name: Default + parameters: !!python/object/new:dynamic_reconfigure.encoding.Config + state: [] + parent: 0 + 
state: true + type: '' + state: [] + lla_1: (21.31139511990995, -157.88932101141714) + lla_2: (21.310799034961416, -157.8908255966243) + lla_3: (21.30948497606932, -157.89037273981896) + lla_4: (21.310017869308005, -157.88873371841683) +state: [] diff --git a/utils/navigator_tools/cfg/course_b.yaml b/utils/navigator_tools/cfg/course_b.yaml new file mode 100644 index 00000000..473044f1 --- /dev/null +++ b/utils/navigator_tools/cfg/course_b.yaml @@ -0,0 +1,24 @@ +!!python/object/new:dynamic_reconfigure.encoding.Config +dictitems: + groups: !!python/object/new:dynamic_reconfigure.encoding.Config + dictitems: + enforce: true + groups: !!python/object/new:dynamic_reconfigure.encoding.Config + state: [] + id: 0 + lla_1: (21.3093999977932, -157.88861615540685) + lla_2: (21.308890236491465, -157.88972622804025) + lla_3: (21.307914785432155, -157.88882585130506) + lla_4: (21.30865744423127, -157.88771508071076) + name: Default + parameters: !!python/object/new:dynamic_reconfigure.encoding.Config + state: [] + parent: 0 + state: true + type: '' + state: [] + lla_1: (21.3093999977932, -157.88861615540685) + lla_2: (21.308891236491465, -157.88972622804025) + lla_3: (21.307914785432155, -157.88882585130506) + lla_4: (21.30865744423127, -157.88771508071076) +state: [] From e20c6c9dc6dece31c92491e9c703a56c85bd7281 Mon Sep 17 00:00:00 2001 From: David Soto Date: Mon, 12 Dec 2016 04:59:52 -1000 Subject: [PATCH 199/267] PERCEPTION: find all perturbations of a curve segment --- perception/navigator_vision/CMakeLists.txt | 14 ++ .../navigator_vision_lib/active_contours.hpp | 71 +++++++++ .../navigator_vision_lib/active_contours.cpp | 141 ++++++++++++++++++ 3 files changed, 226 insertions(+) create mode 100644 perception/navigator_vision/include/navigator_vision_lib/active_contours.hpp create mode 100644 perception/navigator_vision/src/navigator_vision_lib/active_contours.cpp diff --git a/perception/navigator_vision/CMakeLists.txt b/perception/navigator_vision/CMakeLists.txt index 9eb3b277..b2d8d2e9 100644 --- a/perception/navigator_vision/CMakeLists.txt +++ b/perception/navigator_vision/CMakeLists.txt @@ -77,6 +77,7 @@ add_library( src/navigator_vision_lib/visualization.cc src/navigator_vision_lib/cv_utils.cc src/navigator_vision_lib/image_filtering.cpp + src/navigator_vision_lib/active_contours.cpp src/navigator_vision_lib/colorizer/pcd_colorizer.cpp src/navigator_vision_lib/colorizer/single_cloud_processor.cpp src/navigator_vision_lib/colorizer/camera_observer.cpp @@ -125,6 +126,19 @@ target_link_libraries( ${OpenCV_INCLUDE_DIRS} ) +add_executable(A_C_test_vision + nodes/A_C_test_vision.cpp +) +add_dependencies(A_C_test_vision navigator_vision_lib ${catkin_EXPORTED_TARGETS}) +target_link_libraries( + A_C_test_vision + navigator_vision_lib + navigator_tools_lib + ${catkin_LIBRARIES} + ${Boost_LIBRARIES} + ${OpenCV_INCLUDE_DIRS} +) + add_executable(shape_identification nodes/shape_identification/main.cpp nodes/shape_identification/DockShapeVision.cpp diff --git a/perception/navigator_vision/include/navigator_vision_lib/active_contours.hpp b/perception/navigator_vision/include/navigator_vision_lib/active_contours.hpp new file mode 100644 index 00000000..8f0c5218 --- /dev/null +++ b/perception/navigator_vision/include/navigator_vision_lib/active_contours.hpp @@ -0,0 +1,71 @@ +#pragma once + +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include +#include + +namespace nav +{ + +class SegmentDescription; +class ClosedCurve; + +namespace Perturbations +{ + struct 
Perturbation + { + std::vector pt_idxs; + std::vector toPointSequence(cv::Point2i center); + }; + + static std::map, std::vector>> perturbation_cache; + //static std::vector>>> perturbation_cache; + + void initPerturbationCache(); + +// std::vector* getPerturbations(const SegmentDescription& seg_desc); + + std::vector> getPerturbations(uint8_t entry, uint8_t exit); + uint8_t getIdxFromPoint(cv::Point2i hood_point); + cv::Point2i getPointFromIdx(uint8_t idx); + std::vector getHoodIdxs(uint8_t idx, bool include_border); + bool isNeighbor(uint8_t idx1, uint8_t idx2); + void growRoute(const std::vector& partial, const std::vector& occupied, + uint8_t entry, uint8_t exit); +} // namespace Perturbations + + +class ClosedCurve +{ + std::vector _curve_points; + +public: + ClosedCurve(std::vector points); + ClosedCurve applyPerturbation(Perturbations::Perturbation perturb); +}; + +class CurvePerturbation +{ + std::vector perturbed_segment; + uint8_t entry; + uint8_t exit; +}; + +class ActiveContour +{ + ClosedCurve _contour; + +public: + ActiveContour(); +}; + +} // namespace nav diff --git a/perception/navigator_vision/src/navigator_vision_lib/active_contours.cpp b/perception/navigator_vision/src/navigator_vision_lib/active_contours.cpp new file mode 100644 index 00000000..695c288f --- /dev/null +++ b/perception/navigator_vision/src/navigator_vision_lib/active_contours.cpp @@ -0,0 +1,141 @@ +#include + +using namespace std; +using namespace cv; + +namespace nav +{ + +namespace Perturbations +{ + +void initPerturbationCache() +{ + std::unordered_map seq_to_mod_border_idxs; + seq_to_mod_border_idxs[0] = 0; + seq_to_mod_border_idxs[1] = 1; + seq_to_mod_border_idxs[2] = 2; + seq_to_mod_border_idxs[3] = 3; + seq_to_mod_border_idxs[4] = 4; + seq_to_mod_border_idxs[5] = 12; + seq_to_mod_border_idxs[6] = 20; + seq_to_mod_border_idxs[7] = 28; + seq_to_mod_border_idxs[8] = 36; + seq_to_mod_border_idxs[9] = 35; + seq_to_mod_border_idxs[10] = 34; + seq_to_mod_border_idxs[11] = 33; + seq_to_mod_border_idxs[12] = 32; + seq_to_mod_border_idxs[13] = 24; + seq_to_mod_border_idxs[14] = 16; + seq_to_mod_border_idxs[15] = 8; + for(uint8_t entry = 0; entry < 25; entry++) + { + for(uint8_t exit = 0; exit < 25; exit++) + { + if(entry > exit || exit - entry == 1 || (exit + 16) - entry == 1) + continue; + vector perturbation_list; + vector occupied_squares; + auto in = seq_to_mod_border_idxs[entry]; + auto out = seq_to_mod_border_idxs[exit]; + growRoute(perturbation_list, occupied_squares, in, out); // Appends all possible paths from entry point to + } // exit point at the corresponding cache position + } +} + + +vector> getPerturbations(uint8_t entry, uint8_t exit) +{ + auto key = entry <= exit? 
make_pair(entry, exit) : make_pair(exit, entry); + auto perturbations = perturbation_cache[key]; + if(exit < entry) + for(auto& route : perturbations) + reverse(route.begin(), route.end()); + return perturbations; +} + +uint8_t getIdxFromPoint(Point2i hood_point) +{ + uint8_t idx = hood_point.y * 8; + idx += hood_point.x; + return idx; +} + +Point2i getPointFromIdx(uint8_t idx) +{ + auto x = idx % 8; + auto y = idx / 8; + return std::move(Point2i(x, y)); +} + +vector getHoodIdxs(uint8_t idx, bool include_border) +{ + vector hood; + hood.push_back(idx - 8 - 1); + hood.push_back(idx - 8); + hood.push_back(idx - 8 + 1); + hood.push_back(idx - 1); + hood.push_back(idx + 1); + hood.push_back(idx + 8 - 1); + hood.push_back(idx + 8); + hood.push_back(idx + 8 + 1); + + if(include_border) + hood = vector(hood.begin(), remove_if(hood.begin(), hood.end(), + [](uint8_t x){return x < 0 || x >= 39 || x%8 > 4; })); + else + hood = vector(hood.begin(), remove_if(hood.begin(), hood.end(), + [](uint8_t x){return x < 0 || x >= 39 || x/8 == 0 || x/8 == 4 || x%8 == 0 || x%8 > 3; })); + + return hood; +} + +bool isNeighbor(uint8_t idx1, uint8_t idx2) +{ + auto hood = getHoodIdxs(idx1, true); + return find(hood.begin(), hood.end(), idx2) != hood.end(); +} + +void growRoute(const vector& partial, const vector& occupied, uint8_t entry, uint8_t exit) +{ + uint8_t tail = (partial.size() == 0)? entry : partial.back(); + auto candidates = getHoodIdxs(tail, false); + + // remove candidates that were neighbors of prev tail + candidates = vector(candidates.begin(), remove_if(candidates.begin(), candidates.end(), + [&occupied](uint8_t x){return find(occupied.begin(), occupied.end(), x) != occupied.end(); })); + + auto next_occupied = occupied; // neighbors of current tailed should be blacklisted for next route growth + for(auto cand : candidates) + next_occupied.push_back(cand); + + for(auto new_elem : candidates) + { + auto next_partial = partial; + next_partial.push_back(new_elem); + + auto next_tails = getHoodIdxs(new_elem, true); // true --> include border + auto find_exit_itr = find(next_tails.begin(), next_tails.end(), exit); + if(find_exit_itr != next_tails.end() && *find_exit_itr != tail) // add to cache if exit is a possible next tail + { + pair key; + + if(entry <= exit) + key = make_pair(entry, exit); + else + { + key = make_pair(exit, entry); + reverse(next_partial.begin(), next_partial.end()); + } + perturbation_cache[key].push_back(next_partial); + } + else + growRoute(next_partial, next_occupied, entry, exit); + } + + return; // No viable growth candidates +} + +} // namespace Perturbations + +} // namespace nav From 5320925b52e9e28d27d671257a923a1ea2c56f83 Mon Sep 17 00:00:00 2001 From: David Soto Date: Wed, 14 Dec 2016 10:42:16 -1000 Subject: [PATCH 200/267] PERCEPTION: add utility to sort curves by length --- utils/navigator_tools/include/navigator_tools.hpp | 10 +++++++++- utils/navigator_tools/src/navigator_tools.cpp | 13 +++++++++++++ 2 files changed, 22 insertions(+), 1 deletion(-) diff --git a/utils/navigator_tools/include/navigator_tools.hpp b/utils/navigator_tools/include/navigator_tools.hpp index 34d23a7c..504f22a2 100644 --- a/utils/navigator_tools/include/navigator_tools.hpp +++ b/utils/navigator_tools/include/navigator_tools.hpp @@ -2,13 +2,17 @@ #include #include +#include #include +#include +#include namespace nav{ static const double PI = 3.1415926535897932; -namespace tools{ +namespace tools +{ // Returns a vector of topic names (std::string) that end with "image_rect_color" // if color is 
false then it looks for those that end in "image_rect" @@ -20,5 +24,9 @@ inline std::string operator "" _s(const char* str, size_t /*length*/) return std::string(str); } + +// Sorts contours by curve length +void sortContours(std::vector>& contour_vec, bool ascending=true); + } // namespace nav::tools } // namespace nav diff --git a/utils/navigator_tools/src/navigator_tools.cpp b/utils/navigator_tools/src/navigator_tools.cpp index ee932fcf..e5dfd960 100644 --- a/utils/navigator_tools/src/navigator_tools.cpp +++ b/utils/navigator_tools/src/navigator_tools.cpp @@ -39,5 +39,18 @@ vector getRectifiedImageTopics(bool color) return image_rect_topics; } + +void sortContours(vector>& contour_vec, bool ascending) +{ + auto comp_length = ascending? + [](vector const &contour1, vector const &contour2) + { return fabs(arcLength(contour1, true)) < fabs(arcLength(contour2, true)); } : + [](vector const &contour1, vector const &contour2) + { return fabs(arcLength(contour1, true)) > fabs(arcLength(contour2, true)); }; + + sort(contour_vec.begin(), contour_vec.end(), comp_length); +} + + } // namespace nav::tools } // namespace nav From a000be34c2fff4189ad8d611c6eee5172d29bc28 Mon Sep 17 00:00:00 2001 From: David Soto Date: Thu, 15 Dec 2016 11:48:40 -1000 Subject: [PATCH 201/267] PERCEPTION: perturb closed curves by index --- .../navigator_vision_lib/active_contours.hpp | 39 +++--- .../navigator_vision_lib/active_contours.cpp | 112 ++++++++++++++++++ 2 files changed, 130 insertions(+), 21 deletions(-) diff --git a/perception/navigator_vision/include/navigator_vision_lib/active_contours.hpp b/perception/navigator_vision/include/navigator_vision_lib/active_contours.hpp index 8f0c5218..f3a4d658 100644 --- a/perception/navigator_vision/include/navigator_vision_lib/active_contours.hpp +++ b/perception/navigator_vision/include/navigator_vision_lib/active_contours.hpp @@ -8,6 +8,7 @@ #include #include #include +#include #include #include @@ -17,47 +18,43 @@ namespace nav { class SegmentDescription; -class ClosedCurve; +class ClosedCurve; namespace Perturbations { - struct Perturbation - { - std::vector pt_idxs; - std::vector toPointSequence(cv::Point2i center); - }; - static std::map, std::vector>> perturbation_cache; - //static std::vector>>> perturbation_cache; - void initPerturbationCache(); - -// std::vector* getPerturbations(const SegmentDescription& seg_desc); - std::vector> getPerturbations(uint8_t entry, uint8_t exit); uint8_t getIdxFromPoint(cv::Point2i hood_point); cv::Point2i getPointFromIdx(uint8_t idx); + std::vector getPointList(std::vector& idx_list); std::vector getHoodIdxs(uint8_t idx, bool include_border); bool isNeighbor(uint8_t idx1, uint8_t idx2); + bool isNeighborPoint(const cv::Point2i &pt1, const cv::Point2i &pt2); void growRoute(const std::vector& partial, const std::vector& occupied, uint8_t entry, uint8_t exit); + std::vector perturb(const std::vector& src_curve, std::vector perturbation,int idx); } // namespace Perturbations - class ClosedCurve { std::vector _curve_points; public: - ClosedCurve(std::vector points); - ClosedCurve applyPerturbation(Perturbations::Perturbation perturb); -}; + struct Perturbation + { + size_t idx; + uint8_t entry; + uint8_t exit; + std::vector route; + }; -class CurvePerturbation -{ - std::vector perturbed_segment; - uint8_t entry; - uint8_t exit; + ClosedCurve(std::vector points); + void applyPerturbation(const std::vector& perturbation, int idx); + ClosedCurve perturb(const std::vector& perturbation, int idx); + static bool validateCurve(std::vector& 
curve); + std::vector calcCosts(const cv::Mat& img, std::vector candidate_perturbs, + std::function cb); }; class ActiveContour diff --git a/perception/navigator_vision/src/navigator_vision_lib/active_contours.cpp b/perception/navigator_vision/src/navigator_vision_lib/active_contours.cpp index 695c288f..fe1bafd4 100644 --- a/perception/navigator_vision/src/navigator_vision_lib/active_contours.cpp +++ b/perception/navigator_vision/src/navigator_vision_lib/active_contours.cpp @@ -68,6 +68,21 @@ Point2i getPointFromIdx(uint8_t idx) return std::move(Point2i(x, y)); } +vector getPointList(vector& idx_list) +{ + vector point_list; + for(auto& idx : idx_list) + point_list.push_back(getPointFromIdx(idx)); + + for(auto& pt : point_list) + { + pt.x -= 2; + pt.y -= 2; + } + + return point_list; +} + vector getHoodIdxs(uint8_t idx, bool include_border) { vector hood; @@ -96,6 +111,11 @@ bool isNeighbor(uint8_t idx1, uint8_t idx2) return find(hood.begin(), hood.end(), idx2) != hood.end(); } +bool isNeighborPoint(const Point2i &pt1, const Point2i &pt2) +{ + return abs(pt1.x - pt2.x) == 1 && abs(pt1.y - pt2.y) == 1; +} + void growRoute(const vector& partial, const vector& occupied, uint8_t entry, uint8_t exit) { uint8_t tail = (partial.size() == 0)? entry : partial.back(); @@ -136,6 +156,98 @@ void growRoute(const vector& partial, const vector& occupied, return; // No viable growth candidates } +vector perturb(const vector& src_curve, vector perturbation,int idx) +{ + auto pt = src_curve[idx]; + auto pt_list = getPointList(perturbation); + for(auto& pt_elem : pt_list) + { + pt_elem.x += pt.x; + pt_elem.y += pt.y; + } + + vector dest(src_curve.begin(), src_curve.begin() + idx - 1); + for(auto& pert : pt_list) + dest.push_back(pert); + auto it = src_curve.begin() + idx + 2; + while(it != src_curve.end()) + { + dest.push_back(*it); + it++; + } + + return dest; +} + } // namespace Perturbations +ClosedCurve::ClosedCurve(vector points) +{ + _curve_points = points; +} + +void ClosedCurve::applyPerturbation(const vector& perturbation, int idx) +{ + _curve_points = Perturbations::perturb(_curve_points, perturbation, idx); +} + +ClosedCurve ClosedCurve::perturb(const std::vector& perturbation, int idx) +{ + return ClosedCurve(Perturbations::perturb(_curve_points, perturbation, idx)); +} + +bool ClosedCurve::validateCurve(std::vector& curve) +{ + // Make sure that all consecutive points are 8-connected + auto eight_connected = [](Point2i a, Point2i b){ return abs(a.x - b.x) <= 1 && abs(a.y - b.y) <= 1; }; + if(!eight_connected(curve[0], curve.back())) + return false; + cout << "Checking for unconnected consecutive pts" << endl; + for(size_t i = 1; i < curve.size(); i++) + if(!eight_connected(curve[i - 1], curve[i])) + { + cout << "failure pts: " << curve[i - 1] << "\t" << curve[i] << endl; + return false; + } + + // Make sure that points that are not adjacent are never 8-connected + vector forbidden_neighbors; + forbidden_neighbors.push_back(curve.back()); + cout << "Checking for non-adjacent neighbors" << endl; + for(size_t i = 1; i < curve.size(); i++) + { + auto& pt = curve[i]; + auto count = count_if(forbidden_neighbors.begin(), forbidden_neighbors.end(), [pt](const Point2i &test_pt) + { return Perturbations::isNeighborPoint(pt, test_pt); }); + forbidden_neighbors.push_back(curve[i - 1]); + + if(i > curve.size() - 2) // Should be neighbor with first one added only + { + if(count > 1) + { + auto conflict_pt = find_if(forbidden_neighbors.begin(), forbidden_neighbors.end(), + [pt](Point2i test_pt){ return 
+ClosedCurve::ClosedCurve(vector<Point2i> points) +{ + _curve_points = points; +} + +void ClosedCurve::applyPerturbation(const vector<uint8_t>& perturbation, int idx) +{ + _curve_points = Perturbations::perturb(_curve_points, perturbation, idx); +} + +ClosedCurve ClosedCurve::perturb(const std::vector<uint8_t>& perturbation, int idx) +{ + return ClosedCurve(Perturbations::perturb(_curve_points, perturbation, idx)); +} + +bool ClosedCurve::validateCurve(std::vector<Point2i>& curve) +{ + // Make sure that all consecutive points are 8-connected + auto eight_connected = [](Point2i a, Point2i b){ return abs(a.x - b.x) <= 1 && abs(a.y - b.y) <= 1; }; + if(!eight_connected(curve[0], curve.back())) + return false; + cout << "Checking for unconnected consecutive pts" << endl; + for(size_t i = 1; i < curve.size(); i++) + if(!eight_connected(curve[i - 1], curve[i])) + { + cout << "failure pts: " << curve[i - 1] << "\t" << curve[i] << endl; + return false; + } + + // Make sure that points that are not adjacent are never 8-connected + vector<Point2i> forbidden_neighbors; + forbidden_neighbors.push_back(curve.back()); + cout << "Checking for non-adjacent neighbors" << endl; + for(size_t i = 1; i < curve.size(); i++) + { + auto& pt = curve[i]; + auto count = count_if(forbidden_neighbors.begin(), forbidden_neighbors.end(), [pt](const Point2i &test_pt) + { return Perturbations::isNeighborPoint(pt, test_pt); }); + forbidden_neighbors.push_back(curve[i - 1]); + + if(i > curve.size() - 2) // Should be neighbor with first one added only + { + if(count > 1) + { + auto conflict_pt = find_if(forbidden_neighbors.begin(), forbidden_neighbors.end(), + [pt](Point2i test_pt){ return nav::Perturbations::isNeighborPoint(pt, test_pt); }); + cout << "failure pts: " << curve[i] << "\t" << (conflict_pt == forbidden_neighbors.end()? Point2i(0,0) : *conflict_pt) << endl; + return false; + } + } + else + { + if(count > 0) + { + auto conflict_pt = find_if(forbidden_neighbors.begin(), forbidden_neighbors.end(), + [pt](Point2i test_pt){ return nav::Perturbations::isNeighborPoint(pt, test_pt); }); + cout << "failure pts: " << curve[i] << "\t" << (conflict_pt == forbidden_neighbors.end()? Point2i(0,0) : *conflict_pt) << endl; + return false; + } + } + } + return true; // No failures! +} + +vector calcCosts(const Mat& img, vector candidate_perturbs, function cb) +{ + +} + } // namespace nav
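The intent of validateCurve above can be stated compactly in Python (a sketch: the C++ restricts the second check with isNeighborPoint, which this approximates with plain 8-connectivity):

def validate_curve(curve):
    eight = lambda a, b: abs(a[0] - b[0]) <= 1 and abs(a[1] - b[1]) <= 1
    n = len(curve)
    # 1) Consecutive points, including the wrap-around pair, must be 8-connected.
    if not all(eight(curve[i - 1], curve[i]) for i in range(n)):
        return False
    # 2) Points that are not adjacent on the curve must never touch, otherwise
    #    the closed curve pinches against itself.
    for i in range(n):
        for j in range(i + 2, n):
            if (i, j) != (0, n - 1) and eight(curve[i], curve[j]):
                return False
    return True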
"valid" : "invalid") << endl; + } + } + imshow("curve", curve_img); + return true; +} + + +int main(int argc, char** argv) +{ + cout << "This is Active Contours Test" << endl; + nav::Perturbations::initPerturbationCache(); + + int low_slider {0}; + int hi_slider {8}; + int low_max {255}; + int hi_max {255}; + int kernel_size {5}; + int k_sz_max {255}; + string win_name("Thresh"); + namedWindow(win_name); + createTrackbar("hue_low", win_name, &low_slider, low_max); + createTrackbar("hue_hi", win_name, &hi_slider, hi_max); + createTrackbar("kernel_size", win_name, &kernel_size, k_sz_max); + VideoCapture cap; + if(!cap.open(0)) + return 0; + for(;;) + { + Mat frame; + cap >> frame; + if(frame.empty()) + break; + resize(frame, frame, Size(0,0), 0.5, 0.5); + imshow("Thresh", frame); + uint8_t wk = waitKey(1); + cout << int(wk) << endl; + img_stuff(frame, &low_slider, &hi_slider, &kernel_size); + if(wk == 27) + break; + } + cout << "I finished!" << nav::Perturbations::perturbation_cache.size() << endl; + return 0; +} From bc169925104e200a4ca3bd21ab106163fbc7309f Mon Sep 17 00:00:00 2001 From: Tess Date: Sat, 17 Dec 2016 04:05:40 -1000 Subject: [PATCH 204/267] scan_the_code --- .../object_classification/SVM_classifier.py | 3 +- .../object_classification/depicklify.py | 12 + .../object_classification.py | 1 + .../object_classification/roi_generator.py | 4 +- .../roi_generator_slow.py | 203 +++++++++++++++++ .../object_classification/stc_train.py | 208 ++++++++++++++++++ utils/navigator_tools/cfg/course_c.yaml | 24 ++ .../navigator_tools/object_database_helper.py | 21 +- 8 files changed, 470 insertions(+), 6 deletions(-) create mode 100644 perception/navigator_vision/object_classification/depicklify.py create mode 100644 perception/navigator_vision/object_classification/roi_generator_slow.py create mode 100644 perception/navigator_vision/object_classification/stc_train.py create mode 100644 utils/navigator_tools/cfg/course_c.yaml diff --git a/perception/navigator_vision/object_classification/SVM_classifier.py b/perception/navigator_vision/object_classification/SVM_classifier.py index d3e93275..90213ce5 100644 --- a/perception/navigator_vision/object_classification/SVM_classifier.py +++ b/perception/navigator_vision/object_classification/SVM_classifier.py @@ -20,4 +20,5 @@ def train(self, desc, clss): self.clf.fit(desc, clss) def pickle(self, name): - pickle.dump(self, open(name, "wb")) + with open(name, 'wb') as f: + pickle.dump(self, f) diff --git a/perception/navigator_vision/object_classification/depicklify.py b/perception/navigator_vision/object_classification/depicklify.py new file mode 100644 index 00000000..577a7c16 --- /dev/null +++ b/perception/navigator_vision/object_classification/depicklify.py @@ -0,0 +1,12 @@ +import pickle + + +class Depickle(object): + + def __init__(self): + mypickle = pickle.load(open("/home/tess/bags/color_train.py", 'rb')) + file = open('rois.txt', ) + for m in mypickle.bag_to_rois: + file.write(m) + for f in m: + file.write(f) diff --git a/perception/navigator_vision/object_classification/object_classification.py b/perception/navigator_vision/object_classification/object_classification.py index 7e4ae634..f34bc0a2 100644 --- a/perception/navigator_vision/object_classification/object_classification.py +++ b/perception/navigator_vision/object_classification/object_classification.py @@ -1,3 +1,4 @@ + from roi_generator import ROI_Collection from navigator_tools import CvDebug, BagCrawler import numpy as np diff --git 
a/perception/navigator_vision/object_classification/roi_generator.py b/perception/navigator_vision/object_classification/roi_generator.py index 713fbfed..cc726829 100644 --- a/perception/navigator_vision/object_classification/roi_generator.py +++ b/perception/navigator_vision/object_classification/roi_generator.py @@ -145,7 +145,7 @@ def mouse_roi(self, event, x, y, flags, params): self.roi_to_tracker[name].init(self.image, r) return if self.rclk: - if event == cv2.EVENT_LBUTTONDOWN and flags == 16: # pressing shift, remove box + if event == cv2.EVENT_LBUTTONDOWN and flags == 48: # 16: # pressing shift, remove box if len(self.rects) > 0: r = min(self.rects.items(), key=lambda rect: np.linalg.norm( np.array([rect[1][0], rect[1][1]]) - np.array([x, y]))) @@ -153,7 +153,7 @@ def mouse_roi(self, event, x, y, flags, params): self.rects.pop(r) self.roi_to_tracker.pop(r) self.sel_rect = None - elif event == cv2.EVENT_LBUTTONDOWN and flags == 8: # pressing cntrl, add box + elif event == cv2.EVENT_LBUTTONDOWN and flags == 40: # 8: # pressing cntrl, add box name = raw_input('Enter name of object: ') if name == "skip": return diff --git a/perception/navigator_vision/object_classification/roi_generator_slow.py b/perception/navigator_vision/object_classification/roi_generator_slow.py new file mode 100644 index 00000000..d2c4a7b1 --- /dev/null +++ b/perception/navigator_vision/object_classification/roi_generator_slow.py @@ -0,0 +1,203 @@ +#!/usr/bin/python +from navigator_tools import BagCrawler +import argparse +from cv_bridge import CvBridge +import cv2 +import sys +import pickle +import os +import numpy as np +from median_flow import MedianFlow +___author___ = "Tess Bianchi" + + +class ROI_Collection_Slow(): + + def __init__(self): + self.bag_to_rois = {} + + def pickle(self, name): + pickle.dump(self, open(name, "wb")) + + +class ROI_Generator_Slow(object): + + def __init__(self): + self.folder = os.path.dirname(os.path.realpath(__file__)) + self.bridge = CvBridge() + self.roi_to_tracker = {} + + def get_roi(self, name): + file = open(self.folder + '/' + name, 'r') + for line in file: + line = line.replace('\n', '') + if len(line) == 0: + continue + x, y, w, h = line.split(" ") + yield x, y, w, h + + def create(self, bag, output, load): + self.rects = {} + self.sel_rect = None + bc = BagCrawler(bag) + self.rclk = False + self.lclk = False + topic = bc.image_topics[0] + self.crawl_bu = bc.crawl(topic=topic) + image = self.crawl_bu.next() + self.image = self.bridge.imgmsg_to_cv2(image, 'bgr8') + self.crawl = bc.crawl(topic=topic) + self.x, self.y = 0, 0 + self.rsel = True + self.output = output + w, h, r = self.image.shape + self.button_pressed = False + if load: + self.collection = pickle.load(open(self.folder + '/' + output, "rb")) + else: + self.collection = ROI_Collection_Slow() + + self.collection.bag_to_rois[bag] = [] + self.mycoll = self.collection.bag_to_rois[bag] + + self.window_name = 'segment' + cv2.namedWindow(self.window_name) + cv2.setMouseCallback(self.window_name, self.mouse_roi) + self.last_rec = None + + def setw(x): + if self.sel_rect is not None: + r = self.rects[self.sel_rect] + r[2] = x + + def seth(x): + if self.sel_rect is not None: + r = self.rects[self.sel_rect] + r[3] = x + + cv2.createTrackbar("width", self.window_name, 0, w, setw) + cv2.createTrackbar("height", self.window_name, 0, h, seth) + + def out_range(self, bbox): + h, w, r = self.image.shape + if bbox[0] < 0 or bbox[0] + bbox[2] > w: + return True + if bbox[1] < 0 or bbox[1] + bbox[3] > h: + return True + return 
False + + def go(self): + while self.x is None: + cv2.waitKey(33) + image = self.crawl.next() + self.image = self.bridge.imgmsg_to_cv2(image, 'bgr8') + doing = False + pause = True + while True: + if doing: + continue + doing = True + k = chr(cv2.waitKey(50) & 0xFF) + if k == 'q': + break + elif k == ' ': + pause = not pause + elif not pause and not self.rclk: + try: + image = self.crawl.next() + self.image = self.bridge.imgmsg_to_cv2(image, 'bgr8') + except StopIteration: + break + + remove = [] + for name in self.rects: + bbox = self.roi_to_tracker[name].track(self.image) + if bbox is None or self.out_range(bbox): + remove.append(name) + else: + self.rects[name] = [bbox[0], bbox[1], bbox[2], bbox[3]] + for name in remove: + self.rects.pop(name) + self.roi_to_tracker.pop(name) + newrects = {} + for r in self.rects: + coords = self.rects[r] + name = raw_input('Enter name of {} object: '.format(r)) + if name == "skip": + pause = True + break + elif name == ' ': + break + else: + newrects[name] = coords + self.mycoll.append(newrects) + clone = self.image.copy() + for key in self.rects.keys(): + r = self.rects[key] + color = (255, 0, 0) + if key == self.sel_rect: + color = (0, 255, 0) + + cv2.rectangle(clone, (r[0], r[1]), (r[0] + r[2], r[1] + r[3]), color, 2) + cv2.putText(clone, key, (r[0], r[1]), 1, 1.0, (255, 0, 0)) + + cv2.imshow(self.window_name, clone) + doing = False + self.collection.pickle(self.output) + + def mouse_roi(self, event, x, y, flags, params): + if event == cv2.EVENT_RBUTTONDOWN: + self.rclk = not self.rclk + self.sel_rect = None + for name in self.rects: + r = self.rects[name] + self.roi_to_tracker[name] = MedianFlow() + self.roi_to_tracker[name].init(self.image, r) + return + if self.rclk: + if event == cv2.EVENT_LBUTTONDOWN and flags == 48: # 16: # pressing shift, remove box + if len(self.rects) > 0: + r = min(self.rects.items(), key=lambda rect: np.linalg.norm( + np.array([rect[1][0], rect[1][1]]) - np.array([x, y]))) + r = r[0] + self.rects.pop(r) + self.roi_to_tracker.pop(r) + self.sel_rect = None + elif event == cv2.EVENT_LBUTTONDOWN and flags == 40: # 8: # pressing cntrl, add box + name = raw_input('Enter name of object: ') + if name == "skip": + return + r = [20, 20, 20, 20] + self.rects[name] = r + elif event == cv2.EVENT_LBUTTONDOWN: + self.lclk = not self.lclk + if not self.lclk: + if self.sel_rect is not None: + r = self.rects[self.sel_rect] + self.sel_rect = None + else: + if len(self.rects) > 0: + self.sel_rect = min(self.rects.items(), key=lambda rect: np.linalg.norm( + np.array([rect[1][0], rect[1][1]]) - np.array([x, y]))) + self.sel_rect = self.sel_rect[0] + r = self.rects[self.sel_rect] + cv2.setTrackbarPos("width", self.window_name, r[2]) + cv2.setTrackbarPos("height", self.window_name, r[3]) + + elif event == cv2.EVENT_MOUSEMOVE: + self.x, self.y = x, y + if self.sel_rect is not None: + r = self.rects[self.sel_rect] + self.rects[self.sel_rect][0:4] = [self.x, self.y, r[2], r[3]] + + +if __name__ == '__main__': + parser = argparse.ArgumentParser() + parser.add_argument('bag', type=str, help='The bag you would like to use') + parser.add_argument('name', type=str, help='The name of the output file') + parser.add_argument('--load', action='store_true', help='The name of the output file') + args = parser.parse_args(sys.argv[1:]) + + roi = ROI_Generator_Slow() + roi.create(args.bag, args.name, args.load) + roi.go() diff --git a/perception/navigator_vision/object_classification/stc_train.py b/perception/navigator_vision/object_classification/stc_train.py 
b/perception/navigator_vision/object_classification/stc_train.py new file mode 100644 index 00000000..1c633ecb --- /dev/null +++ b/perception/navigator_vision/object_classification/stc_train.py @@ -0,0 +1,208 @@ +from __future__ import division +from roi_generator_slow import ROI_Collection_Slow +import pickle +import numpy as np +import cv2 +import numpy.linalg as npl +import numpy.ma as ma +from navigator_tools import BagCrawler +from cv_bridge import CvBridge +import scipy.ndimage.measurements as mes +from SVM_classifier import SVMClassifier + + +def _get_lw(box): + p0 = box[0] + p1 = box[1] + vec1 = np.array(box[2] - p0) + vec1 = vec1 / np.linalg.norm(vec1) + vec2 = np.array(p1 - p0) + vec2 = vec2 / np.linalg.norm(vec2) + vec3 = np.array(box[3] - p0) + vec3 = vec3 / np.linalg.norm(vec3) + ang1 = np.arccos((vec1).dot(vec2)) + ang2 = np.arccos((vec3).dot(vec2)) + dif1 = 1.5708 - ang1 + dif2 = 1.5708 - ang2 + if dif1 < dif2: + p2 = box[2] + else: + p2 = box[3] + l, lp = np.linalg.norm(abs(p1 - p0)), p1 + w, wp = np.linalg.norm(abs(p2 - p0)), p2 + if l < w: + temp = w + templ = wp + w = l + wp = lp + l = temp + lp = templ + direc = (wp - p0) / np.linalg.norm(wp - p0) + dot = direc.dot(np.array([0, 1])) + vcost = abs(dot) + return l, w, vcost + +
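For orientation, a small usage sketch for _get_lw above (the mask is hypothetical; cv2.cv.BoxPoints is the OpenCV 2.4 API this file uses below):

import cv2
import numpy as np

mask = np.zeros((100, 100), np.uint8)
mask[45:55, 20:80] = 255                        # a wide, flat 60x10 bar
pts = np.fliplr(np.transpose(mask.nonzero()))   # (x, y) pixel coordinates
rect = cv2.minAreaRect(pts.astype(np.float32))  # ((cx, cy), (w, h), angle)
box = np.int0(cv2.cv.BoxPoints(rect))           # the four corner points
l, w, vcost = _get_lw(box)
print l, w, vcost
# l ~ 59, w ~ 9; vcost ~ 1 because the short side points vertically here,
# so get_rectangle below would reject this box (it filters vcost > .5).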
+def get_rectangle(roi): + """ + Get the rectangle that has changing colors in the roi. + + Returns boolean success value and the four rectangle points in the image + """ + gaussian = cv2.GaussianBlur(roi, (9, 9), 10.0) + roi = cv2.addWeighted(roi, 1.5, gaussian, -0.5, 0, roi) + + nh, nw, r = roi.shape + + # cluster + Z = roi.reshape((-1, 3)) + Z = np.float32(Z) + criteria = (cv2.TERM_CRITERIA_EPS + cv2.TERM_CRITERIA_MAX_ITER, 10, 1.0) + K = 7 + ret, label, centers = cv2.kmeans(Z, K, criteria, 10, 0) + centers = np.uint8(centers) + image_as_centers = centers[label.flatten()] + image_as_centers = image_as_centers.reshape((roi.shape)) + labels = label.reshape((roi.shape[:2])) + + possible_clusters = list(np.arange(K)) + whiteness = map(lambda x: npl.norm(x - np.array([255, 255, 255])), centers) + whitest = np.argmin(whiteness) + possible_clusters.remove(whitest) + energys = [] + correct_masks = [] + for num, p in enumerate(possible_clusters): + mask_clusters = ma.masked_equal(labels, p) + + draw_mask = mask_clusters.mask.astype(np.uint8) + draw_mask *= 255 + + labeled_array, num_features = mes.label(draw_mask) + count = np.bincount(labeled_array.flatten()) + count = count[1:] + val = np.argmax(count) + mask_obj = ma.masked_equal(labeled_array, val + 1) + + draw_mask = mask_obj.mask.astype(np.uint8) + draw_mask *= 255 + + # cv2.imshow(str(num), draw_mask) + # cv2.waitKey(0) + + top = np.count_nonzero(draw_mask) + valz = np.fliplr(np.transpose(draw_mask.nonzero())) + rect = cv2.minAreaRect(valz) + box = cv2.cv.BoxPoints(rect) + box = np.int0(box) + rect_mask = np.zeros((nh, nw)) + cv2.drawContours(rect_mask, [box], 0, 255, -1) + bottom = np.count_nonzero(rect_mask) + + l, w, vcost = _get_lw(box) + if w < .001: + print 'WIDTH TOO SMALL' + continue + + valz = np.fliplr(np.transpose(draw_mask.nonzero())) + area = cv2.contourArea(box) + area /= (nh * nw) + + if vcost > .5: + print "VCOST TOO HIGH" + continue + if area < .03: + print area + print "TOOOO SMALL" + continue + if top / bottom < .7: + print "TOO SPARSE", top / bottom + continue + + energy = area + 1.5 * top / bottom - abs(2.5 - l / w) - .2 * vcost + if energy < 0: + print "LOW ENERGY!" + continue + print num, "area: ", area, "filled:", top, "total:", bottom, 'rat', top / bottom, "l/w", abs(2.5 - l / w), "vcost", vcost, "energy", energy + energys.append(energy) + correct_masks.append(mask_obj) + + if len(energys) == 0: + print "EVERY ENERGY WRONG" + return False, None + + correct_masks = [x for y, x in sorted(zip(energys, correct_masks), reverse=True)] + energys = sorted(energys, reverse=True) + + if len(energys) > 1 and abs(energys[0] - energys[1]) < .2: + print "TOO CLOSE TO CALLS" + return False, None + + correct_mask = correct_masks[0] + colors = roi[correct_mask.mask] + draw_mask = correct_mask.mask.astype(np.uint8) + draw_mask *= 255 + + return True, colors + + +class Config(): + + def __init__(self): + self.mymap = {'r': 1, 'b': 2, 'y': 3, 'k': 4} + self.inv_map = {v: k for k, v in self.mymap.iteritems()} + + def get_class(self, val): + return self.inv_map[val] + + def get_val(self, clss): + return self.mymap[clss] + + +class Training(object): + + def __init__(self): + self.config = Config() + self.svm = SVMClassifier() + self.data = [] + self.colors = [] + + def train(self, img, colors, color): + mean = np.mean(np.mean(img, axis=0), axis=0) + m = np.repeat([mean], len(colors), axis=0) + t = np.hstack([colors, m]) + color = self.config.get_val(color) + c = np.repeat(color, len(colors)) + + self.data.extend(t) + self.colors.extend(c) + + def pickle(self, file): + self.svm.train(self.data, self.colors) + self.svm.pickle(file) + + +if __name__ == "__main__": + t = Training() + bridge = CvBridge() + pick = pickle.load(open("stc_train1.p", 'rb')) + for bag in pick.bag_to_rois: + colors = pick.bag_to_rois[bag] + b = BagCrawler(bag) + topic = b.image_topics[0] + crawl = b.crawl(topic=topic) + for color in colors: + if len(color) == 0: + continue + color, roi = color.iteritems().next() + img = crawl.next() + img = bridge.imgmsg_to_cv2(img, 'bgr8') + image_clone = img.copy() + print roi, color + xmin, ymin, w, h = roi[0], roi[1], roi[2], roi[3] + cv2.rectangle(image_clone, (xmin, ymin), (xmin + w, ymin + h), (0, 255, 0), 2) + roi = img[ymin:ymin + h, xmin:xmin + w] + succ, color_vec = get_rectangle(roi) + if succ: + t.train(img, color_vec, color) + t.pickle("/home/tess/mil_ws/src/Navigator/mission_systems/navigator_scan_the_code/navigator_scan_the_code/scan_the_code_lib/svm_train.p") diff --git a/utils/navigator_tools/cfg/course_c.yaml b/utils/navigator_tools/cfg/course_c.yaml new file mode 100644 index 00000000..e322536d --- /dev/null +++ b/utils/navigator_tools/cfg/course_c.yaml @@ -0,0 +1,24 @@ +!!python/object/new:dynamic_reconfigure.encoding.Config +dictitems: + groups: !!python/object/new:dynamic_reconfigure.encoding.Config + dictitems: + enforce: true + groups: !!python/object/new:dynamic_reconfigure.encoding.Config + state: [] + id: 0 + lla_1: (21.305748423277702, -157.88798305044537) + lla_2: (21.30639710664512, -157.88695004440788) + lla_3: (21.30770272224603, -157.88763716280926) + lla_4: (21.30705404, -157.88867017) + name: Default + parameters: !!python/object/new:dynamic_reconfigure.encoding.Config + state: [] + parent: 0 + state: true + type: '' + state: [] + lla_1: (21.305748423277702, -157.88798305044537) + lla_2: (21.30639710664512, -157.88695004440788) + lla_3: (21.30770272224603, -157.88763716280926) + lla_4: (21.30705404, -157.88867017) +state: [] diff --git a/utils/navigator_tools/navigator_tools/object_database_helper.py b/utils/navigator_tools/navigator_tools/object_database_helper.py index 4c3543ab..4822a856 100644 ---
a/utils/navigator_tools/navigator_tools/object_database_helper.py +++ b/utils/navigator_tools/navigator_tools/object_database_helper.py @@ -32,7 +32,7 @@ def __init__(self, nh): @util.cancellableInlineCallbacks def init_(self, navigator=None): """Initialize the txros parts of the DBHelper.""" - self._sub_database = yield self.nh.subscribe('/database/objects', PerceptionObjectArray, self.object_cb) + # self._sub_database = yield self.nh.subscribe('/database/objects', PerceptionObjectArray, self.object_cb) self._database = yield self.nh.get_service_client("/database/requests", ObjectDBQuery) self.navigator = navigator if navigator is None: @@ -226,8 +226,6 @@ def get_object(self, object_name, volume_only=False, thresh=50, thresh_strict=50 min_obj = o defer.returnValue(min_obj) - print "good" - if len(actual_objects) == 1: defer.returnValue(actual_objects[0]) @@ -248,3 +246,20 @@ def set_color(self, color, name): def set_fake_position(self, pos): """Set the position of a fake perception object.""" raise NotImplementedError() + + @util.cancellableInlineCallbacks + def get_objects_in_radius(self, pos, radius, objects="all"): + req = ObjectDBQueryRequest() + req.name = 'all' + resp = yield self._database(req) + ans = [] + + if not resp.found: + defer.returnValue(ans) + + for o in resp.objects: + if objects == "all" or o.name in objects: + dist = np.linalg.norm(pos - nt.rosmsg_to_numpy(o.position)) + if dist < radius: + ans.append(o) + defer.returnValue(ans) From 84e56b2aabeb9236f9cafa815e6d7d9b999f4134 Mon Sep 17 00:00:00 2001 From: forrestv Date: Sat, 17 Dec 2016 21:15:16 -1000 Subject: [PATCH 205/267] DETECT DELIVER: add perception node to track shooter platform --- .../navigator_vision/nodes/shooter_finder.py | 264 ++++++++++++++++++ 1 file changed, 264 insertions(+) create mode 100755 perception/navigator_vision/nodes/shooter_finder.py diff --git a/perception/navigator_vision/nodes/shooter_finder.py b/perception/navigator_vision/nodes/shooter_finder.py new file mode 100755 index 00000000..2b0e4ab2 --- /dev/null +++ b/perception/navigator_vision/nodes/shooter_finder.py @@ -0,0 +1,264 @@ +#!/usr/bin/python + +from __future__ import division + +import math +import numpy +from twisted.internet import defer, threads +import txros +from txros import util, tf +import cv2 +import traceback +import time +import operator +import random + +defer.setDebugging(True) + +from navigator_tools import msg_helpers + +from tf import transformations +from sensor_msgs.msg import PointCloud2 +import sensor_msgs.point_cloud2 as pc2 +from navigator_msgs.srv import ObjectDBQuery, ObjectDBQueryRequest +from visualization_msgs.msg import Marker, MarkerArray +from std_msgs.msg import Header, ColorRGBA +from geometry_msgs.msg import Pose, Vector3, Point, Quaternion, PoseStamped +from nav_msgs.msg import Odometry + +def rect_lines(rect): + for i in xrange(4): + angle = rect[4] + i/4 * 2*math.pi + normal = numpy.array([math.cos(angle), math.sin(angle)]) + d_normal_over_d_angle = numpy.array([-math.sin(angle), math.cos(angle)]) + p = rect[:2] + (rect[2] if i%2 == 0 else rect[3])/2 * normal + yield (p, normal), numpy.array([ + [1, 0, normal[0]/2 if i%2==0 else 0, normal[0]/2 if i%2==1 else 0, (rect[2] if i%2 == 0 else rect[3])/2 * d_normal_over_d_angle[0]], + [0, 1, normal[1]/2 if i%2==0 else 0, normal[1]/2 if i%2==1 else 0, (rect[2] if i%2 == 0 else rect[3])/2 * d_normal_over_d_angle[1]], + [0, 0, 0, 0, d_normal_over_d_angle[0]], + [0, 0, 0, 0, d_normal_over_d_angle[1]], + ]) + +def dist_from_line(p, line, J): + return (p - 
line[0]).dot(line[1]), numpy.array([-line[1][0], -line[1][1], p[0]-line[0][0], p[1]-line[0][1]]).dot(J) + +def dist_from_rect(p, rect): + return min([dist_from_line(p, line, J) for line, J in rect_lines(rect)], key=lambda (dist, J): abs(dist)) + +def fit(good_points, rect): + print 'count', len(good_points) + for i in xrange(1): + r, J = zip(*[dist_from_rect(p, rect) for p in good_points]) + r = numpy.array(r) + J = numpy.vstack(list(J)) + cost = numpy.sum(r*r) + print 'cost', i, cost + try: + rect = rect - numpy.linalg.inv(J.T.dot(J)).dot(J.T).dot(r) + except numpy.linalg.LinAlgError: + traceback.print_exc() + return None + #print rect + return rect + +center_holder = [None] + +@util.cancellableInlineCallbacks +def poller(nh): + db_srv = nh.get_service_client('/database/requests', ObjectDBQuery) + + while True: + yield util.wall_sleep(3) + + while True: + try: + db_res = yield db_srv(ObjectDBQueryRequest( + name='shooter', + cmd='', + )) + except: + traceback.print_exc() + yield util.wall_sleep(1) + else: + break + + if not db_res.found: + print 'shooter not found' + continue + + obj = db_res.objects[0] + + points = map(msg_helpers.ros_to_np_3D, obj.points) + + if points: + center_holder[0] = numpy.mean(points, axis=0) + +@util.cancellableInlineCallbacks +def main(): + try: + center = None + nh = yield txros.NodeHandle.from_argv('shooter_finder', anonymous=True) + + pc_sub = nh.subscribe('/velodyne_points', PointCloud2) + tf_listener = tf.TransformListener(nh) + marker_pub = nh.advertise('/shooter_fit', MarkerArray) + result_pub = nh.advertise('/shooter_pose', PoseStamped) + odom_sub = nh.subscribe('/odom', Odometry) + + poller(nh) + + while True: + ts = [time.time()] + yield util.wall_sleep(.1) + ts.append(time.time()) + + center = center_holder[0] + odom = yield odom_sub.get_next_message() + cloud = yield pc_sub.get_next_message() + + if center is None: + yield util.wall_sleep(1) + continue + + #print center + + Z_MIN = 0 + RADIUS = 5 + + ts.append(time.time()) + + print 'start' + print cloud.header.stamp + try: + transform = yield tf_listener.get_transform('/enu', cloud.header.frame_id, cloud.header.stamp) + except tf.TooPastError: + print 'TooPastError!' 
+ continue + gen = numpy.array(list(pc2.read_points(cloud, skip_nans=True, field_names=("x", "y", "z")))).T + print gen.shape + print transform._p + gen = (transform._q_mat.dot(gen) + numpy.vstack([transform._p]*gen.shape[1]).T).T + good_points = numpy.array([(x[0], x[1]) for x in gen if x[2] > Z_MIN and math.hypot(x[0]-center[0], x[1]-center[1]) < RADIUS]) + print 'end' + + if len(good_points) < 10: + print 'no good points', len(good_points) + continue + + #if len(good_points) > 100: + # good_points = numpy.array(random.sample(list(good_points), 100)) + #print good_points + + ts.append(time.time()) + + rect = cv2.minAreaRect(good_points.astype(numpy.float32)) + rect = numpy.array(rect[0] + rect[1] + (math.radians(rect[2]),)) + + + ts.append(time.time()) + + #rect = yield threads.deferToThread(fit, good_points, rect) + if rect is None: + print 'bad fit' + continue + + rect = rect[:2], rect[2:4], rect[4] + + print 'rect:', rect + + + ts.append(time.time()) + + marker_pub.publish(MarkerArray( + markers=[Marker( + header=Header( + frame_id='/enu', + stamp=nh.get_time(), + ), + ns='shooter_ns', + id=1, + type=Marker.CUBE, + action=Marker.ADD, + pose=Pose( + position=Point(rect[0][0], rect[0][1], .5), + orientation=Quaternion(*transformations.quaternion_about_axis(rect[2], [0, 0, 1])), + ), + scale=Vector3(rect[1][0], rect[1][1], 2), + color=ColorRGBA(1, 1, 1, .5), + )], + )) + + possibilities = [] + for i in xrange(4): + angle = rect[2] + i/4 * 2*math.pi + normal = numpy.array([math.cos(angle), math.sin(angle)]) + p = numpy.array(rect[0]) + (rect[1][0] if i%2 == 0 else rect[1][1])/2 * normal + possibilities.append((normal, p, transformations.quaternion_about_axis(angle, [0, 0, 1]), rect[1][0] if i%2 == 1 else rect[1][1])) + best = max(possibilities, key=lambda (normal, p, q, size): (msg_helpers.ros_to_np_3D(odom.pose.pose.position)[:2] - rect[0]).dot(normal)) + + print 'side length:', best[-1] + if abs(best[-1] - 1.8) > .25: + print 'filtered out based on side length' + continue + + front_points = [p for p in good_points if abs((p - best[1]).dot(best[0])) < .2] + print 'len(front_points)', len(front_points) + a, b = numpy.linalg.lstsq( + numpy.vstack([ + [p[0] for p in front_points], + [p[1] for p in front_points], + ]).T, + numpy.ones(len(front_points)), + )[0] + m, b_ = numpy.linalg.lstsq( + numpy.vstack([ + [p[0] for p in front_points], + numpy.ones(len(front_points)), + ]).T, + [p[1] for p in front_points], + )[0] + print 'a, b', a, b, m, b_ + + print 'RES', numpy.std([numpy.array([a, b]).dot(p) for p in good_points]) + + # x = a, b + # x . p = 1 + # |x| (||x|| . p) = 1 + # ||x|| . 
p = 1 / |x| + + normal = numpy.array([a, b]) + dist = 1 / numpy.linalg.norm(normal) + normal = normal / numpy.linalg.norm(normal) + p = dist * normal + + print 'ZZZ', p.dot(numpy.array([a, b])) + + perp = numpy.array([normal[1], -normal[0]]) + x = (best[1] - p).dot(perp) + p = p + x * perp + if normal.dot(best[0]) < 0: normal = -normal + q = transformations.quaternion_about_axis(math.atan2(normal[1], normal[0]), [0, 0, 1]) + + print 'XXX', p, best[1] + + #sqrt(a2 + b2) (a x + b y) = 1 + + result_pub.publish(PoseStamped( + header=Header( + frame_id='/enu', + stamp=nh.get_time(), + ), + pose=Pose( + position=Point(p[0], p[1], 0), + orientation=Quaternion(*q), + ), + )) + + + ts.append(time.time()) + + print 'timing', ts[-1] - ts[0], map(operator.sub, ts[1:], ts[:-1]) + except: + traceback.print_exc() + +util.launch_main(main) From 0b0f35eb418d1a66d892f6fd181925a862978224 Mon Sep 17 00:00:00 2001 From: Tess Date: Sat, 17 Dec 2016 12:06:27 -1000 Subject: [PATCH 206/267] scan the code --- .../roi_generator_slow.py | 203 ------------------ 1 file changed, 203 deletions(-) delete mode 100644 perception/navigator_vision/object_classification/roi_generator_slow.py diff --git a/perception/navigator_vision/object_classification/roi_generator_slow.py b/perception/navigator_vision/object_classification/roi_generator_slow.py deleted file mode 100644 index d2c4a7b1..00000000 --- a/perception/navigator_vision/object_classification/roi_generator_slow.py +++ /dev/null @@ -1,203 +0,0 @@ -#!/usr/bin/python -from navigator_tools import BagCrawler -import argparse -from cv_bridge import CvBridge -import cv2 -import sys -import pickle -import os -import numpy as np -from median_flow import MedianFlow -___author___ = "Tess Bianchi" - - -class ROI_Collection_Slow(): - - def __init__(self): - self.bag_to_rois = {} - - def pickle(self, name): - pickle.dump(self, open(name, "wb")) - - -class ROI_Generator_Slow(object): - - def __init__(self): - self.folder = os.path.dirname(os.path.realpath(__file__)) - self.bridge = CvBridge() - self.roi_to_tracker = {} - - def get_roi(self, name): - file = open(self.folder + '/' + name, 'r') - for line in file: - line = line.replace('\n', '') - if len(line) == 0: - continue - x, y, w, h = line.split(" ") - yield x, y, w, h - - def create(self, bag, output, load): - self.rects = {} - self.sel_rect = None - bc = BagCrawler(bag) - self.rclk = False - self.lclk = False - topic = bc.image_topics[0] - self.crawl_bu = bc.crawl(topic=topic) - image = self.crawl_bu.next() - self.image = self.bridge.imgmsg_to_cv2(image, 'bgr8') - self.crawl = bc.crawl(topic=topic) - self.x, self.y = 0, 0 - self.rsel = True - self.output = output - w, h, r = self.image.shape - self.button_pressed = False - if load: - self.collection = pickle.load(open(self.folder + '/' + output, "rb")) - else: - self.collection = ROI_Collection_Slow() - - self.collection.bag_to_rois[bag] = [] - self.mycoll = self.collection.bag_to_rois[bag] - - self.window_name = 'segment' - cv2.namedWindow(self.window_name) - cv2.setMouseCallback(self.window_name, self.mouse_roi) - self.last_rec = None - - def setw(x): - if self.sel_rect is not None: - r = self.rects[self.sel_rect] - r[2] = x - - def seth(x): - if self.sel_rect is not None: - r = self.rects[self.sel_rect] - r[3] = x - - cv2.createTrackbar("width", self.window_name, 0, w, setw) - cv2.createTrackbar("height", self.window_name, 0, h, seth) - - def out_range(self, bbox): - h, w, r = self.image.shape - if bbox[0] < 0 or bbox[0] + bbox[2] > w: - return True - if bbox[1] < 0 or 
bbox[1] + bbox[3] > h: - return True - return False - - def go(self): - while self.x is None: - cv2.waitKey(33) - image = self.crawl.next() - self.image = self.bridge.imgmsg_to_cv2(image, 'bgr8') - doing = False - pause = True - while True: - if doing: - continue - doing = True - k = chr(cv2.waitKey(50) & 0xFF) - if k == 'q': - break - elif k == ' ': - pause = not pause - elif not pause and not self.rclk: - try: - image = self.crawl.next() - self.image = self.bridge.imgmsg_to_cv2(image, 'bgr8') - except StopIteration: - break - - remove = [] - for name in self.rects: - bbox = self.roi_to_tracker[name].track(self.image) - if bbox is None or self.out_range(bbox): - remove.append(name) - else: - self.rects[name] = [bbox[0], bbox[1], bbox[2], bbox[3]] - for name in remove: - self.rects.pop(name) - self.roi_to_tracker.pop(name) - newrects = {} - for r in self.rects: - coords = self.rects[r] - name = raw_input('Enter name of {} object: '.format(r)) - if name == "skip": - pause = True - break - elif name == ' ': - break - else: - newrects[name] = coords - self.mycoll.append(newrects) - clone = self.image.copy() - for key in self.rects.keys(): - r = self.rects[key] - color = (255, 0, 0) - if key == self.sel_rect: - color = (0, 255, 0) - - cv2.rectangle(clone, (r[0], r[1]), (r[0] + r[2], r[1] + r[3]), color, 2) - cv2.putText(clone, key, (r[0], r[1]), 1, 1.0, (255, 0, 0)) - - cv2.imshow(self.window_name, clone) - doing = False - self.collection.pickle(self.output) - - def mouse_roi(self, event, x, y, flags, params): - if event == cv2.EVENT_RBUTTONDOWN: - self.rclk = not self.rclk - self.sel_rect = None - for name in self.rects: - r = self.rects[name] - self.roi_to_tracker[name] = MedianFlow() - self.roi_to_tracker[name].init(self.image, r) - return - if self.rclk: - if event == cv2.EVENT_LBUTTONDOWN and flags == 48: # 16: # pressing shift, remove box - if len(self.rects) > 0: - r = min(self.rects.items(), key=lambda rect: np.linalg.norm( - np.array([rect[1][0], rect[1][1]]) - np.array([x, y]))) - r = r[0] - self.rects.pop(r) - self.roi_to_tracker.pop(r) - self.sel_rect = None - elif event == cv2.EVENT_LBUTTONDOWN and flags == 40: # 8: # pressing cntrl, add box - name = raw_input('Enter name of object: ') - if name == "skip": - return - r = [20, 20, 20, 20] - self.rects[name] = r - elif event == cv2.EVENT_LBUTTONDOWN: - self.lclk = not self.lclk - if not self.lclk: - if self.sel_rect is not None: - r = self.rects[self.sel_rect] - self.sel_rect = None - else: - if len(self.rects) > 0: - self.sel_rect = min(self.rects.items(), key=lambda rect: np.linalg.norm( - np.array([rect[1][0], rect[1][1]]) - np.array([x, y]))) - self.sel_rect = self.sel_rect[0] - r = self.rects[self.sel_rect] - cv2.setTrackbarPos("width", self.window_name, r[2]) - cv2.setTrackbarPos("height", self.window_name, r[3]) - - elif event == cv2.EVENT_MOUSEMOVE: - self.x, self.y = x, y - if self.sel_rect is not None: - r = self.rects[self.sel_rect] - self.rects[self.sel_rect][0:4] = [self.x, self.y, r[2], r[3]] - - -if __name__ == '__main__': - parser = argparse.ArgumentParser() - parser.add_argument('bag', type=str, help='The bag you would like to use') - parser.add_argument('name', type=str, help='The name of the output file') - parser.add_argument('--load', action='store_true', help='The name of the output file') - args = parser.parse_args(sys.argv[1:]) - - roi = ROI_Generator_Slow() - roi.create(args.bag, args.name, args.load) - roi.go() From 1f5ca356cd773a99d770bcc99c7167b0981713be Mon Sep 17 00:00:00 2001 From: Tess Date: Sat, 17 
Dec 2016 20:28:18 -1000 Subject: [PATCH 207/267] changes to stc, and db --- .../object_classification/lidar_to_image.py | 182 ++++++++---------- 1 file changed, 76 insertions(+), 106 deletions(-) diff --git a/perception/navigator_vision/object_classification/lidar_to_image.py b/perception/navigator_vision/object_classification/lidar_to_image.py index c1ce1b30..b67b5357 100644 --- a/perception/navigator_vision/object_classification/lidar_to_image.py +++ b/perception/navigator_vision/object_classification/lidar_to_image.py @@ -3,6 +3,8 @@ from txros import util, tf import navigator_tools as nt from navigator_tools import CvDebug +from collections import Counter +from image_geometry import PinholeCameraModel import sys from collections import deque from cv_bridge import CvBridge @@ -19,16 +21,14 @@ class LidarToImage(object): - def __init__(self, nh, training=False, classes=None, dist=50): + def __init__(self, nh, classes=None, dist=50): self.MAX_SIZE = 74 self.IMAGE_SIZE = 100 self.max_dist = dist self.bridge = CvBridge() self.nh = nh - self.id_to_perist = {} self.image_cache = deque() self.pose = None - self.training = training self.image = None self.classes = classes self.cam_info = None @@ -38,19 +38,9 @@ def __init__(self, nh, training=False, classes=None, dist=50): self.debug = CvDebug(nh) @util.cancellableInlineCallbacks - def init_(self, cam="r"): + def init_(self, cam): image_sub = "/stereo/right/image_rect_color" - self.tf_frame = "/stereo_right_cam" cam_info = "/stereo/right/camera_info" - if cam == 'l': - image_sub = "/stereo/left/image_rect_color" - self.tf_frame = "/stereo_left_cam" - cam_info = "/stereo/left/camera_info" - - if cam == 'rr': - image_sub = "/right/right/image_rect_color" - self.tf_frame = "/right_right_cam" - cam_info = "/right/right/camera_info" yield self.nh.subscribe(image_sub, Image, self._img_cb) self._database = yield self.nh.get_service_client('/database/requests', ObjectDBQuery) @@ -63,6 +53,36 @@ def init_(self, cam="r"): def _info_cb(self, info): self.cam_info = info + def _get_2d_points(self, points_3d): + # xmin, ymin, zmin = self._get_top_left_point(points_3d) + points_2d = map(lambda x: self.camera_model.project3dToPixel(x), points_3d) + return points_2d + + def _get_bounding_rect(self, points_2d, img): + xmin = np.inf + xmax = -np.inf + ymin = np.inf + ymax = -np.inf + h, w, r = img.shape + for i, point in enumerate(points_2d): + if(point[0] < xmin): + xmin = point[0] + if(point[0] > xmax): + xmax = point[0] + if(point[1] > ymax): + ymax = point[1] + if(point[1] < ymin): + ymin = point[1] + if xmin < 0: + xmin = 1 + if ymin < 0: + ymin = 1 + if xmax > w: + xmax = w - 1 + if ymax > h: + ymax = h - 1 + return xmin, ymin, xmax, ymax + @util.cancellableInlineCallbacks def get_object_rois(self, name=None): req = ObjectDBQueryRequest() @@ -72,8 +92,6 @@ def get_object_rois(self, name=None): req.name = name obj = yield self._database(req) - print name - print obj.found if obj is None or not obj.found: defer.returnValue((None, None)) rois = [] @@ -81,49 +99,31 @@ def get_object_rois(self, name=None): if ros_img is None: defer.returnValue((None, None)) img = self.bridge.imgmsg_to_cv2(ros_img, "mono8") - objects = obj.objects - if name is not None: - objects = [obj.objects[0]] - - for o in objects: - print o.name - if o.id not in self.id_to_perist: - self.id_to_perist[o.id] = [] - ppoints = self.id_to_perist[o.id] - ppoints.extend(o.points) - if len(ppoints) > 500: - ppoints = ppoints[:500] - if self.training and o.name not in self.classes: - continue - 
position = yield self._get_position() - o_pos = nt.rosmsg_to_numpy(o.position) - diff = np.linalg.norm(position - o_pos) - if diff > self.max_dist: - continue - points, bbox = yield self._get_bounding_box_2d(ppoints, obj.objects[0].header.stamp) - if bbox is None: - continue - - xmin, ymin, xmax, ymax = bbox - - h, w, r = img.shape - if xmin < 0 or xmax < 0 or xmin > w or xmax > w or xmax - xmin == 0 or ymax - ymin == 0: - continue - if ymin < 0: - ymin = 0 - roi = img[ymin:ymax, xmin:xmax] - roi = self._resize_image(roi) - ret_obj = {} - ret_obj["id"] = o.id - ret_obj["points"] = points - ret_obj["img"] = roi - ret_obj["time"] = o.header.stamp - ret_obj["name"] = o.name - ret_obj["bbox"] = [xmin, ymin, xmax, ymax] - rois.append(ret_obj) + o = obj.objects[0] + + points_3d = yield self.get_3d_points(o) + points_2d_all = map(lambda x: self.camera_model.project3dToPixel(x), points_3d) + points_2d = self._get_2d_points(points_3d) + xmin, ymin, xmax, ymax = self._get_bounding_rect(points_2d, img) + xmin, ymin, xmax, ymax = int(xmin), int(ymin), int(xmax), int(ymax) + h, w, r = img.shape + if xmin < 0 or xmax < 0 or xmin > w or xmax > w or xmax - xmin == 0 or ymax - ymin == 0: + continue + if ymin < 0: + ymin = 0 + roi = img[ymin:ymax, xmin:xmax] + roi = self._resize_image(roi) + ret_obj = {} + ret_obj["id"] = o.id + ret_obj["points"] = points_2d_all + ret_obj["img"] = roi + ret_obj["time"] = o.header.stamp + ret_obj["name"] = o.name + ret_obj["bbox"] = [xmin, ymin, xmax, ymax] + rois.append(ret_obj) defer.returnValue((img, rois)) - def _img_cb(self, image_ros): + def img_cb(self, image_ros): """Add an image to the image cache.""" self.image = image_ros @@ -133,55 +133,7 @@ def _img_cb(self, image_ros): self.image_cache.append(image_ros) @util.cancellableInlineCallbacks - def _get_position(self): - last_odom_msg = yield self._odom_sub.get_next_message() - defer.returnValue(nt.odometry_to_numpy(last_odom_msg)[0][0]) - - @txros.util.cancellableInlineCallbacks - def _get_bounding_box_2d(self, points_3d_enu, time): - if self.cam_info is None: - defer.returnValue((None, None)) - self.camera_model = PinholeCameraModel() - self.camera_model.fromCameraInfo(self.cam_info) - points_2d = [] - try: - trans = yield self.tf_listener.get_transform(self.tf_frame, "/enu", time) - except Exception: - defer.returnValue((None, None)) - transformation = trans.as_matrix() - - for point in points_3d_enu: - point = nt.rosmsg_to_numpy(point) - point = np.append(point, 1.0) - t_p = transformation.dot(point) - if t_p[3] < 1E-15: - defer.returnValue((None, None)) - t_p[0] /= t_p[3] - t_p[1] /= t_p[3] - t_p[2] /= t_p[3] - t_p = t_p[0:3] - if t_p[2] < 0: - defer.returnValue((None, None)) - - point_2d = self.camera_model.project3dToPixel(t_p) - points_2d.append((int(point_2d[0]), int(point_2d[1]))) - - xmin, ymin = sys.maxint, sys.maxint - xmax, ymax = -sys.maxint, -sys.maxint - - for i, point in enumerate(points_2d): - if point[0] < xmin: - xmin = point[0] - if point[0] > xmax: - xmax = point[0] - if point[1] < ymin: - ymin = point[1] - if point[1] > ymax: - ymax = point[1] - defer.returnValue((points_2d, (xmin, ymin, xmax, ymax))) - - @util.cancellableInlineCallbacks - def _get_closest_image(self, time): + def get_closest_image(self, time): min_img = None for i in range(0, 20): min_diff = genpy.Duration(sys.maxint) @@ -222,3 +174,21 @@ def _resize_image(self, img): img = np.repeat(img, reph, axis=0) return np.repeat(img, rep, axis=1) + + @txros.util.cancellableInlineCallbacks + def get_3d_points(self, perc_obj): + trans = 
yield self.my_tf.get_transform("/stereo_right_cam", "/enu", perc_obj.header.stamp) + + stereo_points = [] + for point in perc_obj.points: + stereo_points.append(np.array([point.x, point.y, point.z, 1])) + stereo_points = map(lambda x: trans.as_matrix().dot(x), stereo_points) + points = [] + for p in stereo_points: + if p[3] < 1E-15: + raise ZeroDivisionError + p[0] /= p[3] + p[1] /= p[3] + p[2] /= p[3] + points.append(p[:3]) + defer.returnValue(points) From 98465529edf75cbb33fa9da868da152c609aa0a7 Mon Sep 17 00:00:00 2001 From: Tess Date: Sat, 17 Dec 2016 21:10:26 -1000 Subject: [PATCH 208/267] changed mission planner to allow for sime mode --- .../object_classification/object_classification.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/perception/navigator_vision/object_classification/object_classification.py b/perception/navigator_vision/object_classification/object_classification.py index f34bc0a2..679cf78e 100644 --- a/perception/navigator_vision/object_classification/object_classification.py +++ b/perception/navigator_vision/object_classification/object_classification.py @@ -103,6 +103,7 @@ def train(self): desc = desc.flatten() descs.append(desc) classify.append(clss) + print clss descs = np.array(descs) classify = np.array(classify) counts = dict((x, list(classify).count(x)) for x in set(classify)) @@ -142,8 +143,8 @@ def classify(self): cv2.waitKey(33) if __name__ == "__main__": - # t = Training("train_roi.p", "train.p") - # t.train() - # print "done" - c = ClassiferTest("val_roi.p", "train.p") - c.classify() + t = Training("roi_competition.p", "train_competition.p") + t.train() + print "done" + # c = ClassiferTest("val_roi.p", "train.p") + # c.classify() From a67b1506edcbb75a8ccbc4e392b2df2a7aa7456f Mon Sep 17 00:00:00 2001 From: mattlangford Date: Mon, 2 Jan 2017 19:45:21 -0500 Subject: [PATCH 209/267] TOOLS: Add fprint, add more generic tool interface --- utils/sub8_ros_tools/setup.py | 2 +- .../sub8_misc_tools/__init__.py | 2 + .../sub8_ros_tools/sub8_misc_tools/fprint.py | 78 +++++++++++++++++++ .../sub8_ros_tools/sub8_ros_tools/__init__.py | 18 ++--- utils/sub8_ros_tools/sub8_tools/__init__.py | 2 + 5 files changed, 89 insertions(+), 13 deletions(-) create mode 100644 utils/sub8_ros_tools/sub8_misc_tools/fprint.py create mode 100644 utils/sub8_ros_tools/sub8_tools/__init__.py diff --git a/utils/sub8_ros_tools/setup.py b/utils/sub8_ros_tools/setup.py index 4ef788ff..5dfbc42f 100644 --- a/utils/sub8_ros_tools/setup.py +++ b/utils/sub8_ros_tools/setup.py @@ -5,7 +5,7 @@ # fetch values from package.xml setup_args = generate_distutils_setup( - packages=['sub8_ros_tools', 'sub8_misc_tools'], + packages=['sub8_ros_tools', 'sub8_misc_tools', 'sub8_tools'], ) setup(**setup_args) diff --git a/utils/sub8_ros_tools/sub8_misc_tools/__init__.py b/utils/sub8_ros_tools/sub8_misc_tools/__init__.py index e518ed59..0d80c755 100644 --- a/utils/sub8_ros_tools/sub8_misc_tools/__init__.py +++ b/utils/sub8_ros_tools/sub8_misc_tools/__init__.py @@ -1,3 +1,5 @@ # flake8: noqa from download import download_and_unzip from download import download + +from fprint import fprint diff --git a/utils/sub8_ros_tools/sub8_misc_tools/fprint.py b/utils/sub8_ros_tools/sub8_misc_tools/fprint.py new file mode 100644 index 00000000..fbd87b05 --- /dev/null +++ b/utils/sub8_ros_tools/sub8_misc_tools/fprint.py @@ -0,0 +1,78 @@ +import rospy + + +class MissingPerceptionObject(): + def __init__(self, name): + self.name = name + +class Colors(): + # Some cool stuff could happen here + red 
= '\033[91m' + green = '\033[92m' + yellow = '\033[93m' + blue = '\033[94m' + bold = '\033[1m' + + reset = '\033[0m' + + def __getattr__(self, arg): + # If we get a non existent color, return the reset color + return self.reset + +class Seperator(): + # TODO + def __getattr__(self, *args, **kwargs): + return + + def equals(self, _len, blank_space=False): + text = "{}".format("=" * len) + if blank_space: + return "\n{}\n".format(text) + + return text + + +def fprint(msg, time=None, title=None, newline=True, msg_color=None): + time_header = False + title_header = False + + if title is not None: + title_header = "{C.blue}{C.bold}{title}{C.reset}".format(C=Colors, title=title) + + if msg_color is not None: + msg = "{color}{msg}{C.reset}".format(color=getattr(Colors(), msg_color), C=Colors, msg=msg) + + if time == "": + time_header = False + + elif time is None: + try: + time = rospy.Time.now().to_sec() + time_header = "{C.bold}{time}{C.reset}".format(C=Colors, time=time) + except rospy.exceptions.ROSInitException: + pass + else: + time_header = "{C.bold}{time}{C.reset}".format(C=Colors, time=time) + + if title_header and time_header: + # (TIME) HEADER: message + to_print = "{time} {title}: {msg}" + elif time_header: + # (TIME): message + to_print = "{time}: {msg}" + elif title_header: + # HEADER: message + to_print = "{title}: {msg}" + else: + # message + to_print = "{msg}" + + if newline: + if not isinstance(newline, bool): + msg += "\n" * (newline - 1) # Since printing implicitly adds a new line + print to_print.format(time=time_header, title=title_header, msg=msg) + else: + # Note, this adds a space at the end. + print to_print.format(time=time_header, title=title_header, msg=msg), + +print_t = fprint # For legacy diff --git a/utils/sub8_ros_tools/sub8_ros_tools/__init__.py b/utils/sub8_ros_tools/sub8_ros_tools/__init__.py index 8ef8b94a..ae5c2664 100644 --- a/utils/sub8_ros_tools/sub8_ros_tools/__init__.py +++ b/utils/sub8_ros_tools/sub8_ros_tools/__init__.py @@ -6,15 +6,9 @@ import geometry_helpers import func_helpers -# TODO: Adjust all existing code to not require these to be top-level imports -from init_helpers import wait_for_param, wait_for_subscriber -from image_helpers import Image_Subscriber, Image_Publisher, make_image_msg, get_image_msg, get_parameter_range -from msg_helpers import ( - make_header, odom_sub, - rosmsg_to_numpy, pose_to_numpy, twist_to_numpy, odometry_to_numpy, posetwist_to_numpy, - make_wrench_stamped, make_pose_stamped, - numpy_to_quaternion, numpy_pair_to_pose, numpy_to_point, numpy_quat_pair_to_pose, numpy_to_twist -) -from threading_helpers import thread_lock -from geometry_helpers import (make_rotation, normalize, skew_symmetric_cross, deskew, compose_transformation, - project_pt_to_plane, clip_norm) +from image_helpers import * +from init_helper import * +from msg_helpers import * +from threading_helpers import * +from geometry_helpers import * +from func_helpers import * diff --git a/utils/sub8_ros_tools/sub8_tools/__init__.py b/utils/sub8_ros_tools/sub8_tools/__init__.py new file mode 100644 index 00000000..c36a9ccb --- /dev/null +++ b/utils/sub8_ros_tools/sub8_tools/__init__.py @@ -0,0 +1,2 @@ +from sub8_ros_tools import * +from sub8_misc_tools import * From e1feec48d2481c102a7d356e1b979fa4ebc5af8a Mon Sep 17 00:00:00 2001 From: mattlangford Date: Mon, 2 Jan 2017 20:16:15 -0500 Subject: [PATCH 210/267] MISSIONS: Add vision proxy config, port some more navigator files back --- utils/sub8_ros_tools/sub8_ros_tools/__init__.py | 2 +- 1 file changed, 1 
insertion(+), 1 deletion(-) diff --git a/utils/sub8_ros_tools/sub8_ros_tools/__init__.py b/utils/sub8_ros_tools/sub8_ros_tools/__init__.py index ae5c2664..f0d53137 100644 --- a/utils/sub8_ros_tools/sub8_ros_tools/__init__.py +++ b/utils/sub8_ros_tools/sub8_ros_tools/__init__.py @@ -7,7 +7,7 @@ import func_helpers from image_helpers import * -from init_helper import * +from init_helpers import * from msg_helpers import * from threading_helpers import * from geometry_helpers import * From 72e9134e27a81ae8177bde5c4bb0499428b8511c Mon Sep 17 00:00:00 2001 From: mattlangford Date: Tue, 3 Jan 2017 03:03:53 -0500 Subject: [PATCH 211/267] FPRINT: Glorious updates --- .../sub8_misc_tools/__init__.py | 2 +- .../sub8_ros_tools/sub8_misc_tools/fprint.py | 78 ---------- .../sub8_misc_tools/text_effects.py | 138 ++++++++++++++++++ 3 files changed, 139 insertions(+), 79 deletions(-) delete mode 100644 utils/sub8_ros_tools/sub8_misc_tools/fprint.py create mode 100644 utils/sub8_ros_tools/sub8_misc_tools/text_effects.py diff --git a/utils/sub8_ros_tools/sub8_misc_tools/__init__.py b/utils/sub8_ros_tools/sub8_misc_tools/__init__.py index 0d80c755..86a0aa42 100644 --- a/utils/sub8_ros_tools/sub8_misc_tools/__init__.py +++ b/utils/sub8_ros_tools/sub8_misc_tools/__init__.py @@ -2,4 +2,4 @@ from download import download_and_unzip from download import download -from fprint import fprint +import text_effects diff --git a/utils/sub8_ros_tools/sub8_misc_tools/fprint.py b/utils/sub8_ros_tools/sub8_misc_tools/fprint.py deleted file mode 100644 index fbd87b05..00000000 --- a/utils/sub8_ros_tools/sub8_misc_tools/fprint.py +++ /dev/null @@ -1,78 +0,0 @@ -import rospy - - -class MissingPerceptionObject(): - def __init__(self, name): - self.name = name - -class Colors(): - # Some cool stuff could happen here - red = '\033[91m' - green = '\033[92m' - yellow = '\033[93m' - blue = '\033[94m' - bold = '\033[1m' - - reset = '\033[0m' - - def __getattr__(self, arg): - # If we get a non existent color, return the reset color - return self.reset - -class Seperator(): - # TODO - def __getattr__(self, *args, **kwargs): - return - - def equals(self, _len, blank_space=False): - text = "{}".format("=" * len) - if blank_space: - return "\n{}\n".format(text) - - return text - - -def fprint(msg, time=None, title=None, newline=True, msg_color=None): - time_header = False - title_header = False - - if title is not None: - title_header = "{C.blue}{C.bold}{title}{C.reset}".format(C=Colors, title=title) - - if msg_color is not None: - msg = "{color}{msg}{C.reset}".format(color=getattr(Colors(), msg_color), C=Colors, msg=msg) - - if time == "": - time_header = False - - elif time is None: - try: - time = rospy.Time.now().to_sec() - time_header = "{C.bold}{time}{C.reset}".format(C=Colors, time=time) - except rospy.exceptions.ROSInitException: - pass - else: - time_header = "{C.bold}{time}{C.reset}".format(C=Colors, time=time) - - if title_header and time_header: - # (TIME) HEADER: message - to_print = "{time} {title}: {msg}" - elif time_header: - # (TIME): message - to_print = "{time}: {msg}" - elif title_header: - # HEADER: message - to_print = "{title}: {msg}" - else: - # message - to_print = "{msg}" - - if newline: - if not isinstance(newline, bool): - msg += "\n" * (newline - 1) # Since printing implicitly adds a new line - print to_print.format(time=time_header, title=title_header, msg=msg) - else: - # Note, this adds a space at the end. 
- print to_print.format(time=time_header, title=title_header, msg=msg), - -print_t = fprint # For legacy diff --git a/utils/sub8_ros_tools/sub8_misc_tools/text_effects.py b/utils/sub8_ros_tools/sub8_misc_tools/text_effects.py new file mode 100644 index 00000000..76306ec9 --- /dev/null +++ b/utils/sub8_ros_tools/sub8_misc_tools/text_effects.py @@ -0,0 +1,138 @@ +class Colors(): + red = '\033[31m' + green = '\033[32m' + yellow = '\033[33m' + blue = '\033[34m' + purple = '\033[35m' + cyan = '\033[36m' + white = '\033[37m' + + underline = '\033[2m' + bold = '\033[1m' + negative = '\033[3m' + + reset = '\033[0m' + + def __getattr__(self, arg): + # If we get a non existent color, return the reset color + return self.reset + + +class Printer(object): + def __init__(self, string="", autospace=False): + self._autospace = autospace + self._string = string + + # Default adding text + self.text = lambda text: self + text + + # Colors + self.red = lambda text: self + (Colors.red + str(text) + Colors.reset) + self.green = lambda text: self + (Colors.green + str(text) + Colors.reset) + self.yellow = lambda text: self + (Colors.yellow + str(text) + Colors.reset) + self.blue = lambda text: self + (Colors.blue + str(text) + Colors.reset) + self.purple = lambda text: self + (Colors.purple + str(text) + Colors.reset) + self.cyan = lambda text: self + (Colors.cyan + str(text) + Colors.reset) + self.white = lambda text: self + (Colors.white + str(text) + Colors.reset) + + # Text effects + self.underline = lambda text: Printer(self._string + Colors.underline + str(text) + Colors.reset) + self.bold = lambda text: Printer(self._string + Colors.bold + str(text) + Colors.reset) + self.negative = lambda text: Printer(self._string + Colors.negative + str(text) + Colors.reset) + + # For passing in custom formatting + self.custom = lambda text, effect: self + (effect + str(text) + Colors.reset) + + def __repr__(self): + return self._string + Colors.reset + __str__ = __repr__ + + def __add__(self, other): + extra_space = ' ' if self._autospace and self._string is not '' else '' + return Printer(self._string + extra_space + str(other), self._autospace) + + @property + def set_red(self): + return Printer(self._string + Colors.red) + + @property + def set_green(self): + return Printer(self._string + Colors.green) + + @property + def set_yellow(self): + return Printer(self._string + Colors.yellow) + + @property + def set_blue(self): + return Printer(self._string + Colors.blue) + + @property + def set_purple(self): + return Printer(self._string + Colors.purple) + + @property + def set_cyan(self): + return Printer(self._string + Colors.cyan) + + @property + def set_white(self): + return Printer(self._string + Colors.white) + + @property + def reset(self): + return Printer(self._string + Colors.reset) + + def space(self, count=1): + return Printer(self._string + ' ' * count) + + def newline(self, count=1): + return Printer(self._string + '\n' * count) + + def enable_autospaces(self): + self._autospace = True + + def disable_autospaces(self): + self._autospace = False + + +class FprintFactory(object): + def __init__(self, title=None, time=None, msg_color=None, newline=1): + assert time is None or callable(time), "`time` should be `None` for no printing or a function that generates a timestamp." + assert msg_color is None or isinstance(newline, str), "`msg_color` should be `None` for default printing or a string color." + assert newline is None or isinstance(newline, int), "`newline` should be the number of newlines after the text (default 1)" + + # All these can be overwritten if not specified here + self.title = title # Title to print with each message + self.time = time # Either `None` for no printing or a function that generates a timestamp + self.msg_color = msg_color # Either `None` for deafult printing or a string color + self.newline = newline # The number of newlines characters to add to the end + + self.printer = Printer() + + def fprint(self, text, **kwargs): + title = kwargs.get("title", self.title) + time = kwargs.get("time", self.time) + msg_color = kwargs.get("msg_color", self.msg_color) + newline = kwargs.get("newline", self.newline) + + message = self.printer + if title is not None: + message = message.set_blue.bold(title).reset.space() + + if time is not None: + t = time() + message = message.bold(t).space() + + if msg_color is not None: + message = message.custom(text, getattr(Colors, msg_color)) + else: + message = message.text(text) + + if newline == 1: + print message + else: + print message.newline(newline - 1) + +# Standard instantiation +fprint = FprintFactory().fprint
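A usage sketch for the new module (illustrative only, assuming the package layout added in this patch):

from sub8_misc_tools.text_effects import FprintFactory, Printer, fprint

# Chainable printer: build up a colored, bold string; reset is appended on print.
print Printer().set_blue.bold("MISSION").reset.space().text("waypoint reached")

# Module-level fprint from the default factory.
fprint("odometry received")

# A factory preconfigured for one subsystem; each call inherits its settings.
# Note: passing msg_color here trips the inverted isinstance assert above,
# which is exactly the typo that PATCH 213 below fixes.
mission_fprint = FprintFactory(title="MISSION", msg_color="green").fprint
mission_fprint("pinger located", newline=2)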
+        assert newline is None or isinstance(newline, int), "`newline` should be the number of newlines after the text (default 1)"
+
+        # All these can be overwritten if not specified here
+        self.title = title  # Title to print with each message
+        self.time = time  # Either `None` for no printing or a function that generates a timestamp
+        self.msg_color = msg_color  # Either `None` for default printing or a string color
+        self.newline = newline  # The number of newline characters to add to the end
+
+        self.printer = Printer()
+
+    def fprint(self, text, **kwargs):
+        title = kwargs.get("title", self.title)
+        time = kwargs.get("time", self.time)
+        msg_color = kwargs.get("msg_color", self.msg_color)
+        newline = kwargs.get("newline", self.newline)
+
+        message = self.printer
+        if title is not None:
+            message = message.set_blue.bold(title).reset.space()
+
+        if time is not None:
+            t = time()
+            message = message.bold(t).space()
+
+        if msg_color is not None:
+            message = message.custom(text, getattr(Colors, msg_color))
+        else:
+            message = message.text(text)
+
+        if newline == 1:
+            print message
+        else:
+            print message.newline(newline - 1)
+
+# Standard instantiation
+fprint = FprintFactory().fprint
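
The chained Printer/FprintFactory API above is easiest to see end to end with a small usage sketch. This is not part of the patch; it assumes the Python 2 environment the rest of the repo targets and that sub8_misc_tools is importable:

    from time import time
    from sub8_misc_tools.text_effects import Printer, FprintFactory

    # Each call returns a new Printer, so partially styled prefixes can be reused
    p = Printer()
    print p.set_blue.bold("STATUS").reset.space().text("thrusters nominal")

    # A factory bakes in a title and a timestamp source; fprint then behaves like print
    fprint = FprintFactory(title="MISSION", time=lambda: round(time(), 2)).fprint
    fprint("Depth hold engaged", msg_color="green")

Note that msg_color is passed per call here rather than to the constructor, which also sidesteps the constructor assert that PATCH 213 below repairs.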
From ee785e2ba60b43056145fe914db1b7dc84cb5006 Mon Sep 17 00:00:00 2001
From: David Soto
Date: Mon, 9 Jan 2017 13:16:29 -0500
Subject: [PATCH 212/267] SONAR: fix sporadic bootloading bug

---
 drivers/paulboard_driver/scripts/paulboard_driver | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/drivers/paulboard_driver/scripts/paulboard_driver b/drivers/paulboard_driver/scripts/paulboard_driver
index 53a0e29c..d5dcbafd 100755
--- a/drivers/paulboard_driver/scripts/paulboard_driver
+++ b/drivers/paulboard_driver/scripts/paulboard_driver
@@ -128,7 +128,7 @@ if __name__ == '__main__':
     port = rospy.get_param('~port')
     print 'Port: ', port

-    with serial.Serial(port, 1152000, timeout=.3) as ser:
+    with serial.Serial(port, 115200, timeout=.3) as ser:
         if check_board_bootloaded(ser):
             rospy.loginfo('PaulBoard already bootloaded')
         else:

From b7f5d60e53ad5cc4a3433de0b5e34b830a9b1176 Mon Sep 17 00:00:00 2001
From: RustyBamboo
Date: Fri, 13 Jan 2017 09:01:52 -0500
Subject: [PATCH 213/267] FPRINT: Fixed typo when initializing FprintFactory
 with a default color

---
 utils/sub8_ros_tools/sub8_misc_tools/text_effects.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/utils/sub8_ros_tools/sub8_misc_tools/text_effects.py b/utils/sub8_ros_tools/sub8_misc_tools/text_effects.py
index 76306ec9..0e3bea24 100644
--- a/utils/sub8_ros_tools/sub8_misc_tools/text_effects.py
+++ b/utils/sub8_ros_tools/sub8_misc_tools/text_effects.py
@@ -99,7 +99,7 @@ def disable_autospaces(self):
 class FprintFactory(object):
     def __init__(self, title=None, time=None, msg_color=None, newline=1):
         assert time is None or callable(time), "`time` should be `None` for no printing or a function that generates a timestamp."
-        assert msg_color is None or isinstance(newline, str), "`msg_color` should be `None` for default printing or a string color."
+        assert msg_color is None or isinstance(msg_color, str), "`msg_color` should be `None` for default printing or a string color."
         assert newline is None or isinstance(newline, int), "`newline` should be the number of newlines after the text (default 1)"

From f6c085af5294d120969be2d46e090c531a7dd9c4 Mon Sep 17 00:00:00 2001
From: mattlangford
Date: Fri, 20 Jan 2017 23:37:30 -0500
Subject: [PATCH 214/267] FPRINT: Add auto_bold option

---
 utils/sub8_ros_tools/sub8_misc_tools/text_effects.py | 10 +++++++++-
 1 file changed, 9 insertions(+), 1 deletion(-)

diff --git a/utils/sub8_ros_tools/sub8_misc_tools/text_effects.py b/utils/sub8_ros_tools/sub8_misc_tools/text_effects.py
index 0e3bea24..15e6121d 100644
--- a/utils/sub8_ros_tools/sub8_misc_tools/text_effects.py
+++ b/utils/sub8_ros_tools/sub8_misc_tools/text_effects.py
@@ -97,15 +97,17 @@ def disable_autospaces(self):
 class FprintFactory(object):
-    def __init__(self, title=None, time=None, msg_color=None, newline=1):
+    def __init__(self, title=None, time=None, msg_color=None, auto_bold=True, newline=1):
         assert time is None or callable(time), "`time` should be `None` for no printing or a function that generates a timestamp."
         assert msg_color is None or isinstance(msg_color, str), "`msg_color` should be `None` for default printing or a string color."
+        assert isinstance(auto_bold, bool), "`auto_bold` should be True or False, indicating whether messages are printed in bold by default"
         assert newline is None or isinstance(newline, int), "`newline` should be the number of newlines after the text (default 1)"

         # All these can be overwritten if not specified here
         self.title = title  # Title to print with each message
         self.time = time  # Either `None` for no printing or a function that generates a timestamp
         self.msg_color = msg_color  # Either `None` for default printing or a string color
+        self.auto_bold = auto_bold  # Should each message be bolded by default
         self.newline = newline  # The number of newline characters to add to the end

         self.printer = Printer()
@@ -114,6 +116,7 @@ def fprint(self, text, **kwargs):
         title = kwargs.get("title", self.title)
         time = kwargs.get("time", self.time)
         msg_color = kwargs.get("msg_color", self.msg_color)
+        auto_bold = kwargs.get("auto_bold", self.auto_bold)
         newline = kwargs.get("newline", self.newline)

         message = self.printer
@@ -123,7 +126,12 @@ def fprint(self, text, **kwargs):
         if time is not None:
             t = time()
             message = message.bold(t).space()
+
+        message += ": "
+        if auto_bold:
+            text = str(self.printer.bold(text))
+
         if msg_color is not None:
             message = message.custom(text, getattr(Colors, msg_color))
         else:

From 4fbd5c0aa60da92504e896ace0cc2125afbb2879 Mon Sep 17 00:00:00 2001
From: David Soto
Date: Mon, 16 Jan 2017 02:29:40 -0500
Subject: [PATCH 215/267] TESTS: add ros_alarms server to integration test

---
 utils/sub8_ros_tools/test_ros_tools/test_ros_tools.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/utils/sub8_ros_tools/test_ros_tools/test_ros_tools.py b/utils/sub8_ros_tools/test_ros_tools/test_ros_tools.py
index 6ca2f41b..7ced2bf7 100644
--- a/utils/sub8_ros_tools/test_ros_tools/test_ros_tools.py
+++ b/utils/sub8_ros_tools/test_ros_tools/test_ros_tools.py
@@ -51,7 +51,7 @@ def test_make_wrench_stamped(self):
         msg_force = rosmsg_to_numpy(wrench_msg.wrench.force)  # noqa
         msg_torque = rosmsg_to_numpy(wrench_msg.wrench.torque)  # noqa
         self.assertIsNotNone(msg_force)
-        self.assertIsnotNone(msg_torque)
+        self.assertIsNotNone(msg_torque)

     def test_make_image_msg(self):
         '''Test that make ros image message doesn't fail'''
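
Taken together, PATCHES 213 and 214 make the factory configurable enough for per-module loggers. A hedged sketch of typical use (illustrative only; nothing here is prescribed by the patches themselves):

    from time import time
    from sub8_misc_tools.text_effects import FprintFactory

    fprint = FprintFactory(title="SONAR", time=lambda: round(time(), 2)).fprint

    fprint("Driver online")                    # bold by default (auto_bold=True)
    fprint("Ping timeout", msg_color="red")    # per-call override of the default color
    fprint("Shutting down", newline=3)         # pad with extra trailing newlines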
From c776d2a1d89a320a43e8a3aa8f482ac1a6b7d85c Mon Sep 17 00:00:00 2001
From: Anthony Olive
Date: Thu, 2 Feb 2017 10:03:33 -0500
Subject: [PATCH 216/267] INSTALL: Replace all progressbar dependencies with
 tqdm

---
 utils/navigator_tools/navigator_tools/bag_crawler.py | 8 +++++---
 1 file changed, 5 insertions(+), 3 deletions(-)

diff --git a/utils/navigator_tools/navigator_tools/bag_crawler.py b/utils/navigator_tools/navigator_tools/bag_crawler.py
index 662971df..46ab8a26 100644
--- a/utils/navigator_tools/navigator_tools/bag_crawler.py
+++ b/utils/navigator_tools/navigator_tools/bag_crawler.py
@@ -1,12 +1,13 @@
+
 #!/usr/bin/python
 """
-This file wis written by the team at UF MIL for the 2016 robosub competition.
+This file was written by the team at UF MIL for the 2016 robosub competition. github.com/uf-mil
 """
 import rosbag
 from cv_bridge import CvBridge
-import progressbar
+import tqdm


 class BagCrawler(object):
@@ -23,7 +24,7 @@ def convert(self, msg):
     def crawl(self, topic=None, is_image=False, max_msgs=float('inf')):
         num_seen = 0
         num_msgs = 0
-        bar = progressbar.ProgressBar(max_value=self.bag.get_message_count())
+        bar = tqdm.tqdm(total=self.bag.get_message_count())
         if is_image:
             topic = self.image_topics[0]
         for msg_topic, msg, t in self.bag.read_messages():
@@ -35,6 +36,7 @@ def crawl(self, topic=None, is_image=False, max_msgs=float('inf')):
                 break
             num_seen += 1
             yield msg
+        bar.close()

     @property
     def image_topics(self, cam="right"):
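
The same progressbar-to-tqdm translation applies anywhere a generator walks a bag: construct tqdm with the known message total, update(1) per message, close() when exhausted. A minimal standalone sketch of the pattern (the bag path and topic below are hypothetical):

    import rosbag
    import tqdm

    def crawl(bag_path, topic):
        bag = rosbag.Bag(bag_path)
        # total= lets tqdm show a percentage even though read_messages is lazy
        bar = tqdm.tqdm(total=bag.get_message_count())
        for msg_topic, msg, t in bag.read_messages():
            bar.update(1)
            if msg_topic == topic:
                yield msg
        bar.close()
        bag.close()

    for image_msg in crawl('test.bag', '/down/left/image_raw'):
        pass  # hand each message to whatever consumer needs it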
From f4bd9e68efed5fe650f360f194f59d6bd3470bc5 Mon Sep 17 00:00:00 2001
From: David Soto
Date: Tue, 31 Jan 2017 03:53:52 -0500
Subject: [PATCH 217/267] PERCEPTION: move sub pkg to top level perception dir

---
 .../sub8_perception/ros_tools/easy_thresh.py  | 162 ++++++++++++++++++
 .../sub8_perception/ros_tools/image_dumper.py | 155 +++++++++++++++++
 2 files changed, 317 insertions(+)
 create mode 100755 perception/sub8_perception/ros_tools/easy_thresh.py
 create mode 100755 perception/sub8_perception/ros_tools/image_dumper.py

diff --git a/perception/sub8_perception/ros_tools/easy_thresh.py b/perception/sub8_perception/ros_tools/easy_thresh.py
new file mode 100755
index 00000000..5fb36d6e
--- /dev/null
+++ b/perception/sub8_perception/ros_tools/easy_thresh.py
@@ -0,0 +1,162 @@
+#!/usr/bin/python
+# PYTHON_ARGCOMPLETE_OK
+
+import argcomplete
+import sys
+import argparse
+import rospy
+
+all_topics = rospy.get_published_topics()
+topics = [topic[0] for topic in all_topics if topic[1] == 'sensor_msgs/Image']
+
+usage_msg = ("Name an image topic, we'll subscribe to it and grab the first image we can. " +
+             "Then click a rectangle around the area of interest")
+desc_msg = "A tool for making threshold determination fun!"
+
+parser = argparse.ArgumentParser(usage=usage_msg, description=desc_msg)
+parser.add_argument(dest='topic_name',
+                    help="The topic name you'd like to listen to",
+                    choices=topics)
+parser.add_argument('--hsv', action='store_true',
+                    help="Would you like to look at hsv instead of bgr?")
+
+argcomplete.autocomplete(parser)
+args = parser.parse_args(sys.argv[1:])
+if args.hsv:
+    print 'Using HSV instead of bgr'
+prefix = 'hsv' if args.hsv else 'bgr'
+
+# Importing these late so that argcomplete can run quickly
+from sub8_vision_tools import visual_threshold_tools  # noqa
+from sub8_ros_tools.image_helpers import Image_Subscriber  # noqa
+import cv2  # noqa
+import numpy as np  # noqa
+import os  # noqa
+from sklearn import cluster  # noqa
+os.environ['ETS_TOOLKIT'] = 'qt4'
+
+
+class Segmenter(object):
+    def __init__(self):
+        self.is_done = False
+        self.corners = []
+        self.state = 0
+
+    def mouse_cb(self, event, x, y, flags, param):
+        if event == cv2.EVENT_LBUTTONDOWN:
+            if not self.is_done:
+                print 'click'
+                self.corners.append(np.array([x, y]))
+                self.state += 1
+                if self.state >= 4:
+                    print 'done'
+                    self.is_done = True
+                    self.state = 0
+
+        if event == cv2.EVENT_RBUTTONDOWN:
+            pass
+
+    def segment(self):
+        while (not self.is_done and not rospy.is_shutdown()):
+            if cv2.waitKey(50) & 0xFF == ord('q'):
+                break
+
+        self.is_done = False
+        rect = cv2.minAreaRect(np.array([np.array(self.corners)]))
+        box = cv2.cv.BoxPoints(rect)
+        box = np.int0(box)
+        return box
+
+
+class ImageGetter(object):
+    def __init__(self, topic_name='/down/left/image_raw'):
+        self.sub = Image_Subscriber(topic_name, self.get_image)
+
+        print 'getting topic', topic_name
+        self.frame = None
+        self.done = False
+
+    def get_image(self, msg):
+        self.frame = msg
+        self.done = True
+
+    def wait_for_image(self):
+        while not self.done and not rospy.is_shutdown():
+            if cv2.waitKey(50) & 0xFF == ord('q'):
+                exit()
+
+
+if __name__ == '__main__':
+    rospy.init_node('easy_thresh')
+
+    # Do the import after arg parse
+
+    ig = ImageGetter(args.topic_name)
+    ig.wait_for_image()
+    print 'Got image'
+    frame_initial = np.copy(ig.frame)
+
+    cv2.namedWindow("color")
+    seg = Segmenter()
+    cv2.setMouseCallback("color", seg.mouse_cb)
+
+    frame_unblurred = frame_initial[::2, ::2, :]
+    frame = frame_unblurred
+
+    if args.hsv:
+        analysis_image = cv2.cvtColor(frame, cv2.COLOR_BGR2HSV)
+    else:
+        analysis_image = frame
+
+    draw_image = np.copy(frame_unblurred)
+    seg_image = np.zeros_like(frame_unblurred[:, :, 0])
+
+    cv2.imshow("color", frame_unblurred)
+
+    box = seg.segment()
+    print 'finished'
+
+    cv2.drawContours(seg_image, [box], 0, 1, -2)
+
+    hsv_in_box = analysis_image[seg_image.astype(np.bool)]
+    hsv_list = np.reshape(hsv_in_box, (-1, 3))
+
+    clust = cluster.KMeans(n_clusters=2)
+    clust.fit(hsv_list)
+    print 'done clustering'
+
+    hsv_dsamp = hsv_list
+    labels_dsamp = clust.labels_
+
+    visual_threshold_tools.points_with_labels(
+        hsv_dsamp[:, 0],
+        hsv_dsamp[:, 1],
+        hsv_dsamp[:, 2],
+        labels_dsamp,
+        scale_factor=5.0,
+    )
+
+    thresholder = visual_threshold_tools.make_extent_dialog(
+        ranges=visual_threshold_tools.color_ranges[prefix],
+        image=analysis_image
+    )
+
+    while not rospy.is_shutdown():
+        if cv2.waitKey(50) & 0xFF == ord('q'):
+            break
+
+    ranges = thresholder.ranges
+    labels = visual_threshold_tools.np_inrange(hsv_dsamp, ranges[:, 0], ranges[:, 1])
+
+    # Print out thresholds that can be put in the configuration yaml
+    low = ranges[:, 0]
+    print ' {}_low: [{}, {}, {}]'.format(prefix, low[0], low[1], low[2])
+
+    high = ranges[:, 1]
+    print ' {}_high: [{}, {}, {}]'.format(prefix, high[0], high[1], high[2])
+
+    print 'np.' + repr(ranges)
+
+    cv2.destroyAllWindows()
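
easy_thresh's end product is the pair of {prefix}_low/{prefix}_high lines printed above, destined for a configuration yaml; consuming them at runtime is a single cv2.inRange call. A minimal sketch with placeholder bounds (the actual numbers come from the tool's output):

    import cv2
    import numpy as np

    # Placeholder values in the same [low, high] form easy_thresh prints
    hsv_low = np.array([40, 60, 60], dtype=np.uint8)
    hsv_high = np.array([80, 255, 255], dtype=np.uint8)

    frame = cv2.imread('frame.png')              # any BGR image
    hsv = cv2.cvtColor(frame, cv2.COLOR_BGR2HSV)
    mask = cv2.inRange(hsv, hsv_low, hsv_high)   # 255 inside the bounds, 0 outside
    segmented = cv2.bitwise_and(frame, frame, mask=mask)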
diff --git a/perception/sub8_perception/ros_tools/image_dumper.py b/perception/sub8_perception/ros_tools/image_dumper.py
new file mode 100755
index 00000000..317db72a
--- /dev/null
+++ b/perception/sub8_perception/ros_tools/image_dumper.py
@@ -0,0 +1,155 @@
+#!/usr/bin/python
+
+import os
+import re
+import sys
+import fnmatch
+import argparse
+from tqdm import tqdm
+
+import cv2
+import rospy
+import roslib
+import rosbag
+
+from cv_bridge import CvBridge, CvBridgeError
+
+# encoding=utf8
+reload(sys)
+sys.setdefaultencoding('utf8')
+
+
+class ImageHandler:
+    def __init__(self, filepath, savepath, bag):
+        self.bag = bag
+        self.msgs = 0  # Used to show remaining frames
+        self.image_index = 0
+        self.image_topics = []
+        self.bridge = CvBridge()
+        self.bagname = filepath.split('/')[-1]  # Input looks like ../../bagname.bag
+
+        if savepath is not None:
+            # Alternative save path (i.e. want to save to ext. drive)
+            if savepath[-1] != '/':
+                savepath += '/'
+            self.working_dir = savepath + self.bagname.split('.')[0]\
+                + '_images'
+        else:
+            # Save path defaults to launch path
+            self.working_dir = filepath.split('.')[0] + '_images'
+
+        if not os.path.exists(self.working_dir):
+            os.makedirs(self.working_dir)
+
+        print '\033[1m' + self.bagname + ':\033[0m'
+        print 'Saving images to: ', self.working_dir
+
+        self.parse_bag()
+        self.save_images()
+
+    def parse_bag(self):
+        types = []
+        bag_msg_cnt = []
+        topic_status = False
+        topics = self.bag.get_type_and_topic_info()[1].keys()
+
+        for i in range(0, len(self.bag.get_type_and_topic_info()[1].values())):
+            types.append(self.bag.get_type_and_topic_info()[1].values()[i][0])
+            bag_msg_cnt.append(self.bag.get_type_and_topic_info()[1].values()[i][1])
+
+        topics = zip(topics, types, bag_msg_cnt)
+
+        for topic, type, count in topics:
+            # Want to ignore image topics other than /image_raw
+            # TODO: Make this changeable
+            match = re.search(r'mono|rect|theora|color\b', topic)
+            if match:
+                pass
+            elif type == 'sensor_msgs/Image':
+                if topic_status is False:
+                    print 'Topic(s):'
+                    topic_status = True
+                print '    ' + topic
+                self.image_topics.append(topic)
+                self.msgs = self.msgs + count
+
+    def save_images(self):
+        # TODO: Add ability to pick up where it last left off
+        with tqdm(total=self.msgs) as pbar:
+            for topic, msg, t in self.bag.read_messages(
+                    topics=self.image_topics):
+                try:
+                    cv_image = self.bridge.imgmsg_to_cv2(msg, "bgr8")
+                    cv2.imwrite(self.working_dir + '/' +
+                                str(self.image_index) + '.png', cv_image)
+                    self.image_index = self.image_index + 1
+                except CvBridgeError, e:
+                    print e
+                pbar.update(1)
+        self.bag.close()
+        print self.image_index, 'images saved to', self.working_dir, '\n'
+
+if __name__ == '__main__':
+
+    desc_msg = ('Automated tool that traverses a directory dumping all of ' +
+                'the images from all non-rectified, non-mono, and non-' +
+                'theora topics found in all of the ROS bag files encountered.')
+
+    parser = argparse.ArgumentParser(description=desc_msg)
+
+    parser.add_argument('-f', '--filepath',
+                        help='File path containing the ROS bags')
+    parser.add_argument('-s', '--savepath',
+                        help='Path where the images will be saved')
+    parser.add_argument('-r', '--resize',
+                        help='Resolution to resize images to')
+
+    args = parser.parse_args()
+
+    # TODO: Path validation can be moved to ImageHandler class
+    if args.filepath is not None:
+        filepath = args.filepath
+    else:
+        print 'No bag source provided'
+        filepath = sys.path[0]
+
+    if args.savepath is not None:
+        savepath = args.savepath
+        print 'Setting save path to:', savepath
+    else:
+        savepath = None
+
+    if args.resize:
+        # We're not ready for the future yet
+        pass
+
+    print '\033[1mFilepath:', filepath + '\033[0m'  # Print working directory
+
+    matches = []
+    bag_count = 0
+
+    if filepath[-4:] == '.bag':
+        bag = rosbag.Bag(filepath)
+        ImageHandler(filepath, savepath, bag)
+    else:
+        # Generate list of file paths containing bag files
+        for root, dirnames, filenames in os.walk(filepath):
+            for filename in fnmatch.filter(filenames, '*.bag'):
+                if not filename.startswith('.'):
+                    matches.append(os.path.join(root, filename))
+
+        print '\033[1m' + str(len(matches)) + ' bags found\033[0m\n'
+
+        # Iterate through found bags
+        for bag_dir in matches:
+            bag = rosbag.Bag(bag_dir)
+            try:
+                ImageHandler(bag_dir, savepath, bag)
+                bag_count = bag_count + 1
+            except rospy.ROSInterruptException:
+                pass
+
+        print 'Processed', bag_count, 'bags.\n'
+
+    print "Done!"
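
image_dumper's save_images still carries a TODO about picking up where a previous run left off. One hedged way to honor it, assuming the sequential <n>.png naming used above, is to seed image_index from what is already on disk (sketch only, not part of the patch):

    import os

    def next_image_index(working_dir):
        # Highest existing <n>.png plus one, so a rerun appends instead of overwriting
        indices = [int(os.path.splitext(name)[0])
                   for name in os.listdir(working_dir)
                   if name.endswith('.png') and os.path.splitext(name)[0].isdigit()]
        return max(indices) + 1 if indices else 0

A resumed run would also need to skip that many messages of the bag, since message order, not filename, is the real cursor.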
From 94129f2b4d3d707409eba0b00298620b53b0f7ac Mon Sep 17 00:00:00 2001
From: Anthony Olive
Date: Fri, 24 Feb 2017 00:11:07 -0500
Subject: [PATCH 218/267] REPO: Update references to uf-mil repositories after
 renaming

---
 perception/navigator_vision/object_classification/stc_train.py | 2 +-
 perception/navigator_vision/package.xml                        | 2 +-
 utils/navigator_tools/navigator_tools/bag_crawler.py           | 2 +-
 3 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/perception/navigator_vision/object_classification/stc_train.py b/perception/navigator_vision/object_classification/stc_train.py
index 1c633ecb..375c4da1 100644
--- a/perception/navigator_vision/object_classification/stc_train.py
+++ b/perception/navigator_vision/object_classification/stc_train.py
@@ -205,4 +205,4 @@ def pickle(self, file):
         succ, color_vec = get_rectangle(roi)
         if succ:
             t.train(img, color_vec, color)
-    t.pickle("/home/tess/mil_ws/src/Navigator/mission_systems/navigator_scan_the_code/navigator_scan_the_code/scan_the_code_lib/svm_train.p")
+    t.pickle("/home/tess/mil_ws/src/NaviGator/mission_systems/navigator_scan_the_code/navigator_scan_the_code/scan_the_code_lib/svm_train.p")

diff --git a/perception/navigator_vision/package.xml b/perception/navigator_vision/package.xml
index 2ef95ee2..cd84ab19 100644
--- a/perception/navigator_vision/package.xml
+++ b/perception/navigator_vision/package.xml
@@ -2,7 +2,7 @@
   navigator_vision
   1.0.0
-  Nodes and libraries used for computer vision perception on Navigator
+  Nodes and libraries used for computer vision perception on NaviGator
   tess
   David Soto

diff --git a/utils/navigator_tools/navigator_tools/bag_crawler.py b/utils/navigator_tools/navigator_tools/bag_crawler.py
index 46ab8a26..47401ec0 100644
--- a/utils/navigator_tools/navigator_tools/bag_crawler.py
+++ b/utils/navigator_tools/navigator_tools/bag_crawler.py
@@ -63,7 +63,7 @@ def image_info_topics(self, cam="right"):
 if __name__ == '__main__':
     import cv2

-    bag = '/home/jacob/catkin_ws/src/Sub8/gnc/sub8_perception/data/bag_test.bag'
+    bag = '/home/jacob/catkin_ws/src/SubjuGator/gnc/sub8_perception/data/bag_test.bag'

     bc = BagCrawler(bag)
     for image in bc.crawl(topic=bc.image_topics[0]):

From 824d2957d5a921dc0fabb87fb625bed6f59a90b9 Mon Sep 17 00:00:00 2001
From: Kevin Allen
Date: Fri, 17 Mar 2017 00:30:54 -0400
Subject: [PATCH 219/267] REPO: temporarily move all great merge files to
 .great_merge so catkin doesn't build it

---
 .../drivers}/hydrophones/CMakeLists.txt        | 0
 .../drivers}/hydrophones/README.md             | 0
 .../drivers}/hydrophones/msg/Debug.msg         | 0
 .../drivers}/hydrophones/msg/Ping.msg          | 0
 .../drivers}/hydrophones/msg/ProcessedPing.msg | 0
.../drivers}/hydrophones/package.xml | 0 .../drivers}/hydrophones/scripts/hydrophones | 0 .../drivers}/hydrophones/scripts/ping_logger | 0 .../drivers}/hydrophones/scripts/ping_plotter | 0 .../drivers}/hydrophones/scripts/ping_printer | 0 .../drivers}/hydrophones/setup.py | 0 .../drivers}/hydrophones/src/__init__.py | 0 .../hydrophones/src/hydrophones/__init__.py | 0 .../hydrophones/src/hydrophones/algorithms.py | 0 .../drivers}/hydrophones/src/hydrophones/util.py | 0 .../drivers}/paulboard_driver/CMakeLists.txt | 0 .../drivers}/paulboard_driver/README.md | 0 .../paulboard_driver/blobs/SimpleHyd2013.bin | Bin .../drivers}/paulboard_driver/package.xml | 0 .../paulboard_driver/scripts/paulboard_driver | 0 .../drivers}/paulboard_driver/setup.py | 0 .../src/paulboard_driver/SimpleHyd2013.bin | Bin .../src/paulboard_driver/__init__.py | 0 .../src/paulboard_driver/ais_bootloader.py | 0 .../drivers}/sub8_sonar/CMakeLists.txt | 0 .../drivers}/sub8_sonar/README.md | 0 .../drivers}/sub8_sonar/launch/test.launch | 0 .../drivers}/sub8_sonar/multilateration/__init__.py | 0 .../sub8_sonar/multilateration/multilateration.py | 0 .../drivers}/sub8_sonar/nodes/plotter.py | 0 .../drivers}/sub8_sonar/nodes/sonar_test.py | 0 .../drivers}/sub8_sonar/package.xml | 0 .../drivers}/sub8_sonar/setup.py | 0 .../drivers}/sub8_sonar/sub8_sonar/__init__.py | 0 .../drivers}/sub8_sonar/sub8_sonar/sonar_driver.py | 0 .../perception}/navigator_vision/CMakeLists.txt | 0 .../perception}/navigator_vision/README.md | 0 .../perception}/navigator_vision/calibration.xml | 0 .../config/underwater_shape_detection.yaml | 0 .../navigator_vision/exFAST_SparseStereo/GPL.txt | 0 .../navigator_vision/exFAST_SparseStereo/README.txt | 0 .../example-data/calibration.xml | 0 .../exFAST_SparseStereo/example-data/left.png | Bin .../exFAST_SparseStereo/example-data/right.png | Bin .../exFAST_SparseStereo/src/example/example.cpp | 0 .../src/sparsestereo/CMakeLists.txt | 0 .../src/sparsestereo/calibrationresult.cpp | 0 .../src/sparsestereo/calibrationresult.h | 0 .../src/sparsestereo/census-inl.h | 0 .../exFAST_SparseStereo/src/sparsestereo/census.cpp | 0 .../exFAST_SparseStereo/src/sparsestereo/census.h | 0 .../src/sparsestereo/censuswindow.h | 0 .../src/sparsestereo/exception.h | 0 .../src/sparsestereo/extendedfast-inl.h | 0 .../src/sparsestereo/extendedfast.cpp | 0 .../src/sparsestereo/extendedfast.h | 0 .../src/sparsestereo/fast9-inl.h | 0 .../exFAST_SparseStereo/src/sparsestereo/fast9.h | 0 .../src/sparsestereo/hammingdistance.cpp | 0 .../src/sparsestereo/hammingdistance.h | 0 .../src/sparsestereo/imageconversion.cpp | 0 .../src/sparsestereo/imageconversion.h | 0 .../exFAST_SparseStereo/src/sparsestereo/simd.cpp | 0 .../exFAST_SparseStereo/src/sparsestereo/simd.h | 0 .../src/sparsestereo/sparsematch.h | 0 .../src/sparsestereo/sparserectification.cpp | 0 .../src/sparsestereo/sparserectification.h | 0 .../src/sparsestereo/sparsestereo-inl.h | 0 .../src/sparsestereo/sparsestereo.h | 0 .../src/sparsestereo/stereorectification.cpp | 0 .../src/sparsestereo/stereorectification.h | 0 .../missions/underwater_shape_identification.hpp | 0 .../navigator_vision_lib/active_contours.hpp | 0 .../colorizer/camera_observer.hpp | 0 .../colorizer/color_observation.hpp | 0 .../navigator_vision_lib/colorizer/common.hpp | 0 .../colorizer/pcd_colorizer.hpp | 0 .../colorizer/single_cloud_processor.hpp | 0 .../include/navigator_vision_lib/cv_tools.hpp | 0 .../image_acquisition/camera_frame.hpp | 0 .../image_acquisition/camera_frame_sequence.hpp | 0 
.../image_acquisition/camera_model.hpp | 0 .../image_acquisition/ros_camera_stream.hpp | 0 .../navigator_vision_lib/image_filtering.hpp | 0 .../navigator_vision_lib/pcd_sub_pub_algorithm.hpp | 0 .../navigator_vision_lib/point_cloud_algorithms.hpp | 0 .../include/navigator_vision_lib/visualization.hpp | 0 .../navigator_vision/nodes/A_C_test_vision.cpp | 0 .../navigator_vision/nodes/camera_stream_demo.cpp | 0 .../camera_to_lidar/CameraLidarTransformer.cpp | 0 .../camera_to_lidar/CameraLidarTransformer.hpp | 0 .../navigator_vision/nodes/camera_to_lidar/main.cpp | 0 .../navigator_vision/nodes/database_color.py | 0 .../navigator_vision/nodes/object_classifier.py | 0 .../navigator_vision/nodes/pinger_finder.py | 0 .../navigator_vision/nodes/pinger_locator.py | 0 .../nodes/shape_identification/DockShapeVision.cpp | 0 .../nodes/shape_identification/DockShapeVision.h | 0 .../GrayscaleContour/GrayscaleContour.cpp | 0 .../GrayscaleContour/GrayscaleContour.h | 0 .../nodes/shape_identification/ShapeTracker.cpp | 0 .../nodes/shape_identification/ShapeTracker.h | 0 .../nodes/shape_identification/TrackedShape.cpp | 0 .../nodes/shape_identification/TrackedShape.h | 0 .../nodes/shape_identification/main.cpp | 0 .../navigator_vision/nodes/shooter_finder.py | 0 .../navigator_vision/nodes/start_gate_manual.py | 0 .../nodes/stereo_point_cloud_driver.cpp | 0 .../underwater_shape_identification_vision.cpp | 0 .../nodes/velodyne_pcd_colorizer.cpp | 0 .../object_classification/HOG_descriptor.py | 0 .../object_classification/SVM_classifier.py | 0 .../object_classification/__init__.py | 0 .../object_classification/depickler.py | 0 .../object_classification/depicklify.py | 0 .../object_classification/lidar_to_image.py | 0 .../object_classification/median_flow.py | 0 .../object_classification/object_classification.py | 0 .../object_classification/roi_generator.py | 0 .../object_classification/stc_train.py | 0 .../perception}/navigator_vision/package.xml | 0 .../ros_tools/on_the_fly_thresholder.py | 0 .../perception}/navigator_vision/setup.py | 0 .../missions/underwater_shape_identification.cpp | 0 .../src/navigator_vision_lib/active_contours.cpp | 0 .../colorizer/camera_observer.cpp | 0 .../colorizer/color_observation.cpp | 0 .../colorizer/pcd_colorizer.cpp | 0 .../colorizer/single_cloud_processor.cpp | 0 .../src/navigator_vision_lib/cv_utils.cc | 0 .../src/navigator_vision_lib/image_filtering.cpp | 0 .../navigator_vision_lib/point_cloud_algorithms.cc | 0 .../src/navigator_vision_lib/visualization.cc | 0 .../underwater_shape_identification/circle.png | Bin .../underwater_shape_identification/cross.png | Bin .../underwater_shape_identification/triangle.png | Bin .../sub8_perception/ros_tools/easy_thresh.py | 0 .../sub8_perception/ros_tools/image_dumper.py | 0 .../utils}/navigator_tools/CMakeLists.txt | 0 .../utils}/navigator_tools/cfg/BoundsConfig.cfg | 0 .../utils}/navigator_tools/cfg/channel.yaml | 0 .../utils}/navigator_tools/cfg/course_a.yaml | 0 .../utils}/navigator_tools/cfg/course_b.yaml | 0 .../utils}/navigator_tools/cfg/course_c.yaml | 0 .../utils}/navigator_tools/cfg/sim.yaml | 0 .../utils}/navigator_tools/cfg/wauberg.yaml | 0 .../navigator_tools/include/navigator_tools.hpp | 0 .../navigator_tools/navigator_tools/__init__.py | 0 .../navigator_tools/navigator_tools/bag_crawler.py | 0 .../navigator_tools/navigator_tools/cv_debug.py | 0 .../navigator_tools/general_helpers.py | 0 .../navigator_tools/geometry_helpers.py | 0 .../navigator_tools/image_helpers.py | 0 .../navigator_tools/navigator_tools/init_helpers.py 
| 0 .../navigator_tools/missing_perception_object.py | 0 .../navigator_tools/navigator_tools/move_helper.py | 0 .../navigator_tools/navigator_tools/msg_helpers.py | 0 .../navigator_tools/object_database_helper.py | 0 .../navigator_tools/navigator_tools/rviz_helpers.py | 0 .../navigator_tools/navigator_tools/self_check.py | 0 .../navigator_tools/threading_helpers.py | 0 .../utils}/navigator_tools/nodes/boat_info.py | 0 .../utils}/navigator_tools/nodes/bounds.py | 0 .../navigator_tools/nodes/clicked_point_recorder.py | 0 .../nodes/coordinate_conversion_server.py | 0 .../nodes/estimated_object_setter.py | 0 .../navigator_tools/nodes/fake_action_server.py | 0 .../utils}/navigator_tools/nodes/ogrid_draw.py | 0 .../utils}/navigator_tools/nodes/rviz_waypoint.py | 0 .../utils}/navigator_tools/nodes/tf_fudger.py | 0 .../utils}/navigator_tools/nodes/tf_to_gazebo.py | 0 .../utils}/navigator_tools/nodes/video_player | 0 .../utils}/navigator_tools/package.xml | 0 .../utils}/navigator_tools/setup.py | 0 .../utils}/navigator_tools/src/navigator_tools.cpp | 0 .../navigator_tools/tests/math_helpers_test.py | 0 .../utils}/sub8_ros_tools/CMakeLists.txt | 0 .../utils}/sub8_ros_tools/package.xml | 0 .../utils}/sub8_ros_tools/readme.md | 0 .../utils}/sub8_ros_tools/setup.py | 0 .../sub8_ros_tools/sub8_misc_tools/__init__.py | 0 .../sub8_ros_tools/sub8_misc_tools/download.py | 0 .../sub8_ros_tools/sub8_misc_tools/text_effects.py | 0 .../sub8_ros_tools/sub8_ros_tools/__init__.py | 0 .../sub8_ros_tools/sub8_ros_tools/func_helpers.py | 0 .../sub8_ros_tools/geometry_helpers.py | 0 .../sub8_ros_tools/sub8_ros_tools/image_helpers.py | 0 .../sub8_ros_tools/sub8_ros_tools/init_helpers.py | 0 .../sub8_ros_tools/sub8_ros_tools/msg_helpers.py | 0 .../sub8_ros_tools/threading_helpers.py | 0 .../utils}/sub8_ros_tools/sub8_tools/__init__.py | 0 .../sub8_ros_tools/test_ros_tools/test_ros_tools.py | 0 .../utils}/uf_common/CMakeLists.txt | 0 .../utils}/uf_common/action/MoveTo.action | 0 .../uf_common/include/uf_common/msg_helpers.h | 0 .../uf_common/include/uf_common/param_helpers.h | 0 .../utils}/uf_common/msg/Acceleration.msg | 0 .../utils}/uf_common/msg/Float64Stamped.msg | 0 .../utils}/uf_common/msg/PoseTwist.msg | 0 .../utils}/uf_common/msg/PoseTwistStamped.msg | 0 .../utils}/uf_common/msg/VelocityMeasurement.msg | 0 .../utils}/uf_common/msg/VelocityMeasurements.msg | 0 {utils => .great_merge/utils}/uf_common/package.xml | 0 .../utils}/uf_common/scripts/param_saver | 0 {utils => .great_merge/utils}/uf_common/setup.py | 0 .../utils}/uf_common/uf_common/__init__.py | 0 206 files changed, 0 insertions(+), 0 deletions(-) rename {drivers => .great_merge/drivers}/hydrophones/CMakeLists.txt (100%) rename {drivers => .great_merge/drivers}/hydrophones/README.md (100%) rename {drivers => .great_merge/drivers}/hydrophones/msg/Debug.msg (100%) rename {drivers => .great_merge/drivers}/hydrophones/msg/Ping.msg (100%) rename {drivers => .great_merge/drivers}/hydrophones/msg/ProcessedPing.msg (100%) rename {drivers => .great_merge/drivers}/hydrophones/package.xml (100%) rename {drivers => .great_merge/drivers}/hydrophones/scripts/hydrophones (100%) rename {drivers => .great_merge/drivers}/hydrophones/scripts/ping_logger (100%) rename {drivers => .great_merge/drivers}/hydrophones/scripts/ping_plotter (100%) rename {drivers => .great_merge/drivers}/hydrophones/scripts/ping_printer (100%) rename {drivers => .great_merge/drivers}/hydrophones/setup.py (100%) rename {drivers => .great_merge/drivers}/hydrophones/src/__init__.py (100%) rename 
{drivers => .great_merge/drivers}/hydrophones/src/hydrophones/__init__.py (100%) rename {drivers => .great_merge/drivers}/hydrophones/src/hydrophones/algorithms.py (100%) rename {drivers => .great_merge/drivers}/hydrophones/src/hydrophones/util.py (100%) rename {drivers => .great_merge/drivers}/paulboard_driver/CMakeLists.txt (100%) rename {drivers => .great_merge/drivers}/paulboard_driver/README.md (100%) rename {drivers => .great_merge/drivers}/paulboard_driver/blobs/SimpleHyd2013.bin (100%) rename {drivers => .great_merge/drivers}/paulboard_driver/package.xml (100%) rename {drivers => .great_merge/drivers}/paulboard_driver/scripts/paulboard_driver (100%) rename {drivers => .great_merge/drivers}/paulboard_driver/setup.py (100%) rename {drivers => .great_merge/drivers}/paulboard_driver/src/paulboard_driver/SimpleHyd2013.bin (100%) rename {drivers => .great_merge/drivers}/paulboard_driver/src/paulboard_driver/__init__.py (100%) rename {drivers => .great_merge/drivers}/paulboard_driver/src/paulboard_driver/ais_bootloader.py (100%) rename {drivers => .great_merge/drivers}/sub8_sonar/CMakeLists.txt (100%) rename {drivers => .great_merge/drivers}/sub8_sonar/README.md (100%) rename {drivers => .great_merge/drivers}/sub8_sonar/launch/test.launch (100%) rename {drivers => .great_merge/drivers}/sub8_sonar/multilateration/__init__.py (100%) rename {drivers => .great_merge/drivers}/sub8_sonar/multilateration/multilateration.py (100%) rename {drivers => .great_merge/drivers}/sub8_sonar/nodes/plotter.py (100%) rename {drivers => .great_merge/drivers}/sub8_sonar/nodes/sonar_test.py (100%) rename {drivers => .great_merge/drivers}/sub8_sonar/package.xml (100%) rename {drivers => .great_merge/drivers}/sub8_sonar/setup.py (100%) rename {drivers => .great_merge/drivers}/sub8_sonar/sub8_sonar/__init__.py (100%) rename {drivers => .great_merge/drivers}/sub8_sonar/sub8_sonar/sonar_driver.py (100%) rename {perception => .great_merge/perception}/navigator_vision/CMakeLists.txt (100%) rename {perception => .great_merge/perception}/navigator_vision/README.md (100%) rename {perception => .great_merge/perception}/navigator_vision/calibration.xml (100%) rename {perception => .great_merge/perception}/navigator_vision/config/underwater_shape_detection.yaml (100%) rename {perception => .great_merge/perception}/navigator_vision/exFAST_SparseStereo/GPL.txt (100%) rename {perception => .great_merge/perception}/navigator_vision/exFAST_SparseStereo/README.txt (100%) rename {perception => .great_merge/perception}/navigator_vision/exFAST_SparseStereo/example-data/calibration.xml (100%) rename {perception => .great_merge/perception}/navigator_vision/exFAST_SparseStereo/example-data/left.png (100%) rename {perception => .great_merge/perception}/navigator_vision/exFAST_SparseStereo/example-data/right.png (100%) rename {perception => .great_merge/perception}/navigator_vision/exFAST_SparseStereo/src/example/example.cpp (100%) rename {perception => .great_merge/perception}/navigator_vision/exFAST_SparseStereo/src/sparsestereo/CMakeLists.txt (100%) rename {perception => .great_merge/perception}/navigator_vision/exFAST_SparseStereo/src/sparsestereo/calibrationresult.cpp (100%) rename {perception => .great_merge/perception}/navigator_vision/exFAST_SparseStereo/src/sparsestereo/calibrationresult.h (100%) rename {perception => .great_merge/perception}/navigator_vision/exFAST_SparseStereo/src/sparsestereo/census-inl.h (100%) rename {perception => .great_merge/perception}/navigator_vision/exFAST_SparseStereo/src/sparsestereo/census.cpp 
(100%) rename {perception => .great_merge/perception}/navigator_vision/exFAST_SparseStereo/src/sparsestereo/census.h (100%) rename {perception => .great_merge/perception}/navigator_vision/exFAST_SparseStereo/src/sparsestereo/censuswindow.h (100%) rename {perception => .great_merge/perception}/navigator_vision/exFAST_SparseStereo/src/sparsestereo/exception.h (100%) rename {perception => .great_merge/perception}/navigator_vision/exFAST_SparseStereo/src/sparsestereo/extendedfast-inl.h (100%) rename {perception => .great_merge/perception}/navigator_vision/exFAST_SparseStereo/src/sparsestereo/extendedfast.cpp (100%) rename {perception => .great_merge/perception}/navigator_vision/exFAST_SparseStereo/src/sparsestereo/extendedfast.h (100%) rename {perception => .great_merge/perception}/navigator_vision/exFAST_SparseStereo/src/sparsestereo/fast9-inl.h (100%) rename {perception => .great_merge/perception}/navigator_vision/exFAST_SparseStereo/src/sparsestereo/fast9.h (100%) rename {perception => .great_merge/perception}/navigator_vision/exFAST_SparseStereo/src/sparsestereo/hammingdistance.cpp (100%) rename {perception => .great_merge/perception}/navigator_vision/exFAST_SparseStereo/src/sparsestereo/hammingdistance.h (100%) rename {perception => .great_merge/perception}/navigator_vision/exFAST_SparseStereo/src/sparsestereo/imageconversion.cpp (100%) rename {perception => .great_merge/perception}/navigator_vision/exFAST_SparseStereo/src/sparsestereo/imageconversion.h (100%) rename {perception => .great_merge/perception}/navigator_vision/exFAST_SparseStereo/src/sparsestereo/simd.cpp (100%) rename {perception => .great_merge/perception}/navigator_vision/exFAST_SparseStereo/src/sparsestereo/simd.h (100%) rename {perception => .great_merge/perception}/navigator_vision/exFAST_SparseStereo/src/sparsestereo/sparsematch.h (100%) rename {perception => .great_merge/perception}/navigator_vision/exFAST_SparseStereo/src/sparsestereo/sparserectification.cpp (100%) rename {perception => .great_merge/perception}/navigator_vision/exFAST_SparseStereo/src/sparsestereo/sparserectification.h (100%) rename {perception => .great_merge/perception}/navigator_vision/exFAST_SparseStereo/src/sparsestereo/sparsestereo-inl.h (100%) rename {perception => .great_merge/perception}/navigator_vision/exFAST_SparseStereo/src/sparsestereo/sparsestereo.h (100%) rename {perception => .great_merge/perception}/navigator_vision/exFAST_SparseStereo/src/sparsestereo/stereorectification.cpp (100%) rename {perception => .great_merge/perception}/navigator_vision/exFAST_SparseStereo/src/sparsestereo/stereorectification.h (100%) rename {perception => .great_merge/perception}/navigator_vision/include/missions/underwater_shape_identification.hpp (100%) rename {perception => .great_merge/perception}/navigator_vision/include/navigator_vision_lib/active_contours.hpp (100%) rename {perception => .great_merge/perception}/navigator_vision/include/navigator_vision_lib/colorizer/camera_observer.hpp (100%) rename {perception => .great_merge/perception}/navigator_vision/include/navigator_vision_lib/colorizer/color_observation.hpp (100%) rename {perception => .great_merge/perception}/navigator_vision/include/navigator_vision_lib/colorizer/common.hpp (100%) rename {perception => .great_merge/perception}/navigator_vision/include/navigator_vision_lib/colorizer/pcd_colorizer.hpp (100%) rename {perception => .great_merge/perception}/navigator_vision/include/navigator_vision_lib/colorizer/single_cloud_processor.hpp (100%) rename {perception => 
.great_merge/perception}/navigator_vision/include/navigator_vision_lib/cv_tools.hpp (100%) rename {perception => .great_merge/perception}/navigator_vision/include/navigator_vision_lib/image_acquisition/camera_frame.hpp (100%) rename {perception => .great_merge/perception}/navigator_vision/include/navigator_vision_lib/image_acquisition/camera_frame_sequence.hpp (100%) rename {perception => .great_merge/perception}/navigator_vision/include/navigator_vision_lib/image_acquisition/camera_model.hpp (100%) rename {perception => .great_merge/perception}/navigator_vision/include/navigator_vision_lib/image_acquisition/ros_camera_stream.hpp (100%) rename {perception => .great_merge/perception}/navigator_vision/include/navigator_vision_lib/image_filtering.hpp (100%) rename {perception => .great_merge/perception}/navigator_vision/include/navigator_vision_lib/pcd_sub_pub_algorithm.hpp (100%) rename {perception => .great_merge/perception}/navigator_vision/include/navigator_vision_lib/point_cloud_algorithms.hpp (100%) rename {perception => .great_merge/perception}/navigator_vision/include/navigator_vision_lib/visualization.hpp (100%) rename {perception => .great_merge/perception}/navigator_vision/nodes/A_C_test_vision.cpp (100%) rename {perception => .great_merge/perception}/navigator_vision/nodes/camera_stream_demo.cpp (100%) rename {perception => .great_merge/perception}/navigator_vision/nodes/camera_to_lidar/CameraLidarTransformer.cpp (100%) rename {perception => .great_merge/perception}/navigator_vision/nodes/camera_to_lidar/CameraLidarTransformer.hpp (100%) rename {perception => .great_merge/perception}/navigator_vision/nodes/camera_to_lidar/main.cpp (100%) rename {perception => .great_merge/perception}/navigator_vision/nodes/database_color.py (100%) rename {perception => .great_merge/perception}/navigator_vision/nodes/object_classifier.py (100%) rename {perception => .great_merge/perception}/navigator_vision/nodes/pinger_finder.py (100%) rename {perception => .great_merge/perception}/navigator_vision/nodes/pinger_locator.py (100%) rename {perception => .great_merge/perception}/navigator_vision/nodes/shape_identification/DockShapeVision.cpp (100%) rename {perception => .great_merge/perception}/navigator_vision/nodes/shape_identification/DockShapeVision.h (100%) rename {perception => .great_merge/perception}/navigator_vision/nodes/shape_identification/GrayscaleContour/GrayscaleContour.cpp (100%) rename {perception => .great_merge/perception}/navigator_vision/nodes/shape_identification/GrayscaleContour/GrayscaleContour.h (100%) rename {perception => .great_merge/perception}/navigator_vision/nodes/shape_identification/ShapeTracker.cpp (100%) rename {perception => .great_merge/perception}/navigator_vision/nodes/shape_identification/ShapeTracker.h (100%) rename {perception => .great_merge/perception}/navigator_vision/nodes/shape_identification/TrackedShape.cpp (100%) rename {perception => .great_merge/perception}/navigator_vision/nodes/shape_identification/TrackedShape.h (100%) rename {perception => .great_merge/perception}/navigator_vision/nodes/shape_identification/main.cpp (100%) rename {perception => .great_merge/perception}/navigator_vision/nodes/shooter_finder.py (100%) rename {perception => .great_merge/perception}/navigator_vision/nodes/start_gate_manual.py (100%) rename {perception => .great_merge/perception}/navigator_vision/nodes/stereo_point_cloud_driver.cpp (100%) rename {perception => .great_merge/perception}/navigator_vision/nodes/underwater_shape_identification_vision.cpp (100%) rename 
{perception => .great_merge/perception}/navigator_vision/nodes/velodyne_pcd_colorizer.cpp (100%) rename {perception => .great_merge/perception}/navigator_vision/object_classification/HOG_descriptor.py (100%) rename {perception => .great_merge/perception}/navigator_vision/object_classification/SVM_classifier.py (100%) rename {perception => .great_merge/perception}/navigator_vision/object_classification/__init__.py (100%) rename {perception => .great_merge/perception}/navigator_vision/object_classification/depickler.py (100%) rename {perception => .great_merge/perception}/navigator_vision/object_classification/depicklify.py (100%) rename {perception => .great_merge/perception}/navigator_vision/object_classification/lidar_to_image.py (100%) rename {perception => .great_merge/perception}/navigator_vision/object_classification/median_flow.py (100%) rename {perception => .great_merge/perception}/navigator_vision/object_classification/object_classification.py (100%) rename {perception => .great_merge/perception}/navigator_vision/object_classification/roi_generator.py (100%) rename {perception => .great_merge/perception}/navigator_vision/object_classification/stc_train.py (100%) rename {perception => .great_merge/perception}/navigator_vision/package.xml (100%) rename {perception => .great_merge/perception}/navigator_vision/ros_tools/on_the_fly_thresholder.py (100%) rename {perception => .great_merge/perception}/navigator_vision/setup.py (100%) rename {perception => .great_merge/perception}/navigator_vision/src/missions/underwater_shape_identification.cpp (100%) rename {perception => .great_merge/perception}/navigator_vision/src/navigator_vision_lib/active_contours.cpp (100%) rename {perception => .great_merge/perception}/navigator_vision/src/navigator_vision_lib/colorizer/camera_observer.cpp (100%) rename {perception => .great_merge/perception}/navigator_vision/src/navigator_vision_lib/colorizer/color_observation.cpp (100%) rename {perception => .great_merge/perception}/navigator_vision/src/navigator_vision_lib/colorizer/pcd_colorizer.cpp (100%) rename {perception => .great_merge/perception}/navigator_vision/src/navigator_vision_lib/colorizer/single_cloud_processor.cpp (100%) rename {perception => .great_merge/perception}/navigator_vision/src/navigator_vision_lib/cv_utils.cc (100%) rename {perception => .great_merge/perception}/navigator_vision/src/navigator_vision_lib/image_filtering.cpp (100%) rename {perception => .great_merge/perception}/navigator_vision/src/navigator_vision_lib/point_cloud_algorithms.cc (100%) rename {perception => .great_merge/perception}/navigator_vision/src/navigator_vision_lib/visualization.cc (100%) rename {perception => .great_merge/perception}/navigator_vision/templates/underwater_shape_identification/circle.png (100%) rename {perception => .great_merge/perception}/navigator_vision/templates/underwater_shape_identification/cross.png (100%) rename {perception => .great_merge/perception}/navigator_vision/templates/underwater_shape_identification/triangle.png (100%) rename {perception => .great_merge/perception}/sub8_perception/ros_tools/easy_thresh.py (100%) rename {perception => .great_merge/perception}/sub8_perception/ros_tools/image_dumper.py (100%) rename {utils => .great_merge/utils}/navigator_tools/CMakeLists.txt (100%) rename {utils => .great_merge/utils}/navigator_tools/cfg/BoundsConfig.cfg (100%) rename {utils => .great_merge/utils}/navigator_tools/cfg/channel.yaml (100%) rename {utils => .great_merge/utils}/navigator_tools/cfg/course_a.yaml (100%) rename {utils 
=> .great_merge/utils}/navigator_tools/cfg/course_b.yaml (100%) rename {utils => .great_merge/utils}/navigator_tools/cfg/course_c.yaml (100%) rename {utils => .great_merge/utils}/navigator_tools/cfg/sim.yaml (100%) rename {utils => .great_merge/utils}/navigator_tools/cfg/wauberg.yaml (100%) rename {utils => .great_merge/utils}/navigator_tools/include/navigator_tools.hpp (100%) rename {utils => .great_merge/utils}/navigator_tools/navigator_tools/__init__.py (100%) rename {utils => .great_merge/utils}/navigator_tools/navigator_tools/bag_crawler.py (100%) rename {utils => .great_merge/utils}/navigator_tools/navigator_tools/cv_debug.py (100%) rename {utils => .great_merge/utils}/navigator_tools/navigator_tools/general_helpers.py (100%) rename {utils => .great_merge/utils}/navigator_tools/navigator_tools/geometry_helpers.py (100%) rename {utils => .great_merge/utils}/navigator_tools/navigator_tools/image_helpers.py (100%) rename {utils => .great_merge/utils}/navigator_tools/navigator_tools/init_helpers.py (100%) rename {utils => .great_merge/utils}/navigator_tools/navigator_tools/missing_perception_object.py (100%) rename {utils => .great_merge/utils}/navigator_tools/navigator_tools/move_helper.py (100%) rename {utils => .great_merge/utils}/navigator_tools/navigator_tools/msg_helpers.py (100%) rename {utils => .great_merge/utils}/navigator_tools/navigator_tools/object_database_helper.py (100%) rename {utils => .great_merge/utils}/navigator_tools/navigator_tools/rviz_helpers.py (100%) rename {utils => .great_merge/utils}/navigator_tools/navigator_tools/self_check.py (100%) rename {utils => .great_merge/utils}/navigator_tools/navigator_tools/threading_helpers.py (100%) rename {utils => .great_merge/utils}/navigator_tools/nodes/boat_info.py (100%) rename {utils => .great_merge/utils}/navigator_tools/nodes/bounds.py (100%) rename {utils => .great_merge/utils}/navigator_tools/nodes/clicked_point_recorder.py (100%) rename {utils => .great_merge/utils}/navigator_tools/nodes/coordinate_conversion_server.py (100%) rename {utils => .great_merge/utils}/navigator_tools/nodes/estimated_object_setter.py (100%) rename {utils => .great_merge/utils}/navigator_tools/nodes/fake_action_server.py (100%) rename {utils => .great_merge/utils}/navigator_tools/nodes/ogrid_draw.py (100%) rename {utils => .great_merge/utils}/navigator_tools/nodes/rviz_waypoint.py (100%) rename {utils => .great_merge/utils}/navigator_tools/nodes/tf_fudger.py (100%) rename {utils => .great_merge/utils}/navigator_tools/nodes/tf_to_gazebo.py (100%) rename {utils => .great_merge/utils}/navigator_tools/nodes/video_player (100%) rename {utils => .great_merge/utils}/navigator_tools/package.xml (100%) rename {utils => .great_merge/utils}/navigator_tools/setup.py (100%) rename {utils => .great_merge/utils}/navigator_tools/src/navigator_tools.cpp (100%) rename {utils => .great_merge/utils}/navigator_tools/tests/math_helpers_test.py (100%) rename {utils => .great_merge/utils}/sub8_ros_tools/CMakeLists.txt (100%) rename {utils => .great_merge/utils}/sub8_ros_tools/package.xml (100%) rename {utils => .great_merge/utils}/sub8_ros_tools/readme.md (100%) rename {utils => .great_merge/utils}/sub8_ros_tools/setup.py (100%) rename {utils => .great_merge/utils}/sub8_ros_tools/sub8_misc_tools/__init__.py (100%) rename {utils => .great_merge/utils}/sub8_ros_tools/sub8_misc_tools/download.py (100%) rename {utils => .great_merge/utils}/sub8_ros_tools/sub8_misc_tools/text_effects.py (100%) rename {utils => 
.great_merge/utils}/sub8_ros_tools/sub8_ros_tools/__init__.py (100%) rename {utils => .great_merge/utils}/sub8_ros_tools/sub8_ros_tools/func_helpers.py (100%) rename {utils => .great_merge/utils}/sub8_ros_tools/sub8_ros_tools/geometry_helpers.py (100%) rename {utils => .great_merge/utils}/sub8_ros_tools/sub8_ros_tools/image_helpers.py (100%) rename {utils => .great_merge/utils}/sub8_ros_tools/sub8_ros_tools/init_helpers.py (100%) rename {utils => .great_merge/utils}/sub8_ros_tools/sub8_ros_tools/msg_helpers.py (100%) rename {utils => .great_merge/utils}/sub8_ros_tools/sub8_ros_tools/threading_helpers.py (100%) rename {utils => .great_merge/utils}/sub8_ros_tools/sub8_tools/__init__.py (100%) rename {utils => .great_merge/utils}/sub8_ros_tools/test_ros_tools/test_ros_tools.py (100%) rename {utils => .great_merge/utils}/uf_common/CMakeLists.txt (100%) rename {utils => .great_merge/utils}/uf_common/action/MoveTo.action (100%) rename {utils => .great_merge/utils}/uf_common/include/uf_common/msg_helpers.h (100%) rename {utils => .great_merge/utils}/uf_common/include/uf_common/param_helpers.h (100%) rename {utils => .great_merge/utils}/uf_common/msg/Acceleration.msg (100%) rename {utils => .great_merge/utils}/uf_common/msg/Float64Stamped.msg (100%) rename {utils => .great_merge/utils}/uf_common/msg/PoseTwist.msg (100%) rename {utils => .great_merge/utils}/uf_common/msg/PoseTwistStamped.msg (100%) rename {utils => .great_merge/utils}/uf_common/msg/VelocityMeasurement.msg (100%) rename {utils => .great_merge/utils}/uf_common/msg/VelocityMeasurements.msg (100%) rename {utils => .great_merge/utils}/uf_common/package.xml (100%) rename {utils => .great_merge/utils}/uf_common/scripts/param_saver (100%) rename {utils => .great_merge/utils}/uf_common/setup.py (100%) rename {utils => .great_merge/utils}/uf_common/uf_common/__init__.py (100%) diff --git a/drivers/hydrophones/CMakeLists.txt b/.great_merge/drivers/hydrophones/CMakeLists.txt similarity index 100% rename from drivers/hydrophones/CMakeLists.txt rename to .great_merge/drivers/hydrophones/CMakeLists.txt diff --git a/drivers/hydrophones/README.md b/.great_merge/drivers/hydrophones/README.md similarity index 100% rename from drivers/hydrophones/README.md rename to .great_merge/drivers/hydrophones/README.md diff --git a/drivers/hydrophones/msg/Debug.msg b/.great_merge/drivers/hydrophones/msg/Debug.msg similarity index 100% rename from drivers/hydrophones/msg/Debug.msg rename to .great_merge/drivers/hydrophones/msg/Debug.msg diff --git a/drivers/hydrophones/msg/Ping.msg b/.great_merge/drivers/hydrophones/msg/Ping.msg similarity index 100% rename from drivers/hydrophones/msg/Ping.msg rename to .great_merge/drivers/hydrophones/msg/Ping.msg diff --git a/drivers/hydrophones/msg/ProcessedPing.msg b/.great_merge/drivers/hydrophones/msg/ProcessedPing.msg similarity index 100% rename from drivers/hydrophones/msg/ProcessedPing.msg rename to .great_merge/drivers/hydrophones/msg/ProcessedPing.msg diff --git a/drivers/hydrophones/package.xml b/.great_merge/drivers/hydrophones/package.xml similarity index 100% rename from drivers/hydrophones/package.xml rename to .great_merge/drivers/hydrophones/package.xml diff --git a/drivers/hydrophones/scripts/hydrophones b/.great_merge/drivers/hydrophones/scripts/hydrophones similarity index 100% rename from drivers/hydrophones/scripts/hydrophones rename to .great_merge/drivers/hydrophones/scripts/hydrophones diff --git a/drivers/hydrophones/scripts/ping_logger b/.great_merge/drivers/hydrophones/scripts/ping_logger similarity 
index 100%
rename from drivers/hydrophones/scripts/ping_logger
rename to .great_merge/drivers/hydrophones/scripts/ping_logger
diff --git a/drivers/hydrophones/scripts/ping_plotter b/.great_merge/drivers/hydrophones/scripts/ping_plotter
similarity index 100%
rename from drivers/hydrophones/scripts/ping_plotter
rename to .great_merge/drivers/hydrophones/scripts/ping_plotter
diff --git a/drivers/hydrophones/scripts/ping_printer b/.great_merge/drivers/hydrophones/scripts/ping_printer
similarity index 100%
rename from drivers/hydrophones/scripts/ping_printer
rename to .great_merge/drivers/hydrophones/scripts/ping_printer
diff --git a/drivers/hydrophones/setup.py b/.great_merge/drivers/hydrophones/setup.py
similarity index 100%
rename from drivers/hydrophones/setup.py
rename to .great_merge/drivers/hydrophones/setup.py
diff --git a/drivers/hydrophones/src/__init__.py b/.great_merge/drivers/hydrophones/src/__init__.py
similarity index 100%
rename from drivers/hydrophones/src/__init__.py
rename to .great_merge/drivers/hydrophones/src/__init__.py
diff --git a/drivers/hydrophones/src/hydrophones/__init__.py b/.great_merge/drivers/hydrophones/src/hydrophones/__init__.py
similarity index 100%
rename from drivers/hydrophones/src/hydrophones/__init__.py
rename to .great_merge/drivers/hydrophones/src/hydrophones/__init__.py
diff --git a/drivers/hydrophones/src/hydrophones/algorithms.py b/.great_merge/drivers/hydrophones/src/hydrophones/algorithms.py
similarity index 100%
rename from drivers/hydrophones/src/hydrophones/algorithms.py
rename to .great_merge/drivers/hydrophones/src/hydrophones/algorithms.py
diff --git a/drivers/hydrophones/src/hydrophones/util.py b/.great_merge/drivers/hydrophones/src/hydrophones/util.py
similarity index 100%
rename from drivers/hydrophones/src/hydrophones/util.py
rename to .great_merge/drivers/hydrophones/src/hydrophones/util.py
diff --git a/drivers/paulboard_driver/CMakeLists.txt b/.great_merge/drivers/paulboard_driver/CMakeLists.txt
similarity index 100%
rename from drivers/paulboard_driver/CMakeLists.txt
rename to .great_merge/drivers/paulboard_driver/CMakeLists.txt
diff --git a/drivers/paulboard_driver/README.md b/.great_merge/drivers/paulboard_driver/README.md
similarity index 100%
rename from drivers/paulboard_driver/README.md
rename to .great_merge/drivers/paulboard_driver/README.md
diff --git a/drivers/paulboard_driver/blobs/SimpleHyd2013.bin b/.great_merge/drivers/paulboard_driver/blobs/SimpleHyd2013.bin
similarity index 100%
rename from drivers/paulboard_driver/blobs/SimpleHyd2013.bin
rename to .great_merge/drivers/paulboard_driver/blobs/SimpleHyd2013.bin
diff --git a/drivers/paulboard_driver/package.xml b/.great_merge/drivers/paulboard_driver/package.xml
similarity index 100%
rename from drivers/paulboard_driver/package.xml
rename to .great_merge/drivers/paulboard_driver/package.xml
diff --git a/drivers/paulboard_driver/scripts/paulboard_driver b/.great_merge/drivers/paulboard_driver/scripts/paulboard_driver
similarity index 100%
rename from drivers/paulboard_driver/scripts/paulboard_driver
rename to .great_merge/drivers/paulboard_driver/scripts/paulboard_driver
diff --git a/drivers/paulboard_driver/setup.py b/.great_merge/drivers/paulboard_driver/setup.py
similarity index 100%
rename from drivers/paulboard_driver/setup.py
rename to .great_merge/drivers/paulboard_driver/setup.py
diff --git a/drivers/paulboard_driver/src/paulboard_driver/SimpleHyd2013.bin b/.great_merge/drivers/paulboard_driver/src/paulboard_driver/SimpleHyd2013.bin
similarity index 100%
rename from drivers/paulboard_driver/src/paulboard_driver/SimpleHyd2013.bin
rename to .great_merge/drivers/paulboard_driver/src/paulboard_driver/SimpleHyd2013.bin
diff --git a/drivers/paulboard_driver/src/paulboard_driver/__init__.py b/.great_merge/drivers/paulboard_driver/src/paulboard_driver/__init__.py
similarity index 100%
rename from drivers/paulboard_driver/src/paulboard_driver/__init__.py
rename to .great_merge/drivers/paulboard_driver/src/paulboard_driver/__init__.py
diff --git a/drivers/paulboard_driver/src/paulboard_driver/ais_bootloader.py b/.great_merge/drivers/paulboard_driver/src/paulboard_driver/ais_bootloader.py
similarity index 100%
rename from drivers/paulboard_driver/src/paulboard_driver/ais_bootloader.py
rename to .great_merge/drivers/paulboard_driver/src/paulboard_driver/ais_bootloader.py
diff --git a/drivers/sub8_sonar/CMakeLists.txt b/.great_merge/drivers/sub8_sonar/CMakeLists.txt
similarity index 100%
rename from drivers/sub8_sonar/CMakeLists.txt
rename to .great_merge/drivers/sub8_sonar/CMakeLists.txt
diff --git a/drivers/sub8_sonar/README.md b/.great_merge/drivers/sub8_sonar/README.md
similarity index 100%
rename from drivers/sub8_sonar/README.md
rename to .great_merge/drivers/sub8_sonar/README.md
diff --git a/drivers/sub8_sonar/launch/test.launch b/.great_merge/drivers/sub8_sonar/launch/test.launch
similarity index 100%
rename from drivers/sub8_sonar/launch/test.launch
rename to .great_merge/drivers/sub8_sonar/launch/test.launch
diff --git a/drivers/sub8_sonar/multilateration/__init__.py b/.great_merge/drivers/sub8_sonar/multilateration/__init__.py
similarity index 100%
rename from drivers/sub8_sonar/multilateration/__init__.py
rename to .great_merge/drivers/sub8_sonar/multilateration/__init__.py
diff --git a/drivers/sub8_sonar/multilateration/multilateration.py b/.great_merge/drivers/sub8_sonar/multilateration/multilateration.py
similarity index 100%
rename from drivers/sub8_sonar/multilateration/multilateration.py
rename to .great_merge/drivers/sub8_sonar/multilateration/multilateration.py
diff --git a/drivers/sub8_sonar/nodes/plotter.py b/.great_merge/drivers/sub8_sonar/nodes/plotter.py
similarity index 100%
rename from drivers/sub8_sonar/nodes/plotter.py
rename to .great_merge/drivers/sub8_sonar/nodes/plotter.py
diff --git a/drivers/sub8_sonar/nodes/sonar_test.py b/.great_merge/drivers/sub8_sonar/nodes/sonar_test.py
similarity index 100%
rename from drivers/sub8_sonar/nodes/sonar_test.py
rename to .great_merge/drivers/sub8_sonar/nodes/sonar_test.py
diff --git a/drivers/sub8_sonar/package.xml b/.great_merge/drivers/sub8_sonar/package.xml
similarity index 100%
rename from drivers/sub8_sonar/package.xml
rename to .great_merge/drivers/sub8_sonar/package.xml
diff --git a/drivers/sub8_sonar/setup.py b/.great_merge/drivers/sub8_sonar/setup.py
similarity index 100%
rename from drivers/sub8_sonar/setup.py
rename to .great_merge/drivers/sub8_sonar/setup.py
diff --git a/drivers/sub8_sonar/sub8_sonar/__init__.py b/.great_merge/drivers/sub8_sonar/sub8_sonar/__init__.py
similarity index 100%
rename from drivers/sub8_sonar/sub8_sonar/__init__.py
rename to .great_merge/drivers/sub8_sonar/sub8_sonar/__init__.py
diff --git a/drivers/sub8_sonar/sub8_sonar/sonar_driver.py b/.great_merge/drivers/sub8_sonar/sub8_sonar/sonar_driver.py
similarity index 100%
rename from drivers/sub8_sonar/sub8_sonar/sonar_driver.py
rename to .great_merge/drivers/sub8_sonar/sub8_sonar/sonar_driver.py
diff --git a/perception/navigator_vision/CMakeLists.txt b/.great_merge/perception/navigator_vision/CMakeLists.txt
similarity index 100%
rename from perception/navigator_vision/CMakeLists.txt
rename to .great_merge/perception/navigator_vision/CMakeLists.txt
diff --git a/perception/navigator_vision/README.md b/.great_merge/perception/navigator_vision/README.md
similarity index 100%
rename from perception/navigator_vision/README.md
rename to .great_merge/perception/navigator_vision/README.md
diff --git a/perception/navigator_vision/calibration.xml b/.great_merge/perception/navigator_vision/calibration.xml
similarity index 100%
rename from perception/navigator_vision/calibration.xml
rename to .great_merge/perception/navigator_vision/calibration.xml
diff --git a/perception/navigator_vision/config/underwater_shape_detection.yaml b/.great_merge/perception/navigator_vision/config/underwater_shape_detection.yaml
similarity index 100%
rename from perception/navigator_vision/config/underwater_shape_detection.yaml
rename to .great_merge/perception/navigator_vision/config/underwater_shape_detection.yaml
diff --git a/perception/navigator_vision/exFAST_SparseStereo/GPL.txt b/.great_merge/perception/navigator_vision/exFAST_SparseStereo/GPL.txt
similarity index 100%
rename from perception/navigator_vision/exFAST_SparseStereo/GPL.txt
rename to .great_merge/perception/navigator_vision/exFAST_SparseStereo/GPL.txt
diff --git a/perception/navigator_vision/exFAST_SparseStereo/README.txt b/.great_merge/perception/navigator_vision/exFAST_SparseStereo/README.txt
similarity index 100%
rename from perception/navigator_vision/exFAST_SparseStereo/README.txt
rename to .great_merge/perception/navigator_vision/exFAST_SparseStereo/README.txt
diff --git a/perception/navigator_vision/exFAST_SparseStereo/example-data/calibration.xml b/.great_merge/perception/navigator_vision/exFAST_SparseStereo/example-data/calibration.xml
similarity index 100%
rename from perception/navigator_vision/exFAST_SparseStereo/example-data/calibration.xml
rename to .great_merge/perception/navigator_vision/exFAST_SparseStereo/example-data/calibration.xml
diff --git a/perception/navigator_vision/exFAST_SparseStereo/example-data/left.png b/.great_merge/perception/navigator_vision/exFAST_SparseStereo/example-data/left.png
similarity index 100%
rename from perception/navigator_vision/exFAST_SparseStereo/example-data/left.png
rename to .great_merge/perception/navigator_vision/exFAST_SparseStereo/example-data/left.png
diff --git a/perception/navigator_vision/exFAST_SparseStereo/example-data/right.png b/.great_merge/perception/navigator_vision/exFAST_SparseStereo/example-data/right.png
similarity index 100%
rename from perception/navigator_vision/exFAST_SparseStereo/example-data/right.png
rename to .great_merge/perception/navigator_vision/exFAST_SparseStereo/example-data/right.png
diff --git a/perception/navigator_vision/exFAST_SparseStereo/src/example/example.cpp b/.great_merge/perception/navigator_vision/exFAST_SparseStereo/src/example/example.cpp
similarity index 100%
rename from perception/navigator_vision/exFAST_SparseStereo/src/example/example.cpp
rename to .great_merge/perception/navigator_vision/exFAST_SparseStereo/src/example/example.cpp
diff --git a/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/CMakeLists.txt b/.great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/CMakeLists.txt
similarity index 100%
rename from perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/CMakeLists.txt
rename to .great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/CMakeLists.txt
diff --git a/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/calibrationresult.cpp b/.great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/calibrationresult.cpp
similarity index 100%
rename from perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/calibrationresult.cpp
rename to .great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/calibrationresult.cpp
diff --git a/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/calibrationresult.h b/.great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/calibrationresult.h
similarity index 100%
rename from perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/calibrationresult.h
rename to .great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/calibrationresult.h
diff --git a/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/census-inl.h b/.great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/census-inl.h
similarity index 100%
rename from perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/census-inl.h
rename to .great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/census-inl.h
diff --git a/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/census.cpp b/.great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/census.cpp
similarity index 100%
rename from perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/census.cpp
rename to .great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/census.cpp
diff --git a/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/census.h b/.great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/census.h
similarity index 100%
rename from perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/census.h
rename to .great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/census.h
diff --git a/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/censuswindow.h b/.great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/censuswindow.h
similarity index 100%
rename from perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/censuswindow.h
rename to .great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/censuswindow.h
diff --git a/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/exception.h b/.great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/exception.h
similarity index 100%
rename from perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/exception.h
rename to .great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/exception.h
diff --git a/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/extendedfast-inl.h b/.great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/extendedfast-inl.h
similarity index 100%
rename from perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/extendedfast-inl.h
rename to .great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/extendedfast-inl.h
diff --git a/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/extendedfast.cpp b/.great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/extendedfast.cpp
similarity index 100%
rename from perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/extendedfast.cpp
rename to .great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/extendedfast.cpp
diff --git a/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/extendedfast.h b/.great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/extendedfast.h
similarity index 100%
rename from perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/extendedfast.h
rename to .great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/extendedfast.h
diff --git a/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/fast9-inl.h b/.great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/fast9-inl.h
similarity index 100%
rename from perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/fast9-inl.h
rename to .great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/fast9-inl.h
diff --git a/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/fast9.h b/.great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/fast9.h
similarity index 100%
rename from perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/fast9.h
rename to .great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/fast9.h
diff --git a/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/hammingdistance.cpp b/.great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/hammingdistance.cpp
similarity index 100%
rename from perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/hammingdistance.cpp
rename to .great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/hammingdistance.cpp
diff --git a/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/hammingdistance.h b/.great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/hammingdistance.h
similarity index 100%
rename from perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/hammingdistance.h
rename to .great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/hammingdistance.h
diff --git a/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/imageconversion.cpp b/.great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/imageconversion.cpp
similarity index 100%
rename from perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/imageconversion.cpp
rename to .great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/imageconversion.cpp
diff --git a/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/imageconversion.h b/.great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/imageconversion.h
similarity index 100%
rename from perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/imageconversion.h
rename to .great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/imageconversion.h
diff --git a/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/simd.cpp b/.great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/simd.cpp
similarity index 100%
rename from perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/simd.cpp
rename to .great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/simd.cpp
diff --git a/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/simd.h b/.great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/simd.h
similarity index 100%
rename from perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/simd.h
rename to .great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/simd.h
diff --git a/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/sparsematch.h b/.great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/sparsematch.h
similarity index 100%
rename from perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/sparsematch.h
rename to .great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/sparsematch.h
diff --git a/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/sparserectification.cpp b/.great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/sparserectification.cpp
similarity index 100%
rename from perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/sparserectification.cpp
rename to .great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/sparserectification.cpp
diff --git a/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/sparserectification.h b/.great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/sparserectification.h
similarity index 100%
rename from perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/sparserectification.h
rename to .great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/sparserectification.h
diff --git a/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/sparsestereo-inl.h b/.great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/sparsestereo-inl.h
similarity index 100%
rename from perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/sparsestereo-inl.h
rename to .great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/sparsestereo-inl.h
diff --git a/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/sparsestereo.h b/.great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/sparsestereo.h
similarity index 100%
rename from perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/sparsestereo.h
rename to .great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/sparsestereo.h
diff --git a/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/stereorectification.cpp b/.great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/stereorectification.cpp
similarity index 100%
rename from perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/stereorectification.cpp
rename to .great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/stereorectification.cpp
diff --git a/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/stereorectification.h b/.great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/stereorectification.h
similarity index 100%
rename from perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/stereorectification.h
rename to .great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/stereorectification.h
diff --git a/perception/navigator_vision/include/missions/underwater_shape_identification.hpp b/.great_merge/perception/navigator_vision/include/missions/underwater_shape_identification.hpp
similarity index 100%
rename from perception/navigator_vision/include/missions/underwater_shape_identification.hpp
rename to .great_merge/perception/navigator_vision/include/missions/underwater_shape_identification.hpp
diff --git a/perception/navigator_vision/include/navigator_vision_lib/active_contours.hpp b/.great_merge/perception/navigator_vision/include/navigator_vision_lib/active_contours.hpp
similarity index 100%
rename from perception/navigator_vision/include/navigator_vision_lib/active_contours.hpp
rename to .great_merge/perception/navigator_vision/include/navigator_vision_lib/active_contours.hpp
diff --git a/perception/navigator_vision/include/navigator_vision_lib/colorizer/camera_observer.hpp b/.great_merge/perception/navigator_vision/include/navigator_vision_lib/colorizer/camera_observer.hpp
similarity index 100%
rename from perception/navigator_vision/include/navigator_vision_lib/colorizer/camera_observer.hpp
rename to .great_merge/perception/navigator_vision/include/navigator_vision_lib/colorizer/camera_observer.hpp
diff --git a/perception/navigator_vision/include/navigator_vision_lib/colorizer/color_observation.hpp b/.great_merge/perception/navigator_vision/include/navigator_vision_lib/colorizer/color_observation.hpp
similarity index 100%
rename from perception/navigator_vision/include/navigator_vision_lib/colorizer/color_observation.hpp
rename to .great_merge/perception/navigator_vision/include/navigator_vision_lib/colorizer/color_observation.hpp
diff --git a/perception/navigator_vision/include/navigator_vision_lib/colorizer/common.hpp b/.great_merge/perception/navigator_vision/include/navigator_vision_lib/colorizer/common.hpp
similarity index 100%
rename from perception/navigator_vision/include/navigator_vision_lib/colorizer/common.hpp
rename to .great_merge/perception/navigator_vision/include/navigator_vision_lib/colorizer/common.hpp
diff --git a/perception/navigator_vision/include/navigator_vision_lib/colorizer/pcd_colorizer.hpp b/.great_merge/perception/navigator_vision/include/navigator_vision_lib/colorizer/pcd_colorizer.hpp
similarity index 100%
rename from perception/navigator_vision/include/navigator_vision_lib/colorizer/pcd_colorizer.hpp
rename to .great_merge/perception/navigator_vision/include/navigator_vision_lib/colorizer/pcd_colorizer.hpp
diff --git a/perception/navigator_vision/include/navigator_vision_lib/colorizer/single_cloud_processor.hpp b/.great_merge/perception/navigator_vision/include/navigator_vision_lib/colorizer/single_cloud_processor.hpp
similarity index 100%
rename from perception/navigator_vision/include/navigator_vision_lib/colorizer/single_cloud_processor.hpp
rename to .great_merge/perception/navigator_vision/include/navigator_vision_lib/colorizer/single_cloud_processor.hpp
diff --git a/perception/navigator_vision/include/navigator_vision_lib/cv_tools.hpp b/.great_merge/perception/navigator_vision/include/navigator_vision_lib/cv_tools.hpp
similarity index 100%
rename from perception/navigator_vision/include/navigator_vision_lib/cv_tools.hpp
rename to .great_merge/perception/navigator_vision/include/navigator_vision_lib/cv_tools.hpp
diff --git a/perception/navigator_vision/include/navigator_vision_lib/image_acquisition/camera_frame.hpp b/.great_merge/perception/navigator_vision/include/navigator_vision_lib/image_acquisition/camera_frame.hpp
similarity index 100%
rename from perception/navigator_vision/include/navigator_vision_lib/image_acquisition/camera_frame.hpp
rename to .great_merge/perception/navigator_vision/include/navigator_vision_lib/image_acquisition/camera_frame.hpp
diff --git a/perception/navigator_vision/include/navigator_vision_lib/image_acquisition/camera_frame_sequence.hpp b/.great_merge/perception/navigator_vision/include/navigator_vision_lib/image_acquisition/camera_frame_sequence.hpp
similarity index 100%
rename from perception/navigator_vision/include/navigator_vision_lib/image_acquisition/camera_frame_sequence.hpp
rename to .great_merge/perception/navigator_vision/include/navigator_vision_lib/image_acquisition/camera_frame_sequence.hpp
diff --git a/perception/navigator_vision/include/navigator_vision_lib/image_acquisition/camera_model.hpp b/.great_merge/perception/navigator_vision/include/navigator_vision_lib/image_acquisition/camera_model.hpp
similarity index 100%
rename from perception/navigator_vision/include/navigator_vision_lib/image_acquisition/camera_model.hpp
rename to .great_merge/perception/navigator_vision/include/navigator_vision_lib/image_acquisition/camera_model.hpp
diff --git a/perception/navigator_vision/include/navigator_vision_lib/image_acquisition/ros_camera_stream.hpp b/.great_merge/perception/navigator_vision/include/navigator_vision_lib/image_acquisition/ros_camera_stream.hpp
similarity index 100%
rename from perception/navigator_vision/include/navigator_vision_lib/image_acquisition/ros_camera_stream.hpp
rename to .great_merge/perception/navigator_vision/include/navigator_vision_lib/image_acquisition/ros_camera_stream.hpp
diff --git a/perception/navigator_vision/include/navigator_vision_lib/image_filtering.hpp b/.great_merge/perception/navigator_vision/include/navigator_vision_lib/image_filtering.hpp
similarity index 100%
rename from perception/navigator_vision/include/navigator_vision_lib/image_filtering.hpp
rename to .great_merge/perception/navigator_vision/include/navigator_vision_lib/image_filtering.hpp
diff --git a/perception/navigator_vision/include/navigator_vision_lib/pcd_sub_pub_algorithm.hpp b/.great_merge/perception/navigator_vision/include/navigator_vision_lib/pcd_sub_pub_algorithm.hpp
similarity index 100%
rename from perception/navigator_vision/include/navigator_vision_lib/pcd_sub_pub_algorithm.hpp
rename to .great_merge/perception/navigator_vision/include/navigator_vision_lib/pcd_sub_pub_algorithm.hpp
diff --git a/perception/navigator_vision/include/navigator_vision_lib/point_cloud_algorithms.hpp b/.great_merge/perception/navigator_vision/include/navigator_vision_lib/point_cloud_algorithms.hpp
similarity index 100%
rename from perception/navigator_vision/include/navigator_vision_lib/point_cloud_algorithms.hpp
rename to .great_merge/perception/navigator_vision/include/navigator_vision_lib/point_cloud_algorithms.hpp
diff --git a/perception/navigator_vision/include/navigator_vision_lib/visualization.hpp b/.great_merge/perception/navigator_vision/include/navigator_vision_lib/visualization.hpp
similarity index 100%
rename from perception/navigator_vision/include/navigator_vision_lib/visualization.hpp
rename to .great_merge/perception/navigator_vision/include/navigator_vision_lib/visualization.hpp
diff --git a/perception/navigator_vision/nodes/A_C_test_vision.cpp b/.great_merge/perception/navigator_vision/nodes/A_C_test_vision.cpp
similarity index 100%
rename from perception/navigator_vision/nodes/A_C_test_vision.cpp
rename to .great_merge/perception/navigator_vision/nodes/A_C_test_vision.cpp
diff --git a/perception/navigator_vision/nodes/camera_stream_demo.cpp b/.great_merge/perception/navigator_vision/nodes/camera_stream_demo.cpp
similarity index 100%
rename from perception/navigator_vision/nodes/camera_stream_demo.cpp
rename to .great_merge/perception/navigator_vision/nodes/camera_stream_demo.cpp
diff --git a/perception/navigator_vision/nodes/camera_to_lidar/CameraLidarTransformer.cpp b/.great_merge/perception/navigator_vision/nodes/camera_to_lidar/CameraLidarTransformer.cpp
similarity index 100%
rename from perception/navigator_vision/nodes/camera_to_lidar/CameraLidarTransformer.cpp
rename to .great_merge/perception/navigator_vision/nodes/camera_to_lidar/CameraLidarTransformer.cpp
diff --git a/perception/navigator_vision/nodes/camera_to_lidar/CameraLidarTransformer.hpp b/.great_merge/perception/navigator_vision/nodes/camera_to_lidar/CameraLidarTransformer.hpp
similarity index 100%
rename from perception/navigator_vision/nodes/camera_to_lidar/CameraLidarTransformer.hpp
rename to .great_merge/perception/navigator_vision/nodes/camera_to_lidar/CameraLidarTransformer.hpp
diff --git a/perception/navigator_vision/nodes/camera_to_lidar/main.cpp b/.great_merge/perception/navigator_vision/nodes/camera_to_lidar/main.cpp
similarity index 100%
rename from perception/navigator_vision/nodes/camera_to_lidar/main.cpp
rename to .great_merge/perception/navigator_vision/nodes/camera_to_lidar/main.cpp
diff --git a/perception/navigator_vision/nodes/database_color.py b/.great_merge/perception/navigator_vision/nodes/database_color.py
similarity index 100%
rename from perception/navigator_vision/nodes/database_color.py
rename to .great_merge/perception/navigator_vision/nodes/database_color.py
diff --git a/perception/navigator_vision/nodes/object_classifier.py b/.great_merge/perception/navigator_vision/nodes/object_classifier.py
similarity index 100%
rename from perception/navigator_vision/nodes/object_classifier.py
rename to .great_merge/perception/navigator_vision/nodes/object_classifier.py
diff --git a/perception/navigator_vision/nodes/pinger_finder.py b/.great_merge/perception/navigator_vision/nodes/pinger_finder.py
similarity index 100%
rename from perception/navigator_vision/nodes/pinger_finder.py
rename to .great_merge/perception/navigator_vision/nodes/pinger_finder.py
diff --git a/perception/navigator_vision/nodes/pinger_locator.py b/.great_merge/perception/navigator_vision/nodes/pinger_locator.py
similarity index 100%
rename from perception/navigator_vision/nodes/pinger_locator.py
rename to .great_merge/perception/navigator_vision/nodes/pinger_locator.py
diff --git a/perception/navigator_vision/nodes/shape_identification/DockShapeVision.cpp b/.great_merge/perception/navigator_vision/nodes/shape_identification/DockShapeVision.cpp
similarity index 100%
rename from perception/navigator_vision/nodes/shape_identification/DockShapeVision.cpp
rename to .great_merge/perception/navigator_vision/nodes/shape_identification/DockShapeVision.cpp
diff --git a/perception/navigator_vision/nodes/shape_identification/DockShapeVision.h b/.great_merge/perception/navigator_vision/nodes/shape_identification/DockShapeVision.h
similarity index 100%
rename from perception/navigator_vision/nodes/shape_identification/DockShapeVision.h
rename to .great_merge/perception/navigator_vision/nodes/shape_identification/DockShapeVision.h
diff --git a/perception/navigator_vision/nodes/shape_identification/GrayscaleContour/GrayscaleContour.cpp b/.great_merge/perception/navigator_vision/nodes/shape_identification/GrayscaleContour/GrayscaleContour.cpp
similarity index 100%
rename from perception/navigator_vision/nodes/shape_identification/GrayscaleContour/GrayscaleContour.cpp
rename to .great_merge/perception/navigator_vision/nodes/shape_identification/GrayscaleContour/GrayscaleContour.cpp
diff --git a/perception/navigator_vision/nodes/shape_identification/GrayscaleContour/GrayscaleContour.h b/.great_merge/perception/navigator_vision/nodes/shape_identification/GrayscaleContour/GrayscaleContour.h
similarity index 100%
rename from perception/navigator_vision/nodes/shape_identification/GrayscaleContour/GrayscaleContour.h
rename to .great_merge/perception/navigator_vision/nodes/shape_identification/GrayscaleContour/GrayscaleContour.h
diff --git a/perception/navigator_vision/nodes/shape_identification/ShapeTracker.cpp b/.great_merge/perception/navigator_vision/nodes/shape_identification/ShapeTracker.cpp
similarity index 100%
rename from perception/navigator_vision/nodes/shape_identification/ShapeTracker.cpp
rename to .great_merge/perception/navigator_vision/nodes/shape_identification/ShapeTracker.cpp
diff --git a/perception/navigator_vision/nodes/shape_identification/ShapeTracker.h b/.great_merge/perception/navigator_vision/nodes/shape_identification/ShapeTracker.h
similarity index 100%
rename from perception/navigator_vision/nodes/shape_identification/ShapeTracker.h
rename to .great_merge/perception/navigator_vision/nodes/shape_identification/ShapeTracker.h
diff --git a/perception/navigator_vision/nodes/shape_identification/TrackedShape.cpp b/.great_merge/perception/navigator_vision/nodes/shape_identification/TrackedShape.cpp
similarity index 100%
rename from perception/navigator_vision/nodes/shape_identification/TrackedShape.cpp
rename to .great_merge/perception/navigator_vision/nodes/shape_identification/TrackedShape.cpp
diff --git a/perception/navigator_vision/nodes/shape_identification/TrackedShape.h b/.great_merge/perception/navigator_vision/nodes/shape_identification/TrackedShape.h
similarity index 100%
rename from perception/navigator_vision/nodes/shape_identification/TrackedShape.h
rename to .great_merge/perception/navigator_vision/nodes/shape_identification/TrackedShape.h
diff --git a/perception/navigator_vision/nodes/shape_identification/main.cpp b/.great_merge/perception/navigator_vision/nodes/shape_identification/main.cpp
similarity index 100%
rename from perception/navigator_vision/nodes/shape_identification/main.cpp
rename to .great_merge/perception/navigator_vision/nodes/shape_identification/main.cpp
diff --git a/perception/navigator_vision/nodes/shooter_finder.py b/.great_merge/perception/navigator_vision/nodes/shooter_finder.py
similarity index 100%
rename from perception/navigator_vision/nodes/shooter_finder.py
rename to .great_merge/perception/navigator_vision/nodes/shooter_finder.py
diff --git a/perception/navigator_vision/nodes/start_gate_manual.py b/.great_merge/perception/navigator_vision/nodes/start_gate_manual.py
similarity index 100%
rename from perception/navigator_vision/nodes/start_gate_manual.py
rename to .great_merge/perception/navigator_vision/nodes/start_gate_manual.py
diff --git a/perception/navigator_vision/nodes/stereo_point_cloud_driver.cpp b/.great_merge/perception/navigator_vision/nodes/stereo_point_cloud_driver.cpp
similarity index 100%
rename from perception/navigator_vision/nodes/stereo_point_cloud_driver.cpp
rename to .great_merge/perception/navigator_vision/nodes/stereo_point_cloud_driver.cpp
diff --git a/perception/navigator_vision/nodes/underwater_shape_identification_vision.cpp b/.great_merge/perception/navigator_vision/nodes/underwater_shape_identification_vision.cpp
similarity index 100%
rename from perception/navigator_vision/nodes/underwater_shape_identification_vision.cpp
rename to .great_merge/perception/navigator_vision/nodes/underwater_shape_identification_vision.cpp
diff --git a/perception/navigator_vision/nodes/velodyne_pcd_colorizer.cpp b/.great_merge/perception/navigator_vision/nodes/velodyne_pcd_colorizer.cpp
similarity index 100%
rename from perception/navigator_vision/nodes/velodyne_pcd_colorizer.cpp
rename to .great_merge/perception/navigator_vision/nodes/velodyne_pcd_colorizer.cpp
diff --git a/perception/navigator_vision/object_classification/HOG_descriptor.py b/.great_merge/perception/navigator_vision/object_classification/HOG_descriptor.py
similarity index 100%
rename from perception/navigator_vision/object_classification/HOG_descriptor.py
rename to .great_merge/perception/navigator_vision/object_classification/HOG_descriptor.py
diff --git a/perception/navigator_vision/object_classification/SVM_classifier.py b/.great_merge/perception/navigator_vision/object_classification/SVM_classifier.py
similarity index 100%
rename from perception/navigator_vision/object_classification/SVM_classifier.py
rename to .great_merge/perception/navigator_vision/object_classification/SVM_classifier.py
diff --git a/perception/navigator_vision/object_classification/__init__.py b/.great_merge/perception/navigator_vision/object_classification/__init__.py
similarity index 100%
rename from perception/navigator_vision/object_classification/__init__.py
rename to .great_merge/perception/navigator_vision/object_classification/__init__.py
diff --git a/perception/navigator_vision/object_classification/depickler.py b/.great_merge/perception/navigator_vision/object_classification/depickler.py
similarity index 100%
rename from perception/navigator_vision/object_classification/depickler.py
rename to .great_merge/perception/navigator_vision/object_classification/depickler.py
diff --git a/perception/navigator_vision/object_classification/depicklify.py b/.great_merge/perception/navigator_vision/object_classification/depicklify.py
similarity index 100%
rename from perception/navigator_vision/object_classification/depicklify.py
rename to .great_merge/perception/navigator_vision/object_classification/depicklify.py
diff --git a/perception/navigator_vision/object_classification/lidar_to_image.py b/.great_merge/perception/navigator_vision/object_classification/lidar_to_image.py
similarity index 100%
rename from perception/navigator_vision/object_classification/lidar_to_image.py
rename to .great_merge/perception/navigator_vision/object_classification/lidar_to_image.py
diff --git a/perception/navigator_vision/object_classification/median_flow.py b/.great_merge/perception/navigator_vision/object_classification/median_flow.py
similarity index 100%
rename from perception/navigator_vision/object_classification/median_flow.py
rename to .great_merge/perception/navigator_vision/object_classification/median_flow.py
diff --git a/perception/navigator_vision/object_classification/object_classification.py b/.great_merge/perception/navigator_vision/object_classification/object_classification.py
similarity index 100%
rename from perception/navigator_vision/object_classification/object_classification.py
rename to .great_merge/perception/navigator_vision/object_classification/object_classification.py
diff --git a/perception/navigator_vision/object_classification/roi_generator.py b/.great_merge/perception/navigator_vision/object_classification/roi_generator.py
similarity index 100%
rename from perception/navigator_vision/object_classification/roi_generator.py
rename to .great_merge/perception/navigator_vision/object_classification/roi_generator.py
diff --git a/perception/navigator_vision/object_classification/stc_train.py b/.great_merge/perception/navigator_vision/object_classification/stc_train.py
similarity index 100%
rename from perception/navigator_vision/object_classification/stc_train.py
rename to .great_merge/perception/navigator_vision/object_classification/stc_train.py
diff --git a/perception/navigator_vision/package.xml b/.great_merge/perception/navigator_vision/package.xml
similarity index 100%
rename from perception/navigator_vision/package.xml
rename to .great_merge/perception/navigator_vision/package.xml
diff --git a/perception/navigator_vision/ros_tools/on_the_fly_thresholder.py b/.great_merge/perception/navigator_vision/ros_tools/on_the_fly_thresholder.py
similarity index 100%
rename from perception/navigator_vision/ros_tools/on_the_fly_thresholder.py
rename to .great_merge/perception/navigator_vision/ros_tools/on_the_fly_thresholder.py
diff --git a/perception/navigator_vision/setup.py b/.great_merge/perception/navigator_vision/setup.py
similarity index 100%
rename from perception/navigator_vision/setup.py
rename to .great_merge/perception/navigator_vision/setup.py
diff --git a/perception/navigator_vision/src/missions/underwater_shape_identification.cpp b/.great_merge/perception/navigator_vision/src/missions/underwater_shape_identification.cpp
similarity index 100%
rename from perception/navigator_vision/src/missions/underwater_shape_identification.cpp
rename to .great_merge/perception/navigator_vision/src/missions/underwater_shape_identification.cpp
diff --git a/perception/navigator_vision/src/navigator_vision_lib/active_contours.cpp b/.great_merge/perception/navigator_vision/src/navigator_vision_lib/active_contours.cpp
similarity index 100%
rename from perception/navigator_vision/src/navigator_vision_lib/active_contours.cpp
rename to .great_merge/perception/navigator_vision/src/navigator_vision_lib/active_contours.cpp
diff --git a/perception/navigator_vision/src/navigator_vision_lib/colorizer/camera_observer.cpp b/.great_merge/perception/navigator_vision/src/navigator_vision_lib/colorizer/camera_observer.cpp
similarity index 100%
rename from perception/navigator_vision/src/navigator_vision_lib/colorizer/camera_observer.cpp
rename to .great_merge/perception/navigator_vision/src/navigator_vision_lib/colorizer/camera_observer.cpp
diff --git a/perception/navigator_vision/src/navigator_vision_lib/colorizer/color_observation.cpp b/.great_merge/perception/navigator_vision/src/navigator_vision_lib/colorizer/color_observation.cpp
similarity index 100%
rename from perception/navigator_vision/src/navigator_vision_lib/colorizer/color_observation.cpp
rename to .great_merge/perception/navigator_vision/src/navigator_vision_lib/colorizer/color_observation.cpp
diff --git a/perception/navigator_vision/src/navigator_vision_lib/colorizer/pcd_colorizer.cpp b/.great_merge/perception/navigator_vision/src/navigator_vision_lib/colorizer/pcd_colorizer.cpp
similarity index 100%
rename from perception/navigator_vision/src/navigator_vision_lib/colorizer/pcd_colorizer.cpp
rename to .great_merge/perception/navigator_vision/src/navigator_vision_lib/colorizer/pcd_colorizer.cpp
diff --git a/perception/navigator_vision/src/navigator_vision_lib/colorizer/single_cloud_processor.cpp b/.great_merge/perception/navigator_vision/src/navigator_vision_lib/colorizer/single_cloud_processor.cpp
similarity index 100%
rename from perception/navigator_vision/src/navigator_vision_lib/colorizer/single_cloud_processor.cpp
rename to .great_merge/perception/navigator_vision/src/navigator_vision_lib/colorizer/single_cloud_processor.cpp
diff --git a/perception/navigator_vision/src/navigator_vision_lib/cv_utils.cc b/.great_merge/perception/navigator_vision/src/navigator_vision_lib/cv_utils.cc
similarity index 100%
rename from perception/navigator_vision/src/navigator_vision_lib/cv_utils.cc
rename to .great_merge/perception/navigator_vision/src/navigator_vision_lib/cv_utils.cc
diff --git a/perception/navigator_vision/src/navigator_vision_lib/image_filtering.cpp b/.great_merge/perception/navigator_vision/src/navigator_vision_lib/image_filtering.cpp
similarity index 100%
rename from perception/navigator_vision/src/navigator_vision_lib/image_filtering.cpp
rename to .great_merge/perception/navigator_vision/src/navigator_vision_lib/image_filtering.cpp
diff --git a/perception/navigator_vision/src/navigator_vision_lib/point_cloud_algorithms.cc b/.great_merge/perception/navigator_vision/src/navigator_vision_lib/point_cloud_algorithms.cc
similarity index 100%
rename from perception/navigator_vision/src/navigator_vision_lib/point_cloud_algorithms.cc
rename to .great_merge/perception/navigator_vision/src/navigator_vision_lib/point_cloud_algorithms.cc
diff --git a/perception/navigator_vision/src/navigator_vision_lib/visualization.cc b/.great_merge/perception/navigator_vision/src/navigator_vision_lib/visualization.cc
similarity index 100%
rename from perception/navigator_vision/src/navigator_vision_lib/visualization.cc
rename to .great_merge/perception/navigator_vision/src/navigator_vision_lib/visualization.cc
diff --git a/perception/navigator_vision/templates/underwater_shape_identification/circle.png b/.great_merge/perception/navigator_vision/templates/underwater_shape_identification/circle.png
similarity index 100%
rename from perception/navigator_vision/templates/underwater_shape_identification/circle.png
rename to .great_merge/perception/navigator_vision/templates/underwater_shape_identification/circle.png
diff --git a/perception/navigator_vision/templates/underwater_shape_identification/cross.png b/.great_merge/perception/navigator_vision/templates/underwater_shape_identification/cross.png
similarity index 100%
rename from perception/navigator_vision/templates/underwater_shape_identification/cross.png
rename to .great_merge/perception/navigator_vision/templates/underwater_shape_identification/cross.png
diff --git a/perception/navigator_vision/templates/underwater_shape_identification/triangle.png b/.great_merge/perception/navigator_vision/templates/underwater_shape_identification/triangle.png
similarity index 100%
rename from perception/navigator_vision/templates/underwater_shape_identification/triangle.png
rename to .great_merge/perception/navigator_vision/templates/underwater_shape_identification/triangle.png
diff --git a/perception/sub8_perception/ros_tools/easy_thresh.py b/.great_merge/perception/sub8_perception/ros_tools/easy_thresh.py
similarity index 100%
rename from perception/sub8_perception/ros_tools/easy_thresh.py
rename to .great_merge/perception/sub8_perception/ros_tools/easy_thresh.py
diff --git a/perception/sub8_perception/ros_tools/image_dumper.py b/.great_merge/perception/sub8_perception/ros_tools/image_dumper.py
similarity index 100%
rename from perception/sub8_perception/ros_tools/image_dumper.py
rename to .great_merge/perception/sub8_perception/ros_tools/image_dumper.py
diff --git a/utils/navigator_tools/CMakeLists.txt b/.great_merge/utils/navigator_tools/CMakeLists.txt
similarity index 100%
rename from utils/navigator_tools/CMakeLists.txt
rename to .great_merge/utils/navigator_tools/CMakeLists.txt
diff --git a/utils/navigator_tools/cfg/BoundsConfig.cfg b/.great_merge/utils/navigator_tools/cfg/BoundsConfig.cfg
similarity index 100%
rename from utils/navigator_tools/cfg/BoundsConfig.cfg
rename to .great_merge/utils/navigator_tools/cfg/BoundsConfig.cfg
diff --git a/utils/navigator_tools/cfg/channel.yaml b/.great_merge/utils/navigator_tools/cfg/channel.yaml
similarity index 100%
rename from utils/navigator_tools/cfg/channel.yaml
rename to .great_merge/utils/navigator_tools/cfg/channel.yaml
diff --git a/utils/navigator_tools/cfg/course_a.yaml b/.great_merge/utils/navigator_tools/cfg/course_a.yaml
similarity index 100%
rename from utils/navigator_tools/cfg/course_a.yaml
rename to .great_merge/utils/navigator_tools/cfg/course_a.yaml
diff --git a/utils/navigator_tools/cfg/course_b.yaml b/.great_merge/utils/navigator_tools/cfg/course_b.yaml
similarity index 100%
rename from utils/navigator_tools/cfg/course_b.yaml
rename to .great_merge/utils/navigator_tools/cfg/course_b.yaml
diff --git a/utils/navigator_tools/cfg/course_c.yaml b/.great_merge/utils/navigator_tools/cfg/course_c.yaml
similarity index 100%
rename from utils/navigator_tools/cfg/course_c.yaml
rename to .great_merge/utils/navigator_tools/cfg/course_c.yaml
diff --git a/utils/navigator_tools/cfg/sim.yaml b/.great_merge/utils/navigator_tools/cfg/sim.yaml
similarity index 100%
rename from utils/navigator_tools/cfg/sim.yaml
rename to .great_merge/utils/navigator_tools/cfg/sim.yaml
diff --git a/utils/navigator_tools/cfg/wauberg.yaml b/.great_merge/utils/navigator_tools/cfg/wauberg.yaml
similarity index 100%
rename from utils/navigator_tools/cfg/wauberg.yaml
rename to .great_merge/utils/navigator_tools/cfg/wauberg.yaml
diff --git a/utils/navigator_tools/include/navigator_tools.hpp b/.great_merge/utils/navigator_tools/include/navigator_tools.hpp
similarity index 100%
rename from utils/navigator_tools/include/navigator_tools.hpp
rename to .great_merge/utils/navigator_tools/include/navigator_tools.hpp
diff --git a/utils/navigator_tools/navigator_tools/__init__.py b/.great_merge/utils/navigator_tools/navigator_tools/__init__.py
similarity index 100%
rename from utils/navigator_tools/navigator_tools/__init__.py
rename to .great_merge/utils/navigator_tools/navigator_tools/__init__.py
diff --git a/utils/navigator_tools/navigator_tools/bag_crawler.py b/.great_merge/utils/navigator_tools/navigator_tools/bag_crawler.py
similarity index 100%
rename from utils/navigator_tools/navigator_tools/bag_crawler.py
rename to .great_merge/utils/navigator_tools/navigator_tools/bag_crawler.py
diff --git a/utils/navigator_tools/navigator_tools/cv_debug.py b/.great_merge/utils/navigator_tools/navigator_tools/cv_debug.py
similarity index 100%
rename from utils/navigator_tools/navigator_tools/cv_debug.py
rename to .great_merge/utils/navigator_tools/navigator_tools/cv_debug.py
diff --git a/utils/navigator_tools/navigator_tools/general_helpers.py b/.great_merge/utils/navigator_tools/navigator_tools/general_helpers.py
similarity index 100%
rename from utils/navigator_tools/navigator_tools/general_helpers.py
rename to .great_merge/utils/navigator_tools/navigator_tools/general_helpers.py
diff --git a/utils/navigator_tools/navigator_tools/geometry_helpers.py b/.great_merge/utils/navigator_tools/navigator_tools/geometry_helpers.py
similarity index 100%
rename from utils/navigator_tools/navigator_tools/geometry_helpers.py
rename to .great_merge/utils/navigator_tools/navigator_tools/geometry_helpers.py
diff --git a/utils/navigator_tools/navigator_tools/image_helpers.py b/.great_merge/utils/navigator_tools/navigator_tools/image_helpers.py
similarity index 100%
rename from utils/navigator_tools/navigator_tools/image_helpers.py
rename to .great_merge/utils/navigator_tools/navigator_tools/image_helpers.py
diff --git a/utils/navigator_tools/navigator_tools/init_helpers.py b/.great_merge/utils/navigator_tools/navigator_tools/init_helpers.py
similarity index 100%
rename from utils/navigator_tools/navigator_tools/init_helpers.py
rename to .great_merge/utils/navigator_tools/navigator_tools/init_helpers.py
diff --git a/utils/navigator_tools/navigator_tools/missing_perception_object.py b/.great_merge/utils/navigator_tools/navigator_tools/missing_perception_object.py
similarity index 100%
rename from utils/navigator_tools/navigator_tools/missing_perception_object.py
rename to .great_merge/utils/navigator_tools/navigator_tools/missing_perception_object.py
diff --git a/utils/navigator_tools/navigator_tools/move_helper.py b/.great_merge/utils/navigator_tools/navigator_tools/move_helper.py
similarity index 100%
rename from utils/navigator_tools/navigator_tools/move_helper.py
rename to .great_merge/utils/navigator_tools/navigator_tools/move_helper.py
diff --git a/utils/navigator_tools/navigator_tools/msg_helpers.py b/.great_merge/utils/navigator_tools/navigator_tools/msg_helpers.py
similarity index 100%
rename from utils/navigator_tools/navigator_tools/msg_helpers.py
rename to .great_merge/utils/navigator_tools/navigator_tools/msg_helpers.py
diff --git a/utils/navigator_tools/navigator_tools/object_database_helper.py b/.great_merge/utils/navigator_tools/navigator_tools/object_database_helper.py
similarity index 100%
rename from utils/navigator_tools/navigator_tools/object_database_helper.py
rename to .great_merge/utils/navigator_tools/navigator_tools/object_database_helper.py
diff --git a/utils/navigator_tools/navigator_tools/rviz_helpers.py b/.great_merge/utils/navigator_tools/navigator_tools/rviz_helpers.py
similarity index 100%
rename from utils/navigator_tools/navigator_tools/rviz_helpers.py
rename to .great_merge/utils/navigator_tools/navigator_tools/rviz_helpers.py
diff --git a/utils/navigator_tools/navigator_tools/self_check.py b/.great_merge/utils/navigator_tools/navigator_tools/self_check.py
similarity index 100%
rename from utils/navigator_tools/navigator_tools/self_check.py
rename to .great_merge/utils/navigator_tools/navigator_tools/self_check.py
diff --git a/utils/navigator_tools/navigator_tools/threading_helpers.py b/.great_merge/utils/navigator_tools/navigator_tools/threading_helpers.py
similarity index 100%
rename from utils/navigator_tools/navigator_tools/threading_helpers.py
rename to .great_merge/utils/navigator_tools/navigator_tools/threading_helpers.py
diff --git a/utils/navigator_tools/nodes/boat_info.py b/.great_merge/utils/navigator_tools/nodes/boat_info.py
similarity index 100%
rename from utils/navigator_tools/nodes/boat_info.py
rename to .great_merge/utils/navigator_tools/nodes/boat_info.py
diff --git a/utils/navigator_tools/nodes/bounds.py b/.great_merge/utils/navigator_tools/nodes/bounds.py
similarity index 100%
rename from utils/navigator_tools/nodes/bounds.py
rename to .great_merge/utils/navigator_tools/nodes/bounds.py
diff --git a/utils/navigator_tools/nodes/clicked_point_recorder.py b/.great_merge/utils/navigator_tools/nodes/clicked_point_recorder.py
similarity index 100%
rename from utils/navigator_tools/nodes/clicked_point_recorder.py
rename to .great_merge/utils/navigator_tools/nodes/clicked_point_recorder.py
diff --git a/utils/navigator_tools/nodes/coordinate_conversion_server.py b/.great_merge/utils/navigator_tools/nodes/coordinate_conversion_server.py
similarity index 100%
rename from utils/navigator_tools/nodes/coordinate_conversion_server.py
rename to .great_merge/utils/navigator_tools/nodes/coordinate_conversion_server.py
diff --git a/utils/navigator_tools/nodes/estimated_object_setter.py b/.great_merge/utils/navigator_tools/nodes/estimated_object_setter.py
similarity index 100%
rename from utils/navigator_tools/nodes/estimated_object_setter.py
rename to .great_merge/utils/navigator_tools/nodes/estimated_object_setter.py
diff --git a/utils/navigator_tools/nodes/fake_action_server.py b/.great_merge/utils/navigator_tools/nodes/fake_action_server.py
similarity index 100%
rename from utils/navigator_tools/nodes/fake_action_server.py
rename to .great_merge/utils/navigator_tools/nodes/fake_action_server.py
diff --git a/utils/navigator_tools/nodes/ogrid_draw.py b/.great_merge/utils/navigator_tools/nodes/ogrid_draw.py
similarity index 100%
rename from utils/navigator_tools/nodes/ogrid_draw.py
rename to .great_merge/utils/navigator_tools/nodes/ogrid_draw.py
diff --git a/utils/navigator_tools/nodes/rviz_waypoint.py b/.great_merge/utils/navigator_tools/nodes/rviz_waypoint.py
similarity index 100%
rename from utils/navigator_tools/nodes/rviz_waypoint.py
rename to .great_merge/utils/navigator_tools/nodes/rviz_waypoint.py
diff --git a/utils/navigator_tools/nodes/tf_fudger.py b/.great_merge/utils/navigator_tools/nodes/tf_fudger.py
similarity index 100%
rename from utils/navigator_tools/nodes/tf_fudger.py
rename to .great_merge/utils/navigator_tools/nodes/tf_fudger.py
diff --git a/utils/navigator_tools/nodes/tf_to_gazebo.py b/.great_merge/utils/navigator_tools/nodes/tf_to_gazebo.py
similarity index 100%
rename from utils/navigator_tools/nodes/tf_to_gazebo.py
rename to .great_merge/utils/navigator_tools/nodes/tf_to_gazebo.py
diff --git a/utils/navigator_tools/nodes/video_player b/.great_merge/utils/navigator_tools/nodes/video_player
similarity index 100%
rename from utils/navigator_tools/nodes/video_player
rename to .great_merge/utils/navigator_tools/nodes/video_player
diff --git a/utils/navigator_tools/package.xml b/.great_merge/utils/navigator_tools/package.xml
similarity index 100%
rename from utils/navigator_tools/package.xml
rename to .great_merge/utils/navigator_tools/package.xml
diff --git a/utils/navigator_tools/setup.py b/.great_merge/utils/navigator_tools/setup.py
similarity index 100%
rename from utils/navigator_tools/setup.py
rename to .great_merge/utils/navigator_tools/setup.py
diff --git a/utils/navigator_tools/src/navigator_tools.cpp b/.great_merge/utils/navigator_tools/src/navigator_tools.cpp
similarity index 100%
rename from utils/navigator_tools/src/navigator_tools.cpp
rename to .great_merge/utils/navigator_tools/src/navigator_tools.cpp
diff --git a/utils/navigator_tools/tests/math_helpers_test.py b/.great_merge/utils/navigator_tools/tests/math_helpers_test.py
similarity index 100%
rename from utils/navigator_tools/tests/math_helpers_test.py
rename to .great_merge/utils/navigator_tools/tests/math_helpers_test.py
diff --git a/utils/sub8_ros_tools/CMakeLists.txt b/.great_merge/utils/sub8_ros_tools/CMakeLists.txt
similarity index 100%
rename from utils/sub8_ros_tools/CMakeLists.txt
rename to .great_merge/utils/sub8_ros_tools/CMakeLists.txt
diff --git a/utils/sub8_ros_tools/package.xml b/.great_merge/utils/sub8_ros_tools/package.xml
similarity index 100%
rename from utils/sub8_ros_tools/package.xml
rename to .great_merge/utils/sub8_ros_tools/package.xml
diff --git a/utils/sub8_ros_tools/readme.md b/.great_merge/utils/sub8_ros_tools/readme.md
similarity index 100%
rename from utils/sub8_ros_tools/readme.md
rename to .great_merge/utils/sub8_ros_tools/readme.md
diff --git a/utils/sub8_ros_tools/setup.py b/.great_merge/utils/sub8_ros_tools/setup.py
similarity index 100%
rename from utils/sub8_ros_tools/setup.py
rename to .great_merge/utils/sub8_ros_tools/setup.py
diff --git a/utils/sub8_ros_tools/sub8_misc_tools/__init__.py b/.great_merge/utils/sub8_ros_tools/sub8_misc_tools/__init__.py
similarity index 100%
rename from utils/sub8_ros_tools/sub8_misc_tools/__init__.py
rename to .great_merge/utils/sub8_ros_tools/sub8_misc_tools/__init__.py
diff --git a/utils/sub8_ros_tools/sub8_misc_tools/download.py b/.great_merge/utils/sub8_ros_tools/sub8_misc_tools/download.py
similarity index 100%
rename from utils/sub8_ros_tools/sub8_misc_tools/download.py
rename to .great_merge/utils/sub8_ros_tools/sub8_misc_tools/download.py
diff --git a/utils/sub8_ros_tools/sub8_misc_tools/text_effects.py b/.great_merge/utils/sub8_ros_tools/sub8_misc_tools/text_effects.py
similarity index 100%
rename from utils/sub8_ros_tools/sub8_misc_tools/text_effects.py
rename to .great_merge/utils/sub8_ros_tools/sub8_misc_tools/text_effects.py
diff --git a/utils/sub8_ros_tools/sub8_ros_tools/__init__.py b/.great_merge/utils/sub8_ros_tools/sub8_ros_tools/__init__.py
similarity index 100%
rename from utils/sub8_ros_tools/sub8_ros_tools/__init__.py
rename to .great_merge/utils/sub8_ros_tools/sub8_ros_tools/__init__.py
diff --git a/utils/sub8_ros_tools/sub8_ros_tools/func_helpers.py b/.great_merge/utils/sub8_ros_tools/sub8_ros_tools/func_helpers.py
similarity index 100%
rename from utils/sub8_ros_tools/sub8_ros_tools/func_helpers.py
rename to .great_merge/utils/sub8_ros_tools/sub8_ros_tools/func_helpers.py
diff --git a/utils/sub8_ros_tools/sub8_ros_tools/geometry_helpers.py b/.great_merge/utils/sub8_ros_tools/sub8_ros_tools/geometry_helpers.py
similarity index 100%
rename from utils/sub8_ros_tools/sub8_ros_tools/geometry_helpers.py
rename to .great_merge/utils/sub8_ros_tools/sub8_ros_tools/geometry_helpers.py
diff --git a/utils/sub8_ros_tools/sub8_ros_tools/image_helpers.py b/.great_merge/utils/sub8_ros_tools/sub8_ros_tools/image_helpers.py
similarity index 100%
rename from utils/sub8_ros_tools/sub8_ros_tools/image_helpers.py
rename to .great_merge/utils/sub8_ros_tools/sub8_ros_tools/image_helpers.py
diff --git a/utils/sub8_ros_tools/sub8_ros_tools/init_helpers.py b/.great_merge/utils/sub8_ros_tools/sub8_ros_tools/init_helpers.py
similarity index 100%
rename from utils/sub8_ros_tools/sub8_ros_tools/init_helpers.py
rename to .great_merge/utils/sub8_ros_tools/sub8_ros_tools/init_helpers.py
diff --git a/utils/sub8_ros_tools/sub8_ros_tools/msg_helpers.py b/.great_merge/utils/sub8_ros_tools/sub8_ros_tools/msg_helpers.py
similarity index 100%
rename from utils/sub8_ros_tools/sub8_ros_tools/msg_helpers.py
rename to .great_merge/utils/sub8_ros_tools/sub8_ros_tools/msg_helpers.py
diff --git a/utils/sub8_ros_tools/sub8_ros_tools/threading_helpers.py b/.great_merge/utils/sub8_ros_tools/sub8_ros_tools/threading_helpers.py
similarity index 100%
rename from utils/sub8_ros_tools/sub8_ros_tools/threading_helpers.py
rename to .great_merge/utils/sub8_ros_tools/sub8_ros_tools/threading_helpers.py
diff --git a/utils/sub8_ros_tools/sub8_tools/__init__.py b/.great_merge/utils/sub8_ros_tools/sub8_tools/__init__.py
similarity index 100%
rename from utils/sub8_ros_tools/sub8_tools/__init__.py
rename to .great_merge/utils/sub8_ros_tools/sub8_tools/__init__.py
diff --git a/utils/sub8_ros_tools/test_ros_tools/test_ros_tools.py b/.great_merge/utils/sub8_ros_tools/test_ros_tools/test_ros_tools.py
similarity index 100%
rename from utils/sub8_ros_tools/test_ros_tools/test_ros_tools.py
rename to .great_merge/utils/sub8_ros_tools/test_ros_tools/test_ros_tools.py
diff --git a/utils/uf_common/CMakeLists.txt b/.great_merge/utils/uf_common/CMakeLists.txt
similarity index 100%
rename from utils/uf_common/CMakeLists.txt
rename to .great_merge/utils/uf_common/CMakeLists.txt
diff --git a/utils/uf_common/action/MoveTo.action b/.great_merge/utils/uf_common/action/MoveTo.action
similarity index 100%
rename from utils/uf_common/action/MoveTo.action
rename to .great_merge/utils/uf_common/action/MoveTo.action
diff --git a/utils/uf_common/include/uf_common/msg_helpers.h b/.great_merge/utils/uf_common/include/uf_common/msg_helpers.h
similarity index 100%
rename from utils/uf_common/include/uf_common/msg_helpers.h
rename to .great_merge/utils/uf_common/include/uf_common/msg_helpers.h
diff --git a/utils/uf_common/include/uf_common/param_helpers.h b/.great_merge/utils/uf_common/include/uf_common/param_helpers.h
similarity index 100%
rename from utils/uf_common/include/uf_common/param_helpers.h
rename to .great_merge/utils/uf_common/include/uf_common/param_helpers.h
diff --git a/utils/uf_common/msg/Acceleration.msg b/.great_merge/utils/uf_common/msg/Acceleration.msg
similarity index 100%
rename from utils/uf_common/msg/Acceleration.msg
rename to .great_merge/utils/uf_common/msg/Acceleration.msg
diff --git a/utils/uf_common/msg/Float64Stamped.msg b/.great_merge/utils/uf_common/msg/Float64Stamped.msg
similarity index 100%
rename from utils/uf_common/msg/Float64Stamped.msg
rename to .great_merge/utils/uf_common/msg/Float64Stamped.msg
diff --git a/utils/uf_common/msg/PoseTwist.msg b/.great_merge/utils/uf_common/msg/PoseTwist.msg
similarity index 100%
rename from utils/uf_common/msg/PoseTwist.msg
rename to .great_merge/utils/uf_common/msg/PoseTwist.msg
diff --git a/utils/uf_common/msg/PoseTwistStamped.msg b/.great_merge/utils/uf_common/msg/PoseTwistStamped.msg
similarity index 100%
rename from utils/uf_common/msg/PoseTwistStamped.msg
rename to .great_merge/utils/uf_common/msg/PoseTwistStamped.msg
diff --git a/utils/uf_common/msg/VelocityMeasurement.msg b/.great_merge/utils/uf_common/msg/VelocityMeasurement.msg
similarity index 100%
rename from utils/uf_common/msg/VelocityMeasurement.msg
rename to .great_merge/utils/uf_common/msg/VelocityMeasurement.msg
diff --git a/utils/uf_common/msg/VelocityMeasurements.msg b/.great_merge/utils/uf_common/msg/VelocityMeasurements.msg
similarity index 100%
rename from utils/uf_common/msg/VelocityMeasurements.msg
rename to .great_merge/utils/uf_common/msg/VelocityMeasurements.msg
diff --git a/utils/uf_common/package.xml b/.great_merge/utils/uf_common/package.xml
similarity index 100%
rename from utils/uf_common/package.xml
rename to .great_merge/utils/uf_common/package.xml
diff --git a/utils/uf_common/scripts/param_saver b/.great_merge/utils/uf_common/scripts/param_saver
similarity index 100%
rename from utils/uf_common/scripts/param_saver
rename to .great_merge/utils/uf_common/scripts/param_saver
diff --git a/utils/uf_common/setup.py b/.great_merge/utils/uf_common/setup.py
similarity index 100%
rename from utils/uf_common/setup.py
rename to .great_merge/utils/uf_common/setup.py
diff --git a/utils/uf_common/uf_common/__init__.py b/.great_merge/utils/uf_common/uf_common/__init__.py
similarity index 100%
rename from utils/uf_common/uf_common/__init__.py
rename to .great_merge/utils/uf_common/uf_common/__init__.py

From 9c052c82b26a3ac0a811998c1d4e6dd575135e3f Mon Sep 17 00:00:00 2001
From: Kevin Allen
Date: Fri, 17 Mar 2017 00:42:24 -0400
Subject: [PATCH 220/267] REPO: move sub8 tools to mil_dev_tools

need to refactor later
---
 .../utils/sub8_ros_tools => mil_dev_tools}/CMakeLists.txt | 0
 .../sub8_ros_tools => mil_dev_tools/mil_dev_tools}/__init__.py | 0
 .../sub8_misc_tools => mil_dev_tools/mil_dev_tools}/download.py | 0
 .../mil_dev_tools}/func_helpers.py | 0
 .../mil_dev_tools}/geometry_helpers.py | 0
 .../mil_dev_tools}/image_helpers.py | 0
 .../mil_dev_tools}/init_helpers.py | 0
 .../sub8_ros_tools => mil_dev_tools/mil_dev_tools}/msg_helpers.py | 0
 .../mil_dev_tools}/text_effects.py | 0
 .../mil_dev_tools}/threading_helpers.py | 0
 {.great_merge/utils/sub8_ros_tools => mil_dev_tools}/package.xml | 0
 {.great_merge/utils/sub8_ros_tools => mil_dev_tools}/readme.md | 0
 {.great_merge/utils/sub8_ros_tools => mil_dev_tools}/setup.py | 0
 .../test_ros_tools => mil_dev_tools/test}/test_ros_tools.py | 0
 14 files changed, 0 insertions(+), 0 deletions(-)
 rename {.great_merge/utils/sub8_ros_tools => mil_dev_tools}/CMakeLists.txt (100%)
 rename {.great_merge/utils/sub8_ros_tools/sub8_ros_tools => mil_dev_tools/mil_dev_tools}/__init__.py (100%)
 rename {.great_merge/utils/sub8_ros_tools/sub8_misc_tools => mil_dev_tools/mil_dev_tools}/download.py (100%)
 rename {.great_merge/utils/sub8_ros_tools/sub8_ros_tools => mil_dev_tools/mil_dev_tools}/func_helpers.py (100%)
 rename {.great_merge/utils/sub8_ros_tools/sub8_ros_tools => mil_dev_tools/mil_dev_tools}/geometry_helpers.py (100%)
 rename {.great_merge/utils/sub8_ros_tools/sub8_ros_tools => mil_dev_tools/mil_dev_tools}/image_helpers.py (100%)
 rename {.great_merge/utils/sub8_ros_tools/sub8_ros_tools => mil_dev_tools/mil_dev_tools}/init_helpers.py (100%)
 rename {.great_merge/utils/sub8_ros_tools/sub8_ros_tools => mil_dev_tools/mil_dev_tools}/msg_helpers.py (100%)
 rename {.great_merge/utils/sub8_ros_tools/sub8_misc_tools => mil_dev_tools/mil_dev_tools}/text_effects.py (100%)
 rename {.great_merge/utils/sub8_ros_tools/sub8_ros_tools => mil_dev_tools/mil_dev_tools}/threading_helpers.py (100%)
 rename {.great_merge/utils/sub8_ros_tools => mil_dev_tools}/package.xml (100%)
 rename {.great_merge/utils/sub8_ros_tools => mil_dev_tools}/readme.md (100%)
 rename {.great_merge/utils/sub8_ros_tools => mil_dev_tools}/setup.py (100%)
 rename {.great_merge/utils/sub8_ros_tools/test_ros_tools => mil_dev_tools/test}/test_ros_tools.py (100%)

diff --git a/.great_merge/utils/sub8_ros_tools/CMakeLists.txt b/mil_dev_tools/CMakeLists.txt
similarity index 100%
rename from .great_merge/utils/sub8_ros_tools/CMakeLists.txt
rename to mil_dev_tools/CMakeLists.txt
diff --git a/.great_merge/utils/sub8_ros_tools/sub8_ros_tools/__init__.py b/mil_dev_tools/mil_dev_tools/__init__.py
similarity index 100%
rename from .great_merge/utils/sub8_ros_tools/sub8_ros_tools/__init__.py
rename to mil_dev_tools/mil_dev_tools/__init__.py
diff --git a/.great_merge/utils/sub8_ros_tools/sub8_misc_tools/download.py b/mil_dev_tools/mil_dev_tools/download.py
similarity index 100%
rename from .great_merge/utils/sub8_ros_tools/sub8_misc_tools/download.py
rename to mil_dev_tools/mil_dev_tools/download.py
diff --git a/.great_merge/utils/sub8_ros_tools/sub8_ros_tools/func_helpers.py b/mil_dev_tools/mil_dev_tools/func_helpers.py
similarity index 100%
rename from .great_merge/utils/sub8_ros_tools/sub8_ros_tools/func_helpers.py
rename to mil_dev_tools/mil_dev_tools/func_helpers.py
diff --git a/.great_merge/utils/sub8_ros_tools/sub8_ros_tools/geometry_helpers.py b/mil_dev_tools/mil_dev_tools/geometry_helpers.py
similarity index 100%
rename from .great_merge/utils/sub8_ros_tools/sub8_ros_tools/geometry_helpers.py
rename to mil_dev_tools/mil_dev_tools/geometry_helpers.py
diff --git a/.great_merge/utils/sub8_ros_tools/sub8_ros_tools/image_helpers.py b/mil_dev_tools/mil_dev_tools/image_helpers.py
similarity index 100%
rename from .great_merge/utils/sub8_ros_tools/sub8_ros_tools/image_helpers.py
rename to mil_dev_tools/mil_dev_tools/image_helpers.py
diff --git a/.great_merge/utils/sub8_ros_tools/sub8_ros_tools/init_helpers.py b/mil_dev_tools/mil_dev_tools/init_helpers.py
similarity index 100%
rename from .great_merge/utils/sub8_ros_tools/sub8_ros_tools/init_helpers.py
rename to mil_dev_tools/mil_dev_tools/init_helpers.py
diff --git a/.great_merge/utils/sub8_ros_tools/sub8_ros_tools/msg_helpers.py b/mil_dev_tools/mil_dev_tools/msg_helpers.py
similarity index 100%
rename from .great_merge/utils/sub8_ros_tools/sub8_ros_tools/msg_helpers.py
rename to mil_dev_tools/mil_dev_tools/msg_helpers.py
diff --git a/.great_merge/utils/sub8_ros_tools/sub8_misc_tools/text_effects.py b/mil_dev_tools/mil_dev_tools/text_effects.py
similarity index 100%
rename from .great_merge/utils/sub8_ros_tools/sub8_misc_tools/text_effects.py
rename to mil_dev_tools/mil_dev_tools/text_effects.py
diff --git a/.great_merge/utils/sub8_ros_tools/sub8_ros_tools/threading_helpers.py b/mil_dev_tools/mil_dev_tools/threading_helpers.py
similarity index 100%
rename from .great_merge/utils/sub8_ros_tools/sub8_ros_tools/threading_helpers.py
rename to mil_dev_tools/mil_dev_tools/threading_helpers.py
diff --git a/.great_merge/utils/sub8_ros_tools/package.xml b/mil_dev_tools/package.xml
similarity index 100%
rename from .great_merge/utils/sub8_ros_tools/package.xml
rename to mil_dev_tools/package.xml
diff --git a/.great_merge/utils/sub8_ros_tools/readme.md b/mil_dev_tools/readme.md
similarity index 100%
rename from .great_merge/utils/sub8_ros_tools/readme.md
rename to mil_dev_tools/readme.md
diff --git a/.great_merge/utils/sub8_ros_tools/setup.py b/mil_dev_tools/setup.py
similarity index 100%
rename from .great_merge/utils/sub8_ros_tools/setup.py
rename to mil_dev_tools/setup.py
diff --git a/.great_merge/utils/sub8_ros_tools/test_ros_tools/test_ros_tools.py b/mil_dev_tools/test/test_ros_tools.py
similarity index 100%
rename from .great_merge/utils/sub8_ros_tools/test_ros_tools/test_ros_tools.py
rename to mil_dev_tools/test/test_ros_tools.py

From b07a0a110996b4c5d7c928991de1908937094b86 Mon Sep 17 00:00:00 2001
From: Kevin Allen
Date: Fri, 17 Mar 2017 00:44:21 -0400
Subject: [PATCH 221/267] REPO: clear remaining sub8 tools

---
 .../utils/sub8_ros_tools/sub8_misc_tools/__init__.py | 5 -----
 .great_merge/utils/sub8_ros_tools/sub8_tools/__init__.py | 2 --
 mil_dev_tools/mil_dev_tools/__init__.py | 3 +++
 3 files changed, 3 insertions(+), 7 deletions(-)
 delete mode 100644 .great_merge/utils/sub8_ros_tools/sub8_misc_tools/__init__.py
 delete mode 100644 .great_merge/utils/sub8_ros_tools/sub8_tools/__init__.py

diff --git a/.great_merge/utils/sub8_ros_tools/sub8_misc_tools/__init__.py b/.great_merge/utils/sub8_ros_tools/sub8_misc_tools/__init__.py
deleted file mode 100644
index 86a0aa42..00000000
--- a/.great_merge/utils/sub8_ros_tools/sub8_misc_tools/__init__.py
+++ /dev/null
@@ -1,5 +0,0 @@
-# flake8: noqa
-from download import download_and_unzip
-from download import download
-
-import text_effects
diff --git a/.great_merge/utils/sub8_ros_tools/sub8_tools/__init__.py b/.great_merge/utils/sub8_ros_tools/sub8_tools/__init__.py
deleted file mode 100644
index c36a9ccb..00000000
--- a/.great_merge/utils/sub8_ros_tools/sub8_tools/__init__.py
+++ /dev/null
@@ -1,2 +0,0 @@
-from sub8_ros_tools import *
-from sub8_misc_tools import *
diff --git a/mil_dev_tools/mil_dev_tools/__init__.py b/mil_dev_tools/mil_dev_tools/__init__.py
index f0d53137..f54e520a 100644
--- a/mil_dev_tools/mil_dev_tools/__init__.py
+++ b/mil_dev_tools/mil_dev_tools/__init__.py
@@ -12,3 +12,6 @@ from threading_helpers import *
 from geometry_helpers import *
 from func_helpers import *
+from download import download_and_unzip, download
+import text_effects
+

From 318eac42e0c56baba34595eed2105cb93ba72200 Mon Sep 17 00:00:00 2001
From: Kevin Allen
Date: Fri, 17 Mar 2017 00:53:57 -0400
Subject: [PATCH 222/267] REPO: move navigator_tools files to mil_dev_tools,
 purge remaining

---
 .../navigator_tools/cfg/BoundsConfig.cfg | 14 -
 .../utils/navigator_tools/cfg/channel.yaml | 25 --
 .../utils/navigator_tools/cfg/course_a.yaml | 24 --
 .../utils/navigator_tools/cfg/course_b.yaml | 24 --
 .../utils/navigator_tools/cfg/course_c.yaml | 24 --
 .../utils/navigator_tools/cfg/sim.yaml | 24 --
 .../utils/navigator_tools/cfg/wauberg.yaml | 25 --
 .../include/navigator_tools.hpp | 32 ---
 .../missing_perception_object.py | 10 -
 .../navigator_tools/move_helper.py | 49 ----
 .../navigator_tools/object_database_helper.py | 265 ------------------
 .../navigator_tools/self_check.py | 77 -----
 .../utils/navigator_tools/nodes/boat_info.py | 70 -----
 .../utils/navigator_tools/nodes/bounds.py | 90 ------
 .../nodes/estimated_object_setter.py | 51 ----
 .../nodes/fake_action_server.py | 101 -------
 .../navigator_tools/src/navigator_tools.cpp | 56 ----
 .../CMakeLists.txt | 0
 .../mil_dev_tools}/__init__.py | 0
 .../mil_dev_tools}/bag_crawler.py | 0
 .../mil_dev_tools}/cv_debug.py | 0
 .../mil_dev_tools}/general_helpers.py | 0
 .../mil_dev_tools}/geometry_helpers.py | 0
 .../mil_dev_tools}/image_helpers.py | 0
 .../mil_dev_tools}/init_helpers.py | 0
 .../mil_dev_tools}/msg_helpers.py | 0
 .../mil_dev_tools}/rviz_helpers.py | 0
 .../mil_dev_tools}/threading_helpers.py | 0
 .../nodes/clicked_point_recorder.py | 0
 .../nodes/coordinate_conversion_server.py | 0
 .../nodes/ogrid_draw.py | 0
 .../nodes/rviz_waypoint.py | 0
 .../nodes/tf_fudger.py | 0
 .../nodes/tf_to_gazebo.py | 0
 .../nodes/video_player | 0
 .../package.xml | 0
 .../setup.py | 0
 .../test}/math_helpers_test.py | 0
 38 files changed, 961 deletions(-)
 delete mode 100755 .great_merge/utils/navigator_tools/cfg/BoundsConfig.cfg
 delete mode 100644 .great_merge/utils/navigator_tools/cfg/channel.yaml
 delete mode 100644 .great_merge/utils/navigator_tools/cfg/course_a.yaml
 delete mode 100644 .great_merge/utils/navigator_tools/cfg/course_b.yaml
 delete mode 100644 .great_merge/utils/navigator_tools/cfg/course_c.yaml
 delete mode 100644 .great_merge/utils/navigator_tools/cfg/sim.yaml
 delete mode 100644 .great_merge/utils/navigator_tools/cfg/wauberg.yaml
 delete mode 100644 .great_merge/utils/navigator_tools/include/navigator_tools.hpp
 delete mode 100644
.great_merge/utils/navigator_tools/navigator_tools/missing_perception_object.py delete mode 100755 .great_merge/utils/navigator_tools/navigator_tools/move_helper.py delete mode 100644 .great_merge/utils/navigator_tools/navigator_tools/object_database_helper.py delete mode 100755 .great_merge/utils/navigator_tools/navigator_tools/self_check.py delete mode 100755 .great_merge/utils/navigator_tools/nodes/boat_info.py delete mode 100755 .great_merge/utils/navigator_tools/nodes/bounds.py delete mode 100755 .great_merge/utils/navigator_tools/nodes/estimated_object_setter.py delete mode 100755 .great_merge/utils/navigator_tools/nodes/fake_action_server.py delete mode 100644 .great_merge/utils/navigator_tools/src/navigator_tools.cpp rename {.great_merge/utils/navigator_tools => mil_dev_tools}/CMakeLists.txt (100%) rename {.great_merge/utils/navigator_tools/navigator_tools => mil_dev_tools/mil_dev_tools}/__init__.py (100%) rename {.great_merge/utils/navigator_tools/navigator_tools => mil_dev_tools/mil_dev_tools}/bag_crawler.py (100%) rename {.great_merge/utils/navigator_tools/navigator_tools => mil_dev_tools/mil_dev_tools}/cv_debug.py (100%) rename {.great_merge/utils/navigator_tools/navigator_tools => mil_dev_tools/mil_dev_tools}/general_helpers.py (100%) rename {.great_merge/utils/navigator_tools/navigator_tools => mil_dev_tools/mil_dev_tools}/geometry_helpers.py (100%) rename {.great_merge/utils/navigator_tools/navigator_tools => mil_dev_tools/mil_dev_tools}/image_helpers.py (100%) rename {.great_merge/utils/navigator_tools/navigator_tools => mil_dev_tools/mil_dev_tools}/init_helpers.py (100%) rename {.great_merge/utils/navigator_tools/navigator_tools => mil_dev_tools/mil_dev_tools}/msg_helpers.py (100%) rename {.great_merge/utils/navigator_tools/navigator_tools => mil_dev_tools/mil_dev_tools}/rviz_helpers.py (100%) rename {.great_merge/utils/navigator_tools/navigator_tools => mil_dev_tools/mil_dev_tools}/threading_helpers.py (100%) rename {.great_merge/utils/navigator_tools => mil_dev_tools}/nodes/clicked_point_recorder.py (100%) rename {.great_merge/utils/navigator_tools => mil_dev_tools}/nodes/coordinate_conversion_server.py (100%) rename {.great_merge/utils/navigator_tools => mil_dev_tools}/nodes/ogrid_draw.py (100%) rename {.great_merge/utils/navigator_tools => mil_dev_tools}/nodes/rviz_waypoint.py (100%) rename {.great_merge/utils/navigator_tools => mil_dev_tools}/nodes/tf_fudger.py (100%) rename {.great_merge/utils/navigator_tools => mil_dev_tools}/nodes/tf_to_gazebo.py (100%) rename {.great_merge/utils/navigator_tools => mil_dev_tools}/nodes/video_player (100%) rename {.great_merge/utils/navigator_tools => mil_dev_tools}/package.xml (100%) rename {.great_merge/utils/navigator_tools => mil_dev_tools}/setup.py (100%) rename {.great_merge/utils/navigator_tools/tests => mil_dev_tools/test}/math_helpers_test.py (100%) diff --git a/.great_merge/utils/navigator_tools/cfg/BoundsConfig.cfg b/.great_merge/utils/navigator_tools/cfg/BoundsConfig.cfg deleted file mode 100755 index 2cdc6e05..00000000 --- a/.great_merge/utils/navigator_tools/cfg/BoundsConfig.cfg +++ /dev/null @@ -1,14 +0,0 @@ -#!/usr/bin/env python -PACKAGE = "navigator_tools" - -from dynamic_reconfigure.parameter_generator_catkin import * - -gen = ParameterGenerator() - -gen.add("enforce", bool_t, 0, "Whether or not to remove the ogrid around the boat.", True) -gen.add("lla_1", str_t, 0, "'(Lat, Long)' point of corner 1.", "(29.535011, -82.303323)") -gen.add("lla_2", str_t, 0, "'(Lat, Long)' point of corner 2.", "(29.534647, 
-82.304280)") -gen.add("lla_3", str_t, 0, "'(Lat, Long)' point of corner 3.", "(29.533440, -82.303577)") -gen.add("lla_4", str_t, 0, "'(Lat, Long)' point of corner 4.", "(29.533803, -82.302639)") - -exit(gen.generate(PACKAGE, "navigator_tools", "Bounds")) diff --git a/.great_merge/utils/navigator_tools/cfg/channel.yaml b/.great_merge/utils/navigator_tools/cfg/channel.yaml deleted file mode 100644 index aad9510c..00000000 --- a/.great_merge/utils/navigator_tools/cfg/channel.yaml +++ /dev/null @@ -1,25 +0,0 @@ -!!python/object/new:dynamic_reconfigure.encoding.Config -dictitems: - enforce: true - groups: !!python/object/new:dynamic_reconfigure.encoding.Config - dictitems: - enforce: true - groups: !!python/object/new:dynamic_reconfigure.encoding.Config - state: [] - id: 0 - lla_1: (21.312393, -157.889992) - lla_2: (21.310304, -157.891569) - lla_3: (21.303058, -157.888007) - lla_4: (21.304017, -157.885561) - name: Default - parameters: !!python/object/new:dynamic_reconfigure.encoding.Config - state: [] - parent: 0 - state: true - type: '' - state: [] - lla_1: (21.312393, -157.889992) - lla_2: (21.310304, -157.891569) - lla_3: (21.303058, -157.888007) - lla_4: (21.304017, -157.885561) -state: [] diff --git a/.great_merge/utils/navigator_tools/cfg/course_a.yaml b/.great_merge/utils/navigator_tools/cfg/course_a.yaml deleted file mode 100644 index efcc6894..00000000 --- a/.great_merge/utils/navigator_tools/cfg/course_a.yaml +++ /dev/null @@ -1,24 +0,0 @@ -!!python/object/new:dynamic_reconfigure.encoding.Config -dictitems: - groups: !!python/object/new:dynamic_reconfigure.encoding.Config - dictitems: - enforce: true - groups: !!python/object/new:dynamic_reconfigure.encoding.Config - state: [] - id: 0 - lla_1: (21.31139511990995, -157.88932101141714) - lla_2: (21.310799034961416, -157.8908255966243) - lla_3: (21.30948497606932, -157.89037273981896) - lla_4: (21.310017869308005, -157.88873371841683) - name: Default - parameters: !!python/object/new:dynamic_reconfigure.encoding.Config - state: [] - parent: 0 - state: true - type: '' - state: [] - lla_1: (21.31139511990995, -157.88932101141714) - lla_2: (21.310799034961416, -157.8908255966243) - lla_3: (21.30948497606932, -157.89037273981896) - lla_4: (21.310017869308005, -157.88873371841683) -state: [] diff --git a/.great_merge/utils/navigator_tools/cfg/course_b.yaml b/.great_merge/utils/navigator_tools/cfg/course_b.yaml deleted file mode 100644 index 473044f1..00000000 --- a/.great_merge/utils/navigator_tools/cfg/course_b.yaml +++ /dev/null @@ -1,24 +0,0 @@ -!!python/object/new:dynamic_reconfigure.encoding.Config -dictitems: - groups: !!python/object/new:dynamic_reconfigure.encoding.Config - dictitems: - enforce: true - groups: !!python/object/new:dynamic_reconfigure.encoding.Config - state: [] - id: 0 - lla_1: (21.3093999977932, -157.88861615540685) - lla_2: (21.308890236491465, -157.88972622804025) - lla_3: (21.307914785432155, -157.88882585130506) - lla_4: (21.30865744423127, -157.88771508071076) - name: Default - parameters: !!python/object/new:dynamic_reconfigure.encoding.Config - state: [] - parent: 0 - state: true - type: '' - state: [] - lla_1: (21.3093999977932, -157.88861615540685) - lla_2: (21.308891236491465, -157.88972622804025) - lla_3: (21.307914785432155, -157.88882585130506) - lla_4: (21.30865744423127, -157.88771508071076) -state: [] diff --git a/.great_merge/utils/navigator_tools/cfg/course_c.yaml b/.great_merge/utils/navigator_tools/cfg/course_c.yaml deleted file mode 100644 index e322536d..00000000 --- 
a/.great_merge/utils/navigator_tools/cfg/course_c.yaml +++ /dev/null @@ -1,24 +0,0 @@ -!!python/object/new:dynamic_reconfigure.encoding.Config -dictitems: - groups: !!python/object/new:dynamic_reconfigure.encoding.Config - dictitems: - enforce: true - groups: !!python/object/new:dynamic_reconfigure.encoding.Config - state: [] - id: 0 - lla_1: (21.305748423277702, -157.88798305044537) - lla_2: (21.30639710664512, -157.88695004440788) - lla_3: (21.30770272224603, -157.88763716280926) - lla_4: (21.30705404, -157.88867017) - name: Default - parameters: !!python/object/new:dynamic_reconfigure.encoding.Config - state: [] - parent: 0 - state: true - type: '' - state: [] - lla_1: (21.305748423277702, -157.88798305044537) - lla_2: (21.30639710664512, -157.88695004440788) - lla_3: (21.30770272224603, -157.88763716280926) - lla_4: (21.30705404, -157.88867017) -state: [] diff --git a/.great_merge/utils/navigator_tools/cfg/sim.yaml b/.great_merge/utils/navigator_tools/cfg/sim.yaml deleted file mode 100644 index 6b167309..00000000 --- a/.great_merge/utils/navigator_tools/cfg/sim.yaml +++ /dev/null @@ -1,24 +0,0 @@ -!!python/object/new:dynamic_reconfigure.encoding.Config -dictitems: - groups: !!python/object/new:dynamic_reconfigure.encoding.Config - dictitems: - enforce: true - groups: !!python/object/new:dynamic_reconfigure.encoding.Config - state: [] - id: 0 - lla_1: (29.5358153431059, -82.30261036235201) - lla_2: (29.5358153387807, -82.30467366185054) - lla_3: (29.5340110111349, -82.30467363859142) - lla_4: (29.5340110154601, -82.30261037571948) - name: Default - parameters: !!python/object/new:dynamic_reconfigure.encoding.Config - state: [] - parent: 0 - state: true - type: '' - state: [] - lla_1: (29.5358153431059, -82.30261036235201) - lla_2: (29.5358153387807, -82.30467366185054) - lla_3: (29.5340110111349, -82.30467363859142) - lla_4: (29.5340110154601, -82.30261037571948) -state: [] diff --git a/.great_merge/utils/navigator_tools/cfg/wauberg.yaml b/.great_merge/utils/navigator_tools/cfg/wauberg.yaml deleted file mode 100644 index 99b81fd3..00000000 --- a/.great_merge/utils/navigator_tools/cfg/wauberg.yaml +++ /dev/null @@ -1,25 +0,0 @@ -!!python/object/new:dynamic_reconfigure.encoding.Config -dictitems: - enforce: true - groups: !!python/object/new:dynamic_reconfigure.encoding.Config - dictitems: - enforce: true - groups: !!python/object/new:dynamic_reconfigure.encoding.Config - state: [] - id: 0 - lla_1: (29.535011, -82.303323) - lla_2: (29.534647, -82.304280) - lla_3: (29.533440, -82.303577) - lla_4: (29.533803, -82.302639) - name: Default - parameters: !!python/object/new:dynamic_reconfigure.encoding.Config - state: [] - parent: 0 - state: true - type: '' - state: [] - lla_1: (29.535011, -82.303323) - lla_2: (29.534647, -82.304280) - lla_3: (29.533440, -82.303577) - lla_4: (29.533803, -82.302639) -state: [] diff --git a/.great_merge/utils/navigator_tools/include/navigator_tools.hpp b/.great_merge/utils/navigator_tools/include/navigator_tools.hpp deleted file mode 100644 index 504f22a2..00000000 --- a/.great_merge/utils/navigator_tools/include/navigator_tools.hpp +++ /dev/null @@ -1,32 +0,0 @@ -#pragma once - -#include <string> -#include <vector> -#include <iostream> -#include <algorithm> -#include <ros/ros.h> -#include <opencv2/opencv.hpp> - -namespace nav{ - -static const double PI = 3.1415926535897932; - -namespace tools - { - -// Returns a vector of topic names (std::string) that end with "image_rect_color" -// if color is false then it looks for those that end in "image_rect" -std::vector<std::string> getRectifiedImageTopics(bool color = true); - -// converts raw string 
literals to std:string's -inline std::string operator "" _s(const char* str, size_t /*length*/) -{ - return std::string(str); -} - - -// Sorts contours by curve length -void sortContours(std::vector<std::vector<cv::Point>>& contour_vec, bool ascending=true); - -} // namespace nav::tools -} // namespace nav diff --git a/.great_merge/utils/navigator_tools/navigator_tools/missing_perception_object.py b/.great_merge/utils/navigator_tools/navigator_tools/missing_perception_object.py deleted file mode 100644 index 63d4054c..00000000 --- a/.great_merge/utils/navigator_tools/navigator_tools/missing_perception_object.py +++ /dev/null @@ -1,10 +0,0 @@ -class MissingPerceptionObject(Exception): - - def __init__(self, missing_object, message="An object from the database is missing"): - - # Call the base class constructor with the parameters it needs - super(MissingPerceptionObject, self).__init__(message) - - # Now for your custom code... - self.missing_object = missing_object - self.message = message \ No newline at end of file diff --git a/.great_merge/utils/navigator_tools/navigator_tools/move_helper.py b/.great_merge/utils/navigator_tools/navigator_tools/move_helper.py deleted file mode 100755 index e116ee56..00000000 --- a/.great_merge/utils/navigator_tools/navigator_tools/move_helper.py +++ /dev/null @@ -1,49 +0,0 @@ -#!/usr/bin/env python - -import rospy -import roslib -import numpy,math,tf,threading -from tf import transformations as trns -from nav_msgs.msg import Odometry -from geometry_msgs.msg import PoseStamped, Point -from navigator_tools import geometry_helpers as gh - -rospy.init_node('move_helper') - -class move_helper(object): - - def __init__(self): - - self.odom = Odometry() - self.pose_pub = rospy.Publisher("/waypoint", PoseStamped, queue_size = 0) - rospy.Subscriber("/odom", Odometry, self.odom_cb) - rospy.Subscriber("/move_helper", Point, self.move_cb) - rospy.spin() - - def odom_cb(self, msg): - self.odom = msg - - def move_cb(self, msg): - - to_send = PoseStamped() - to_send.header.frame_id = "/enu" - - R = trns.quaternion_matrix([self.odom.pose.pose.orientation.x, self.odom.pose.pose.orientation.y, self.odom.pose.pose.orientation.z, self.odom.pose.pose.orientation.w])[:2, :2] - theta = gh.quat_to_euler(self.odom.pose.pose.orientation) - - current = numpy.array([self.odom.pose.pose.position.x, self.odom.pose.pose.position.y, numpy.rad2deg(theta[2])]) - shift = numpy.concatenate((R.dot([msg.x, msg.y]), [msg.z])) - desired = current + shift - desired_quaternion = trns.quaternion_from_euler(0, 0, numpy.deg2rad(desired[2])) - - to_send.pose.position.x = desired[0] - to_send.pose.position.y = desired[1] - to_send.pose.orientation.x, to_send.pose.orientation.y, to_send.pose.orientation.z, to_send.pose.orientation.w = desired_quaternion - - self.pose_pub.publish(to_send) - - - -if __name__ == "__main__": - - helper = move_helper() diff --git a/.great_merge/utils/navigator_tools/navigator_tools/object_database_helper.py b/.great_merge/utils/navigator_tools/navigator_tools/object_database_helper.py deleted file mode 100644 index 4822a856..00000000 --- a/.great_merge/utils/navigator_tools/navigator_tools/object_database_helper.py +++ /dev/null @@ -1,265 +0,0 @@ -"""Use the DBHelper class to interface with the Database without having to deal with ROS things.""" -from navigator_msgs.msg import PerceptionObjectArray -from navigator_msgs.srv import ObjectDBQuery, ObjectDBQueryRequest -from nav_msgs.msg import Odometry -from twisted.internet import defer, threads -from txros import util -import time -import sys 
-from sets import Set -from missing_perception_object import MissingPerceptionObject -import navigator_tools as nt -import numpy as np -__author__ = "Tess Bianchi" - - -class DBHelper(object): - """DBHelper class.""" - - def __init__(self, nh): - """Initialize the DB helper class.""" - self.found = Set() - self.nh = nh - self.position = None - self.rot = None - self.new_object_subscriber = None - self.ensuring_objects = False - self.ensuring_object_dep = None - self.ensuring_object_cb = None - self.looking_for = None - self.is_found = False - - @util.cancellableInlineCallbacks - def init_(self, navigator=None): - """Initialize the txros parts of the DBHelper.""" - # self._sub_database = yield self.nh.subscribe('/database/objects', PerceptionObjectArray, self.object_cb) - self._database = yield self.nh.get_service_client("/database/requests", ObjectDBQuery) - self.navigator = navigator - if navigator is None: - self._odom_sub = yield self.nh.subscribe('/odom', Odometry, self._odom_cb) - else: - self.position = yield navigator.tx_pose - self.position = self.position[0] - defer.returnValue(self) - - def _odom_cb(self, odom): - self.position, self.rot = nt.odometry_to_numpy(odom)[0] - - @util.cancellableInlineCallbacks - def get_object_by_id(self, my_id): - print my_id - req = ObjectDBQueryRequest() - req.name = 'all' - resp = yield self._database(req) - ans = [obj for obj in resp.objects if obj.id == my_id][0] - defer.returnValue(ans) - - @util.cancellableInlineCallbacks - def begin_observing(self, cb): - """ - Get notified when a new object is added to the database. - - cb : The callback that is called when a new object is added to the database - """ - self.new_object_subscriber = cb - req = ObjectDBQueryRequest() - req.name = 'all' - resp = yield self._database(req) - req.name = 'All' - resp1 = yield self._database(req) - for o in resp.objects: - # The is upper call is because the first case is upper case if it is a 'fake' object... WHYYYYY - if o.name not in self.found: - self.found.add(o.name) - self.new_object_subscriber(o) - - for o in resp1.objects: - # The is upper call is because the first case is upper case if it is a 'fake' object... 
WHYYYYY - if o.name not in self.found: - self.found.add(o.name) - self.new_object_subscriber(o) - - @util.cancellableInlineCallbacks - def get_unknown_and_low_conf(self): - req = ObjectDBQueryRequest() - req.name = 'all' - resp = yield self._database(req) - m = [] - for o in resp.objects: - if o.name == 'unknown': - m.append(o) - elif o.confidence < 50: - pass - # m.append(o) - defer.returnValue(m) - - def set_looking_for(self, name): - self.looking_for = name - - def is_found_func(self): - if self.is_found: - self.looking_for = None - self.is_found = False - return True - return False - - def object_cb(self, perception_objects): - """Callback for the object database.""" - self.total_num = len(perception_objects.objects) - for o in perception_objects.objects: - if o.name == self.looking_for: - self.is_found = True - if o.name not in self.found: - self.found.add(o.name) - if self.new_object_subscriber is not None: - self.new_object_subscriber(o) - if self.ensuring_objects: - missings_objs = [] - names = (o.name for o in perception_objects.objects) - for o in self.ensuring_object_dep: - if o not in names: - missings_objs.append(o) - if len(missings_objs) > 0: - self.ensuring_object_cb(missings_objs) - - def remove_found(self, name): - """Remove an object that has been listed as found.""" - self.found.remove(name) - - def ensure_object_permanence(self, object_dep, cb): - """Ensure that all the objects in the object_dep list remain in the database. Call the callback if this isn't true.""" - if object_dep is None or cb is None: - return - self.ensuring_objects = True - self.ensuring_object_cb = cb - self.ensuring_object_dep = object_dep - - def stop_ensuring_object_permanence(self): - """Stop ensuring that objects remain in the database.""" - self.ensuring_objects = False - - def _wait_for_position(self, timeout=10): - count = 0 - while self.position is None: - if self.navigator is not None: - self.position = self.navigator.pose[0] - if count > timeout: - return False - count += 1 - time.sleep(1) - return True - - @util.cancellableInlineCallbacks - def get_closest_object(self, objects): - """Get the closest mission.""" - pobjs = [] - for obj in objects: - req = ObjectDBQueryRequest() - req.name = obj.name - resp = yield self._database(req) - if len(resp.objects) != 0: - pobjs.extend(resp.objects) - - if len(pobjs) == 0: - raise MissingPerceptionObject("All") - - min_dist = sys.maxint - min_obj = None - for o in pobjs: - dist = yield self._dist(o) - if dist < min_dist: - min_dist = dist - min_obj = o - - defer.returnValue(min_obj) - - @util.cancellableInlineCallbacks - def _dist(self, x): - if self.position is None: - success = yield threads.deferToThread(self._wait_for_position) - if not success: - raise Exception("There is a problem with odom.") - if self.navigator is not None: - position = yield self.navigator.tx_pose - position = position[0] - self.position = position - defer.returnValue(np.linalg.norm(nt.rosmsg_to_numpy(x.position) - self.position)) - - @util.cancellableInlineCallbacks - def get_object(self, object_name, volume_only=False, thresh=50, thresh_strict=50): - """Get an object from the database.""" - if volume_only: - req = ObjectDBQueryRequest() - req.name = object_name - resp = yield self._database(req) - if not resp.found: - raise MissingPerceptionObject(object_name) - defer.returnValue(resp.objects) - else: - req = ObjectDBQueryRequest() - req.name = "all" - resp = yield self._database(req) - closest_potential_object = None - min_dist = sys.maxint - actual_objects = [] - for 
o in resp.objects: - distance = yield self._dist(o) - if o.name == object_name and distance < thresh_strict: - actual_objects.append(o) - if distance < thresh and distance < min_dist: - min_dist = distance - closest_potential_object = o - # print [x.name for x in actual_objects] - # print closest_potential_object.name - # sys.exit() - - if len(actual_objects) == 0 and min_dist == sys.maxint: - raise MissingPerceptionObject(object_name) - - if len(actual_objects) > 1: - min_dist = sys.maxint - min_obj = None - for o in actual_objects: - dist = yield self._dist(o) - if dist < min_dist: - min_dist = dist - min_obj = o - defer.returnValue(min_obj) - - if len(actual_objects) == 1: - defer.returnValue(actual_objects[0]) - - defer.returnValue(closest_potential_object) - - def wait_for_additional_objects(self, timeout=60): - num_items = self.num_items - start = time() - while(timeout < time() - start): - if self.num_items > num_items: - return True - return False - - def set_color(self, color, name): - """Set the color of an object in the database.""" - raise NotImplementedError() - - def set_fake_position(self, pos): - """Set the position of a fake perception object.""" - raise NotImplementedError() - - @util.cancellableInlineCallbacks - def get_objects_in_radius(self, pos, radius, objects="all"): - req = ObjectDBQueryRequest() - req.name = 'all' - resp = yield self._database(req) - ans = [] - - if not resp.found: - defer.returnValue(ans) - - for o in resp.objects: - if objects == "all" or o.name in objects: - dist = np.linalg.norm(pos - nt.rosmsg_to_numpy(o.position)) - if dist < radius: - ans.append(o) - defer.returnValue(ans) diff --git a/.great_merge/utils/navigator_tools/navigator_tools/self_check.py b/.great_merge/utils/navigator_tools/navigator_tools/self_check.py deleted file mode 100755 index 118c35ea..00000000 --- a/.great_merge/utils/navigator_tools/navigator_tools/self_check.py +++ /dev/null @@ -1,77 +0,0 @@ -#!/usr/bin/env python -from twisted.internet import defer, reactor -import traceback -import signal -import txros - - -class FancyPrint(object): - GOOD = '\033[32m' - BAD = '\033[31m' - NORMAL = '\033[0m' - BOLD = '\033[1m' - - @classmethod - def okay(self, text): - print self.GOOD + text + self.NORMAL - - @classmethod - def error(self, text): - print self.BAD + text + self.NORMAL - -from sensor_msgs.msg import Image -from sensor_msgs.msg import CompressedImage -def add_camera_feeds(nh, cam_name, image_type="image_raw"): - ''' Returns subscribers to the raw and compressed `image_type` ''' - - raw = '/{}/{}'.format(cam_name, image_type) - compressed = '/{}/{}/compressed'.format(cam_name, image_type) - return nh.subscribe(raw, Image).get_next_message(), \ - nh.subscribe(compressed, CompressedImage).get_next_message() - -@txros.util.cancellableInlineCallbacks -def main(): - nh = yield txros.NodeHandle.from_argv("self_checker") - # Add deferreds to this dict to be yieleded on and checked later - topics = {} - - # General Subs - from nav_msgs.msg import Odometry - topics['odom'] = nh.subscribe('odom', Odometry).get_next_message() - - from sensor_msgs.msg import Joy - topics['joy'] = nh.subscribe('joy', Joy).get_next_message() - - # Perception Subs - topics['right_images'], topics['right_compressed'] = add_camera_feeds(nh, "right") - topics['stereo_right_images'], topics['stereo_right_compressed'] = add_camera_feeds(nh, "stereo/right") - topics['stereo_left_images'], topics['stereo_left_compressed'] = add_camera_feeds(nh, "stereo/left") - - from sensor_msgs.msg import PointCloud2 - 
topics['velodyne'] = nh.subscribe('/velodyne_points', PointCloud2).get_next_message() - - # Thrusters - from roboteq_msgs.msg import Feedback - topics['BL_motor'] = nh.subscribe('/BL_motor/Feedback', Feedback).get_next_message() - topics['BR_motor'] = nh.subscribe('/BR_motor/Feedback', Feedback).get_next_message() - topics['FL_motor'] = nh.subscribe('/FL_motor/Feedback', Feedback).get_next_message() - topics['FR_motor'] = nh.subscribe('/FR_motor/Feedback', Feedback).get_next_message() - - for name, sub in topics.iteritems(): - try: - # Bold the name so it's distinct - fancy_name = FancyPrint.BOLD + name + FancyPrint.NORMAL - print " - - - - Testing for {}".format(fancy_name) - - result = yield txros.util.wrap_timeout(sub, 2) # 2 second timeout should be good - if result is None: - FancyPrint.error("[ FAIL ] Response was None from {}".format(fancy_name)) - else: - FancyPrint.okay("[ PASS ] Response found from {}".format(fancy_name)) - - except: - FancyPrint.error("[ FAIL ] No response from {}".format(fancy_name)) - - -if __name__ == '__main__': - txros.util.launch_main(main) \ No newline at end of file diff --git a/.great_merge/utils/navigator_tools/nodes/boat_info.py b/.great_merge/utils/navigator_tools/nodes/boat_info.py deleted file mode 100755 index bdfcdcb5..00000000 --- a/.great_merge/utils/navigator_tools/nodes/boat_info.py +++ /dev/null @@ -1,70 +0,0 @@ -#!/usr/bin/env python -from geometry_msgs.msg import WrenchStamped -from navigator_msgs.srv import WrenchSelect -import rospy -from std_msgs.msg import Bool, String, Float32 -from visualization_msgs.msg import Marker, MarkerArray -from navigator_alarm import AlarmListener -import numpy as np - -class RvizStrings(object): - ID = 1337 - NS = "boat_info" - X_POS = -4.0 - - def __init__(self): - self.station_holding = False - self.kill_alarm = False - self.voltage = None - self.wrench = None - rospy.Subscriber("/wrench/current", String, self.wrench_current_cb) - rospy.Subscriber("/battery_monitor",Float32, self.battery_monitor_cb) - self.markers_pub = rospy.Publisher('/boat_info', Marker, queue_size=10) - self.kill_listener = AlarmListener('kill', self.kill_alarm_cb) - self.station_hold_listner = AlarmListener('station_hold', self.station_alarm_cb) - - def update(self): - marker = Marker() - marker.scale.x = 1 - marker.scale.y = 1 - marker.scale.z = 1 - marker.action = Marker.ADD; - marker.header.frame_id = "base_link" - marker.header.stamp = rospy.Time() - marker.type = Marker.TEXT_VIEW_FACING - marker.ns = self.NS - marker.pose.position.x = self.X_POS - marker.id = self.ID - marker.color.a = 1 - marker.color.b = 1 - if not self.voltage == None: - marker.text += "Voltage {}".format(self.voltage) - if not self.wrench == None: - marker.text += "\nWrench {}".format(self.wrench) - if self.station_holding: - marker.text += "\nStation Holding" - if self.kill_alarm: - marker.text += "\nKILLED" - self.markers_pub.publish(marker) - - def station_alarm_cb(self, alarm): - self.station_holding = not alarm.clear - self.update() - - def kill_alarm_cb(self, alarm): - self.kill_alarm = not alarm.clear - self.update() - - def wrench_current_cb(self,string): - self.wrench = string.data - self.update() - - def battery_monitor_cb(self,voltage): - self.voltage = np.round(voltage.data,2) - self.update() - - -if __name__ == "__main__": - rospy.init_node('info_markers') - arb = RvizStrings() - rospy.spin() diff --git a/.great_merge/utils/navigator_tools/nodes/bounds.py b/.great_merge/utils/navigator_tools/nodes/bounds.py deleted file mode 100755 index 
671fd246..00000000 --- a/.great_merge/utils/navigator_tools/nodes/bounds.py +++ /dev/null @@ -1,90 +0,0 @@ -#!/usr/bin/env python -import numpy as np - -import rospy -import navigator_tools -from twisted.internet import defer - -from navigator_tools.cfg import BoundsConfig -from dynamic_reconfigure.server import Server - -from navigator_msgs.srv import Bounds, BoundsResponse, \ - CoordinateConversion, CoordinateConversionRequest - - -class BoundsServer(object): - def __init__(self): - self.convert = rospy.ServiceProxy('/convert', CoordinateConversion) - - self.enforce = False - self.bounds = [] - - rospy.set_param("/bounds/enu/", [[0,0,0],[0,0,0],[0,0,0],[0,0,0]]) - rospy.set_param("/bounds/enforce", self.enforce) - - #self.convert = Converter() - Server(BoundsConfig, self.update_config) - rospy.Service('/get_bounds', Bounds, self.got_request) - - def destringify(self, lat_long_string): - if any(c.isalpha() for c in lat_long_string): - return False - - floats = map(float, lat_long_string.replace('(', '').replace(')', '').split(',')) - - if len(floats) != 3: - return False - - return floats - - def update_config(self, config, level): - self.enforce = config['enforce'] - lla_bounds_str = [config['lla_1'] + ", 0", config['lla_2'] + ", 0", - config['lla_3'] + ", 0", config['lla_4'] + ", 0"] - lla_bounds = map(self.destringify, lla_bounds_str) - self.lla_bounds = lla_bounds - - # If any returned false, don't update - if all(lla_bounds): - try: - self.bounds = [self.convert(CoordinateConversionRequest(frame="lla", point=lla)) for lla in lla_bounds] - except: - print "No service provider found" - return config - - config['enu_1_lat'] = self.bounds[0].enu[0] - config['enu_1_long'] = self.bounds[0].enu[1] - - config['enu_2_lat'] = self.bounds[1].enu[0] - config['enu_2_long'] = self.bounds[1].enu[1] - - config['enu_3_lat'] = self.bounds[2].enu[0] - config['enu_3_long'] = self.bounds[2].enu[1] - - config['enu_4_lat'] = self.bounds[3].enu[0] - config['enu_4_long'] = self.bounds[3].enu[1] - - rospy.set_param("/bounds/enu/", map(lambda bound: bound.enu[:2], self.bounds)) - else: - rospy.set_param("/bounds/enu/", [[0,0,00],[0,0,0],[0,0,0],[0,0,0]]) - - rospy.set_param("/bounds/enforce", self.enforce) - return config - - def got_request(self, req): - to_frame = "enu" if req.to_frame == '' else req.to_frame - - resp = BoundsResponse(enforce=False) - - self.bounds = [self.convert(CoordinateConversionRequest(frame="lla", point=lla)) for lla in self.lla_bounds] - bounds = [navigator_tools.numpy_to_point(getattr(response, to_frame)) for response in self.bounds] - - resp.enforce = self.enforce - resp.bounds = bounds - - return resp - -if __name__ == "__main__": - rospy.init_node("bounds_server") - bs = BoundsServer() - rospy.spin() diff --git a/.great_merge/utils/navigator_tools/nodes/estimated_object_setter.py b/.great_merge/utils/navigator_tools/nodes/estimated_object_setter.py deleted file mode 100755 index 22854053..00000000 --- a/.great_merge/utils/navigator_tools/nodes/estimated_object_setter.py +++ /dev/null @@ -1,51 +0,0 @@ -#!/usr/bin/env python -import numpy as np - -import ast -import sys -from argparse import RawTextHelpFormatter, ArgumentParser - -import txros -import navigator_tools -from twisted.internet import defer -from coordinate_conversion_server import Converter -from navigator_msgs.srv import CoordinateConversionRequest, ObjectDBQuery, ObjectDBQueryRequest - -@txros.util.cancellableInlineCallbacks -def main(name, lla): - nh = yield txros.NodeHandle.from_argv("esitmated_object_setter") - db 
= yield nh.get_service_client("/database/requests", ObjectDBQuery) - - convert = Converter() - yield convert.init(nh) - - # Convert the name to Be_Like_This - name = '_'.join(map(lambda txt: txt.title(), name.split("_"))) - - point = yield convert.request(CoordinateConversionRequest(frame="lla", point=lla)) - yield db(ObjectDBQueryRequest(cmd="{}={p[0]}, {p[1]}".format(name, p=point.enu))) - - -if __name__ == "__main__": - usage_msg = "Used to set the estimated position of objects in the database with lla positions." - desc_msg = "Pass the name of the database object and the lla position and this will set it's esimated position in the database. \n\ - ex. rosrun navigator_tools estimated_object_setter.py scan_the_code \"[85.3, -25.6]\" \n\ - ex. rosrun navigator_tools estimated_object_setter.py Shooter \"[82.32, -26.87, 2]\"" - - parser = ArgumentParser(usage=usage_msg, description=desc_msg, formatter_class=RawTextHelpFormatter) - parser.add_argument(dest='name', - help="Name of the object.") - parser.add_argument(dest='lat', type=float, - help="Latitude in degrees of the object of intrest.") - parser.add_argument(dest='long', type=float, - help="Longitude in degrees of the object of intrest.") - parser.add_argument('--alt', '-a', action='store', type=float, default=0.0, - help="Altitude in meters of the object of intrest (usually can be omitted).") - - args = parser.parse_args(sys.argv[1:]) - name = args.name - lat = args.lat - lon = args.long # `long` is python reserved :( - alt = args.alt - - txros.util.launch_main(lambda: main(name, [lat, lon, alt])) diff --git a/.great_merge/utils/navigator_tools/nodes/fake_action_server.py b/.great_merge/utils/navigator_tools/nodes/fake_action_server.py deleted file mode 100755 index c5090c57..00000000 --- a/.great_merge/utils/navigator_tools/nodes/fake_action_server.py +++ /dev/null @@ -1,101 +0,0 @@ -#!/usr/bin/env python -from __future__ import division - -import numpy as np - -import rospy -import actionlib -import tf.transformations as trns - -from nav_msgs.msg import OccupancyGrid -from geometry_msgs.msg import PoseStamped -from lqrrt_ros.msg import MoveAction, MoveFeedback, MoveResult - -import navigator_tools -from navigator_tools import fprint as _fprint -from behaviors import params - - -fprint = lambda *args, **kwargs: _fprint(title="FAKE_ACTION_SERVER", time="", *args, **kwargs) - -class FakeActionServer(object): - def __init__(self): - self.goal_pose_pub = rospy.Publisher("/lqrrt/goal", PoseStamped, queue_size=3) - - # Parameters to simulate lqrrt - self.blind = False - - self.ogrid = None - set_ogrid = lambda msg: setattr(self, "ogrid", msg) - rospy.Subscriber("/ogrid_master", OccupancyGrid, set_ogrid) - - self.move_server = actionlib.SimpleActionServer("/move_to", MoveAction, execute_cb=self.move_cb, auto_start=False) - self.move_server.start() - rospy.sleep(.1) - - fprint("Fake action server ready!", msg_color="green") - - def move_cb(self, msg): - fprint("Move request received!", msg_color="blue") - - if msg.move_type not in ['hold', 'drive', 'skid', 'circle']: - fprint("Move type '{}' not found".format(msg.move_type), msg_color='red') - self.move_server.set_aborted(MoveResult('move_type')) - return - - self.blind = msg.blind - - p = PoseStamped() - p.header = navigator_tools.make_header(frame="enu") - p.pose = msg.goal - self.goal_pose_pub.publish(p) - - # Sleep before you continue - rospy.sleep(1) - - yaw = trns.euler_from_quaternion(navigator_tools.rosmsg_to_numpy(msg.goal.orientation))[2] - if not 
self.is_feasible(np.array([msg.goal.position.x, msg.goal.position.y, yaw]), np.zeros(3)): - fprint("Not feasible", msg_color='red') - self.move_server.set_aborted(MoveResult('occupied')) - return - - fprint("Finished move!", newline=2) - self.move_server.set_succeeded(MoveResult('')) - - def is_feasible(self, x, u): - """ - Given a state x and effort u, returns a bool - that is only True if that (x, u) is feasible. - - """ - # If there's no ogrid yet or it's a blind move, anywhere is valid - if self.ogrid is None or self.blind: - return True - - # Body to world - c, s = np.cos(x[2]), np.sin(x[2]) - R = np.array([[c, -s], - [s, c]]) - - # Vehicle points in world frame - points = x[:2] + R.dot(params.vps).T - - # Check for collision - cpm = 1 / self.ogrid.info.resolution - origin = navigator_tools.pose_to_numpy(self.ogrid.info.origin)[0][:2] - indicies = (cpm * (points - origin)).astype(np.int64) - - try: - data = np.array(self.ogrid.data).reshape(self.ogrid.info.width, self.ogrid.info.height) - grid_values = data[indicies[:, 1], indicies[:, 0]] - except IndexError: - return False - - # Greater than threshold is a hit - return np.all(grid_values < 90) - - -if __name__ == "__main__": - rospy.init_node("fake_action_server") - f = FakeActionServer() - rospy.spin() diff --git a/.great_merge/utils/navigator_tools/src/navigator_tools.cpp b/.great_merge/utils/navigator_tools/src/navigator_tools.cpp deleted file mode 100644 index e5dfd960..00000000 --- a/.great_merge/utils/navigator_tools/src/navigator_tools.cpp +++ /dev/null @@ -1,56 +0,0 @@ -#include <navigator_tools.hpp> - -namespace nav{ -namespace tools{ - -using namespace std; - -vector<string> getRectifiedImageTopics(bool color) -{ - // get all currently published topics from master - ros::master::V_TopicInfo master_topics; - ros::master::getTopics(master_topics); - - // Define lambda to determine if a topic is a rectified img topic - string target_pattern; - target_pattern = color? "image_rect_color" : "image_rect"; - auto isRectImgTopic = [&target_pattern](ros::master::TopicInfo topic_info) - { - // Find last slash - size_t final_slash = topic_info.name.rfind('/'); - - // Match end of topic name to image_rect pattern - if(final_slash == string::npos) - return false; - else - { - string topic_name_end = topic_info.name.substr(final_slash + 1); - return (topic_name_end.find(target_pattern) != string::npos)? true : false; - } - }; // end lambda isRectImgTopic - - // return list of rectified image topics - vector<string> image_rect_topics; - for(ros::master::TopicInfo topic : master_topics) - { - if(isRectImgTopic(topic)) - image_rect_topics.push_back(topic.name); - } - return image_rect_topics; -} - - -void sortContours(vector<vector<cv::Point>>& contour_vec, bool ascending) -{ - auto comp_length = ascending? 
- [](vector<cv::Point> const &contour1, vector<cv::Point> const &contour2) - { return fabs(arcLength(contour1, true)) < fabs(arcLength(contour2, true)); } : - [](vector<cv::Point> const &contour1, vector<cv::Point> const &contour2) - { return fabs(arcLength(contour1, true)) > fabs(arcLength(contour2, true)); }; - - sort(contour_vec.begin(), contour_vec.end(), comp_length); -} - - -} // namespace nav::tools -} // namespace nav diff --git a/.great_merge/utils/navigator_tools/CMakeLists.txt b/mil_dev_tools/CMakeLists.txt similarity index 100% rename from .great_merge/utils/navigator_tools/CMakeLists.txt rename to mil_dev_tools/CMakeLists.txt diff --git a/.great_merge/utils/navigator_tools/navigator_tools/__init__.py b/mil_dev_tools/mil_dev_tools/__init__.py similarity index 100% rename from .great_merge/utils/navigator_tools/navigator_tools/__init__.py rename to mil_dev_tools/mil_dev_tools/__init__.py diff --git a/.great_merge/utils/navigator_tools/navigator_tools/bag_crawler.py b/mil_dev_tools/mil_dev_tools/bag_crawler.py similarity index 100% rename from .great_merge/utils/navigator_tools/navigator_tools/bag_crawler.py rename to mil_dev_tools/mil_dev_tools/bag_crawler.py diff --git a/.great_merge/utils/navigator_tools/navigator_tools/cv_debug.py b/mil_dev_tools/mil_dev_tools/cv_debug.py similarity index 100% rename from .great_merge/utils/navigator_tools/navigator_tools/cv_debug.py rename to mil_dev_tools/mil_dev_tools/cv_debug.py diff --git a/.great_merge/utils/navigator_tools/navigator_tools/general_helpers.py b/mil_dev_tools/mil_dev_tools/general_helpers.py similarity index 100% rename from .great_merge/utils/navigator_tools/navigator_tools/general_helpers.py rename to mil_dev_tools/mil_dev_tools/general_helpers.py diff --git a/.great_merge/utils/navigator_tools/navigator_tools/geometry_helpers.py b/mil_dev_tools/mil_dev_tools/geometry_helpers.py similarity index 100% rename from .great_merge/utils/navigator_tools/navigator_tools/geometry_helpers.py rename to mil_dev_tools/mil_dev_tools/geometry_helpers.py diff --git a/.great_merge/utils/navigator_tools/navigator_tools/image_helpers.py b/mil_dev_tools/mil_dev_tools/image_helpers.py similarity index 100% rename from .great_merge/utils/navigator_tools/navigator_tools/image_helpers.py rename to mil_dev_tools/mil_dev_tools/image_helpers.py diff --git a/.great_merge/utils/navigator_tools/navigator_tools/init_helpers.py b/mil_dev_tools/mil_dev_tools/init_helpers.py similarity index 100% rename from .great_merge/utils/navigator_tools/navigator_tools/init_helpers.py rename to mil_dev_tools/mil_dev_tools/init_helpers.py diff --git a/.great_merge/utils/navigator_tools/navigator_tools/msg_helpers.py b/mil_dev_tools/mil_dev_tools/msg_helpers.py similarity index 100% rename from .great_merge/utils/navigator_tools/navigator_tools/msg_helpers.py rename to mil_dev_tools/mil_dev_tools/msg_helpers.py diff --git a/.great_merge/utils/navigator_tools/navigator_tools/rviz_helpers.py b/mil_dev_tools/mil_dev_tools/rviz_helpers.py similarity index 100% rename from .great_merge/utils/navigator_tools/navigator_tools/rviz_helpers.py rename to mil_dev_tools/mil_dev_tools/rviz_helpers.py diff --git a/.great_merge/utils/navigator_tools/navigator_tools/threading_helpers.py b/mil_dev_tools/mil_dev_tools/threading_helpers.py similarity index 100% rename from .great_merge/utils/navigator_tools/navigator_tools/threading_helpers.py rename to mil_dev_tools/mil_dev_tools/threading_helpers.py diff --git a/.great_merge/utils/navigator_tools/nodes/clicked_point_recorder.py b/mil_dev_tools/nodes/clicked_point_recorder.py 
similarity index 100% rename from .great_merge/utils/navigator_tools/nodes/clicked_point_recorder.py rename to mil_dev_tools/nodes/clicked_point_recorder.py diff --git a/.great_merge/utils/navigator_tools/nodes/coordinate_conversion_server.py b/mil_dev_tools/nodes/coordinate_conversion_server.py similarity index 100% rename from .great_merge/utils/navigator_tools/nodes/coordinate_conversion_server.py rename to mil_dev_tools/nodes/coordinate_conversion_server.py diff --git a/.great_merge/utils/navigator_tools/nodes/ogrid_draw.py b/mil_dev_tools/nodes/ogrid_draw.py similarity index 100% rename from .great_merge/utils/navigator_tools/nodes/ogrid_draw.py rename to mil_dev_tools/nodes/ogrid_draw.py diff --git a/.great_merge/utils/navigator_tools/nodes/rviz_waypoint.py b/mil_dev_tools/nodes/rviz_waypoint.py similarity index 100% rename from .great_merge/utils/navigator_tools/nodes/rviz_waypoint.py rename to mil_dev_tools/nodes/rviz_waypoint.py diff --git a/.great_merge/utils/navigator_tools/nodes/tf_fudger.py b/mil_dev_tools/nodes/tf_fudger.py similarity index 100% rename from .great_merge/utils/navigator_tools/nodes/tf_fudger.py rename to mil_dev_tools/nodes/tf_fudger.py diff --git a/.great_merge/utils/navigator_tools/nodes/tf_to_gazebo.py b/mil_dev_tools/nodes/tf_to_gazebo.py similarity index 100% rename from .great_merge/utils/navigator_tools/nodes/tf_to_gazebo.py rename to mil_dev_tools/nodes/tf_to_gazebo.py diff --git a/.great_merge/utils/navigator_tools/nodes/video_player b/mil_dev_tools/nodes/video_player similarity index 100% rename from .great_merge/utils/navigator_tools/nodes/video_player rename to mil_dev_tools/nodes/video_player diff --git a/.great_merge/utils/navigator_tools/package.xml b/mil_dev_tools/package.xml similarity index 100% rename from .great_merge/utils/navigator_tools/package.xml rename to mil_dev_tools/package.xml diff --git a/.great_merge/utils/navigator_tools/setup.py b/mil_dev_tools/setup.py similarity index 100% rename from .great_merge/utils/navigator_tools/setup.py rename to mil_dev_tools/setup.py diff --git a/.great_merge/utils/navigator_tools/tests/math_helpers_test.py b/mil_dev_tools/test/math_helpers_test.py similarity index 100% rename from .great_merge/utils/navigator_tools/tests/math_helpers_test.py rename to mil_dev_tools/test/math_helpers_test.py From 2b54766939a67cfe0b67a9a91c38374550a37d25 Mon Sep 17 00:00:00 2001 From: Kevin Allen Date: Fri, 17 Mar 2017 01:41:35 -0400 Subject: [PATCH 223/267] TOOLS: change naming to mil_dev_tools in imported tools --- .gitignore | 79 +++++++++++++++ mil_dev_tools/CMakeLists.txt | 2 +- mil_dev_tools/mil_dev_tools/__init__.py | 5 +- mil_dev_tools/mil_dev_tools/bag_crawler.py | 2 +- .../mil_dev_tools/general_helpers.py | 78 --------------- mil_dev_tools/mil_dev_tools/image_helpers.py | 4 +- mil_dev_tools/mil_dev_tools/rviz_helpers.py | 13 ++- .../nodes/coordinate_conversion_server.py | 97 ------------------- mil_dev_tools/nodes/tf_fudger.py | 3 + mil_dev_tools/nodes/video_player | 4 +- mil_dev_tools/package.xml | 1 + mil_dev_tools/test/math_helpers_test.py | 4 +- mil_dev_tools/test/test_ros_tools.py | 8 +- 13 files changed, 103 insertions(+), 197 deletions(-) create mode 100644 .gitignore delete mode 100644 mil_dev_tools/mil_dev_tools/general_helpers.py delete mode 100755 mil_dev_tools/nodes/coordinate_conversion_server.py diff --git a/.gitignore b/.gitignore new file mode 100644 index 00000000..0c4e12d3 --- /dev/null +++ b/.gitignore @@ -0,0 +1,79 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] + 
+# C extensions +*.so + +# Distribution / packaging +.Python +env/ +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +*.egg-info/ +.installed.cfg +*.egg + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*,cover + +# Translations +*.mo +*.pot + +# Django stuff: +*.log + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# Binaries +*.npz + +# PyCharm +.idea/ +CMakeLists.txt + +# Other +*.mtl +*.obj +!transdec.obj + +*.dic +pedometry.txt +.DS_STORE +# Autosave files from QtCreator +*.autosave +*~ + +# Vim swaps +*.swp diff --git a/mil_dev_tools/CMakeLists.txt b/mil_dev_tools/CMakeLists.txt index 8a0d0cf5..f3ca0e70 100644 --- a/mil_dev_tools/CMakeLists.txt +++ b/mil_dev_tools/CMakeLists.txt @@ -11,7 +11,7 @@ catkin_package() # Add folders to be run by python nosetests if(CATKIN_ENABLE_TESTING) - catkin_add_nosetests(test_ros_tools) + catkin_add_nosetests(test) endif() catkin_python_setup() diff --git a/mil_dev_tools/mil_dev_tools/__init__.py b/mil_dev_tools/mil_dev_tools/__init__.py index b03fcc47..1f1a8faf 100644 --- a/mil_dev_tools/mil_dev_tools/__init__.py +++ b/mil_dev_tools/mil_dev_tools/__init__.py @@ -4,11 +4,11 @@ import threading_helpers import geometry_helpers import rviz_helpers -import general_helpers import cv_debug import bag_crawler import download import func_helpers +import text_effects from init_helpers import * from image_helpers import * @@ -16,8 +16,7 @@ from threading_helpers import * from geometry_helpers import * from rviz_helpers import * -from general_helpers import * from cv_debug import CvDebug from bag_crawler import BagCrawler from download import download_and_unzip, download -import text_effects +from text_effects import fprint diff --git a/mil_dev_tools/mil_dev_tools/bag_crawler.py b/mil_dev_tools/mil_dev_tools/bag_crawler.py index 47401ec0..0ec37b79 100644 --- a/mil_dev_tools/mil_dev_tools/bag_crawler.py +++ b/mil_dev_tools/mil_dev_tools/bag_crawler.py @@ -63,7 +63,7 @@ def image_info_topics(self, cam="right"): if __name__ == '__main__': import cv2 - bag = '/home/jacob/catkin_ws/src/SubjuGator/gnc/sub8_perception/data/bag_test.bag' + bag = 'test.bag' bc = BagCrawler(bag) for image in bc.crawl(topic=bc.image_topics[0]): diff --git a/mil_dev_tools/mil_dev_tools/general_helpers.py b/mil_dev_tools/mil_dev_tools/general_helpers.py deleted file mode 100644 index fbd87b05..00000000 --- a/mil_dev_tools/mil_dev_tools/general_helpers.py +++ /dev/null @@ -1,78 +0,0 @@ -import rospy - - -class MissingPerceptionObject(): - def __init__(self, name): - self.name = name - -class Colors(): - # Some cool stuff could happen here - red = '\033[91m' - green = '\033[92m' - yellow = '\033[93m' - blue = '\033[94m' - bold = '\033[1m' - - reset = '\033[0m' - - def __getattr__(self, arg): - # If we get a non existent color, return the reset color - return self.reset - -class Seperator(): - # TODO - def __getattr__(self, *args, **kwargs): - return - - def equals(self, _len, blank_space=False): - text = "{}".format("=" * len) - if blank_space: - return "\n{}\n".format(text) - - return text - - -def fprint(msg, time=None, title=None, newline=True, msg_color=None): - time_header = False - title_header = False - - if title is 
not None: - title_header = "{C.blue}{C.bold}{title}{C.reset}".format(C=Colors, title=title) - - if msg_color is not None: - msg = "{color}{msg}{C.reset}".format(color=getattr(Colors(), msg_color), C=Colors, msg=msg) - - if time == "": - time_header = False - - elif time is None: - try: - time = rospy.Time.now().to_sec() - time_header = "{C.bold}{time}{C.reset}".format(C=Colors, time=time) - except rospy.exceptions.ROSInitException: - pass - else: - time_header = "{C.bold}{time}{C.reset}".format(C=Colors, time=time) - - if title_header and time_header: - # (TIME) HEADER: message - to_print = "{time} {title}: {msg}" - elif time_header: - # (TIME): message - to_print = "{time}: {msg}" - elif title_header: - # HEADER: message - to_print = "{title}: {msg}" - else: - # message - to_print = "{msg}" - - if newline: - if not isinstance(newline, bool): - msg += "\n" * (newline - 1) # Since printing implicitly adds a new line - print to_print.format(time=time_header, title=title_header, msg=msg) - else: - # Note, this adds a space at the end. - print to_print.format(time=time_header, title=title_header, msg=msg), - -print_t = fprint # For legacy diff --git a/mil_dev_tools/mil_dev_tools/image_helpers.py b/mil_dev_tools/mil_dev_tools/image_helpers.py index bec0c604..281f71de 100644 --- a/mil_dev_tools/mil_dev_tools/image_helpers.py +++ b/mil_dev_tools/mil_dev_tools/image_helpers.py @@ -9,7 +9,7 @@ from os import path from cv_bridge import CvBridge, CvBridgeError from sensor_msgs.msg import Image, CameraInfo -from navigator_tools.init_helpers import wait_for_param +from mil_dev_tools.init_helpers import wait_for_param def get_parameter_range(parameter_root): @@ -113,4 +113,4 @@ def convert(self, data): self.callback(image) except CvBridgeError, e: # Intentionally absorb CvBridge Errors - rospy.logerr(e) \ No newline at end of file + rospy.logerr(e) diff --git a/mil_dev_tools/mil_dev_tools/rviz_helpers.py b/mil_dev_tools/mil_dev_tools/rviz_helpers.py index ef730081..34f00171 100644 --- a/mil_dev_tools/mil_dev_tools/rviz_helpers.py +++ b/mil_dev_tools/mil_dev_tools/rviz_helpers.py @@ -7,22 +7,21 @@ from geometry_msgs.msg import Pose, Vector3, Point from std_msgs.msg import ColorRGBA -from uf_common.msg import Float64Stamped # This needs to be deprecated -import navigator_tools +import mil_dev_tools rviz_pub = rospy.Publisher("visualization", visualization_msgs.Marker, queue_size=3) def draw_sphere(position, color, scaling=(0.11, 0.11, 0.11), m_id=4, frame='/base_link'): pose = Pose( - position=navigator_tools.numpy_to_point(position), - orientation=navigator_tools.numpy_to_quaternion([0.0, 0.0, 0.0, 1.0]) + position=mil_dev_tools.numpy_to_point(position), + orientation=mil_dev_tools.numpy_to_quaternion([0.0, 0.0, 0.0, 1.0]) ) marker = visualization_msgs.Marker( ns='wamv', id=m_id, - header=navigator_tools.make_header(frame=frame), + header=mil_dev_tools.make_header(frame=frame), type=visualization_msgs.Marker.SPHERE, action=visualization_msgs.Marker.ADD, pose=pose, @@ -49,7 +48,7 @@ def make_ray(base, direction, length, color, frame='/base_link', m_id=0, **kwarg marker = visualization_msgs.Marker( ns='wamv', id=m_id, - header=navigator_tools.make_header(frame=frame), + header=mil_dev_tools.make_header(frame=frame), type=visualization_msgs.Marker.LINE_STRIP, action=visualization_msgs.Marker.ADD, color=ColorRGBA(*color), @@ -58,4 +57,4 @@ def make_ray(base, direction, length, color, frame='/base_link', m_id=0, **kwarg lifetime=rospy.Duration(), **kwargs ) - return marker \ No newline at end of file + return 
marker diff --git a/mil_dev_tools/nodes/coordinate_conversion_server.py b/mil_dev_tools/nodes/coordinate_conversion_server.py deleted file mode 100755 index a811ca53..00000000 --- a/mil_dev_tools/nodes/coordinate_conversion_server.py +++ /dev/null @@ -1,97 +0,0 @@ -#!/usr/bin/env python -from __future__ import division - -import txros -import numpy as np -import navigator_tools -from twisted.internet import defer -from rawgps_common.gps import ecef_from_latlongheight, enu_from_ecef_tf, latlongheight_from_ecef - -from nav_msgs.msg import Odometry -from navigator_msgs.srv import CoordinateConversion, CoordinateConversionResponse - -class Converter(object): - @txros.util.cancellableInlineCallbacks - def init(self, nh, advertise_service=False): - self.nh = nh - self.odom_sub = yield self.nh.subscribe('odom', Odometry) - self.abs_odom_sub = yield self.nh.subscribe('absodom', Odometry) - - if advertise_service: - yield self.nh.advertise_service('/convert', CoordinateConversion, self.request) - - def __getattr__(self, attr): - print "Frame '{}' not found!".format(attr) - return self.not_found - - def not_found(self): - return [[np.nan, np.nan, np.nan]] * 3 - - @txros.util.cancellableInlineCallbacks - def request(self, srv): - self.point = np.array(srv.point) - - last_odom = yield self.odom_sub.get_last_message() - last_absodom = yield self.abs_odom_sub.get_last_message() - - if last_odom is None or last_absodom is None: - defer.returnValue(CoordinateConversionResponse()) - - self.enu_pos = navigator_tools.odometry_to_numpy(last_odom)[0][0] - self.ecef_pos = navigator_tools.odometry_to_numpy(last_absodom)[0][0] - - enu, ecef, lla = getattr(self, srv.frame)() - - distance = np.linalg.norm(enu - self.enu_pos) - defer.returnValue(CoordinateConversionResponse(enu=enu, ecef=ecef, lla=lla, distance=distance)) - - def lla(self): - lla = self.point - - # to ecef - ecef = ecef_from_latlongheight(*np.radians(lla)) - - # to enu - ecef_vector = ecef - self.ecef_pos - enu_vector = enu_from_ecef_tf(self.ecef_pos).dot(ecef_vector) - enu = enu_vector + self.enu_pos - - return enu, ecef, lla - - def ecef(self): - ecef = self.point - - # to lla - lla = np.degrees(latlongheight_from_ecef(ecef)) - - # to enu - ecef_vector = ecef - self.ecef_pos - enu_vector = enu_from_ecef_tf(self.ecef_pos).dot(ecef_vector) - enu = enu_vector + self.enu_pos - - return enu, ecef, lla - - def enu(self): - enu = self.point - - # to ecef - enu_vector = enu - self.enu_pos - ecef_vector = enu_from_ecef_tf(self.ecef_pos).T.dot(enu_vector) - ecef = ecef_vector + self.ecef_pos - - # to lla - lla = np.degrees(latlongheight_from_ecef(ecef)) - - return enu, ecef, lla - - -@txros.util.cancellableInlineCallbacks -def main(): - nh = yield txros.NodeHandle.from_argv("coordinate_conversion_server") - c = Converter() - yield c.init(nh, advertise_service=True) - - yield defer.Deferred() # never exit - -if __name__ == "__main__": - txros.util.launch_main(main) diff --git a/mil_dev_tools/nodes/tf_fudger.py b/mil_dev_tools/nodes/tf_fudger.py index a2a81dc8..ab7943b6 100755 --- a/mil_dev_tools/nodes/tf_fudger.py +++ b/mil_dev_tools/nodes/tf_fudger.py @@ -122,6 +122,8 @@ def reset(): if k == ord('z'): set_bars((0, 0, 0), (0, 0, 0)) + # This functionality was configured to replace lines in navigator's tf launch file, should refactor to be general later + ''' if k == ord('s'): # Save the transform in navigator_launch/launch/tf.launch replacing the line import rospkg @@ -162,6 +164,7 @@ def reset(): f.write(line) break + ''' br.sendTransform(p, q, 
rospy.Time.now(), args.tf_child, args.tf_parent) diff --git a/mil_dev_tools/nodes/video_player b/mil_dev_tools/nodes/video_player index 88a010bd..1b4e14bc 100755 --- a/mil_dev_tools/nodes/video_player +++ b/mil_dev_tools/nodes/video_player @@ -1,7 +1,7 @@ #!/usr/bin/env python """ -Usage: rosrun navigator_tools video_player _filename:=MyVideoFile.mp4 - rosrun navigator_tools video_player MyVideoFile.mp4 +Usage: rosrun mil_dev_tools video_player _filename:=MyVideoFile.mp4 + rosrun mil_dev_tools video_player MyVideoFile.mp4 Utility to play video files into a ros topic with some added conveniences like pausing and diff --git a/mil_dev_tools/package.xml b/mil_dev_tools/package.xml index 09f0e9dd..e807bd88 100644 --- a/mil_dev_tools/package.xml +++ b/mil_dev_tools/package.xml @@ -5,6 +5,7 @@ Development tools used in MIL projects Kevin Allen David Soto + Matthew Langford MIT catkin rostest diff --git a/mil_dev_tools/test/math_helpers_test.py b/mil_dev_tools/test/math_helpers_test.py index 8f3a7ad7..5ba9027b 100644 --- a/mil_dev_tools/test/math_helpers_test.py +++ b/mil_dev_tools/test/math_helpers_test.py @@ -2,8 +2,8 @@ import unittest import numpy as np from geometry_msgs.msg import Quaternion -from navigator_tools import quat_to_euler, euler_to_quat, normalize -from navigator_tools import compose_transformation +from mil_dev_tools import quat_to_euler, euler_to_quat, normalize +from mil_dev_tools import compose_transformation class TestROSTools(unittest.TestCase): diff --git a/mil_dev_tools/test/test_ros_tools.py b/mil_dev_tools/test/test_ros_tools.py index 7ced2bf7..221d2dfb 100644 --- a/mil_dev_tools/test/test_ros_tools.py +++ b/mil_dev_tools/test/test_ros_tools.py @@ -3,10 +3,10 @@ import numpy as np from geometry_msgs.msg import Quaternion, Vector3, Pose2D from sensor_msgs.msg import Image -from sub8_ros_tools.image_helpers import make_image_msg, get_image_msg -from sub8_ros_tools.msg_helpers import rosmsg_to_numpy, make_wrench_stamped -from sub8_ros_tools.threading_helpers import thread_lock -from sub8_ros_tools.geometry_helpers import skew_symmetric_cross, make_rotation, normalize +from mil_dev_tools.image_helpers import make_image_msg, get_image_msg +from mil_dev_tools.msg_helpers import rosmsg_to_numpy, make_wrench_stamped +from mil_dev_tools.threading_helpers import thread_lock +from mil_dev_tools.geometry_helpers import skew_symmetric_cross, make_rotation, normalize class TestROSTools(unittest.TestCase): From bf0cd42c88599aae0f09ae87e8834bd4a256869b Mon Sep 17 00:00:00 2001 From: Kevin Allen Date: Fri, 17 Mar 2017 01:47:49 -0400 Subject: [PATCH 224/267] TOOLS: fix import error in geometry helpers --- mil_dev_tools/mil_dev_tools/geometry_helpers.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/mil_dev_tools/mil_dev_tools/geometry_helpers.py b/mil_dev_tools/mil_dev_tools/geometry_helpers.py index 4d72ec96..f8fdd962 100644 --- a/mil_dev_tools/mil_dev_tools/geometry_helpers.py +++ b/mil_dev_tools/mil_dev_tools/geometry_helpers.py @@ -1,7 +1,8 @@ from __future__ import division import numpy as np -from tf import transformations -import tf +from tf.transformations import quaternion_from_euler, euler_from_quaternion, random_quaternion +from msg_helpers import numpy_quat_pair_to_pose +from geometry_msgs.msg import Quaternion def rotate_vect_by_quat(v, q): From bcadfb99dbbd467b82603ec69d82c3133b59369d Mon Sep 17 00:00:00 2001 From: Kevin Allen Date: Fri, 17 Mar 2017 01:53:35 -0400 Subject: [PATCH 225/267] TOOLS: fix typo in msg helpers --- 
mil_dev_tools/mil_dev_tools/msg_helpers.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mil_dev_tools/mil_dev_tools/msg_helpers.py b/mil_dev_tools/mil_dev_tools/msg_helpers.py index d357bdfe..15cca0b1 100644 --- a/mil_dev_tools/mil_dev_tools/msg_helpers.py +++ b/mil_dev_tools/mil_dev_tools/msg_helpers.py @@ -96,7 +96,7 @@ def wrench_to_numpy(wrench): return force, torque -def numpy_to_point(np_vector): +def numpy_to_point(vector): np_vector = np.array(vector) if np_vector.size == 2: np_vector = np.append(np_vector, 0) # Assume user is give 2d point From 8d5c82675efdaa3583e8eff28b5cb79ff4a5973c Mon Sep 17 00:00:00 2001 From: Kevin Allen Date: Mon, 20 Mar 2017 19:27:28 -0400 Subject: [PATCH 226/267] TOOLS: rename mil_dev_tools to mil_tools for ease more sensible to have tools package as it already contains non-development tools --- mil_dev_tools/mil_dev_tools/__init__.py | 22 ------------------- {mil_dev_tools => mil_tools}/CMakeLists.txt | 2 +- mil_tools/mil_misc_tools/__init__.py | 2 ++ .../mil_misc_tools}/download.py | 0 .../mil_misc_tools}/text_effects.py | 0 mil_tools/mil_ros_tools/__init__.py | 8 +++++++ .../mil_ros_tools}/bag_crawler.py | 0 .../mil_ros_tools}/cv_debug.py | 0 .../mil_ros_tools}/func_helpers.py | 0 .../mil_ros_tools}/geometry_helpers.py | 0 .../mil_ros_tools}/image_helpers.py | 2 +- .../mil_ros_tools}/init_helpers.py | 0 .../mil_ros_tools}/msg_helpers.py | 0 .../mil_ros_tools}/rviz_helpers.py | 10 ++++----- .../mil_ros_tools}/threading_helpers.py | 0 mil_tools/mil_tools/__init__.py | 2 ++ .../nodes/clicked_point_recorder.py | 0 .../nodes/ogrid_draw.py | 0 .../nodes/rviz_waypoint.py | 0 .../nodes/tf_fudger.py | 0 .../nodes/tf_to_gazebo.py | 0 .../nodes/video_player | 0 {mil_dev_tools => mil_tools}/package.xml | 2 +- {mil_dev_tools => mil_tools}/readme.md | 0 {mil_dev_tools => mil_tools}/setup.py | 2 +- .../test/math_helpers_test.py | 4 ++-- .../test/test_ros_tools.py | 8 +++---- 27 files changed, 27 insertions(+), 37 deletions(-) delete mode 100644 mil_dev_tools/mil_dev_tools/__init__.py rename {mil_dev_tools => mil_tools}/CMakeLists.txt (92%) create mode 100644 mil_tools/mil_misc_tools/__init__.py rename {mil_dev_tools/mil_dev_tools => mil_tools/mil_misc_tools}/download.py (100%) rename {mil_dev_tools/mil_dev_tools => mil_tools/mil_misc_tools}/text_effects.py (100%) create mode 100644 mil_tools/mil_ros_tools/__init__.py rename {mil_dev_tools/mil_dev_tools => mil_tools/mil_ros_tools}/bag_crawler.py (100%) rename {mil_dev_tools/mil_dev_tools => mil_tools/mil_ros_tools}/cv_debug.py (100%) rename {mil_dev_tools/mil_dev_tools => mil_tools/mil_ros_tools}/func_helpers.py (100%) rename {mil_dev_tools/mil_dev_tools => mil_tools/mil_ros_tools}/geometry_helpers.py (100%) rename {mil_dev_tools/mil_dev_tools => mil_tools/mil_ros_tools}/image_helpers.py (98%) rename {mil_dev_tools/mil_dev_tools => mil_tools/mil_ros_tools}/init_helpers.py (100%) rename {mil_dev_tools/mil_dev_tools => mil_tools/mil_ros_tools}/msg_helpers.py (100%) rename {mil_dev_tools/mil_dev_tools => mil_tools/mil_ros_tools}/rviz_helpers.py (86%) rename {mil_dev_tools/mil_dev_tools => mil_tools/mil_ros_tools}/threading_helpers.py (100%) create mode 100644 mil_tools/mil_tools/__init__.py rename {mil_dev_tools => mil_tools}/nodes/clicked_point_recorder.py (100%) rename {mil_dev_tools => mil_tools}/nodes/ogrid_draw.py (100%) rename {mil_dev_tools => mil_tools}/nodes/rviz_waypoint.py (100%) rename {mil_dev_tools => mil_tools}/nodes/tf_fudger.py (100%) rename {mil_dev_tools => 
mil_tools}/nodes/tf_to_gazebo.py (100%) rename {mil_dev_tools => mil_tools}/nodes/video_player (100%) rename {mil_dev_tools => mil_tools}/package.xml (94%) rename {mil_dev_tools => mil_tools}/readme.md (100%) rename {mil_dev_tools => mil_tools}/setup.py (80%) rename {mil_dev_tools => mil_tools}/test/math_helpers_test.py (95%) rename {mil_dev_tools => mil_tools}/test/test_ros_tools.py (94%) diff --git a/mil_dev_tools/mil_dev_tools/__init__.py b/mil_dev_tools/mil_dev_tools/__init__.py deleted file mode 100644 index 1f1a8faf..00000000 --- a/mil_dev_tools/mil_dev_tools/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -import image_helpers -import init_helpers -import msg_helpers -import threading_helpers -import geometry_helpers -import rviz_helpers -import cv_debug -import bag_crawler -import download -import func_helpers -import text_effects - -from init_helpers import * -from image_helpers import * -from msg_helpers import * -from threading_helpers import * -from geometry_helpers import * -from rviz_helpers import * -from cv_debug import CvDebug -from bag_crawler import BagCrawler -from download import download_and_unzip, download -from text_effects import fprint diff --git a/mil_dev_tools/CMakeLists.txt b/mil_tools/CMakeLists.txt similarity index 92% rename from mil_dev_tools/CMakeLists.txt rename to mil_tools/CMakeLists.txt index f3ca0e70..d02f9113 100644 --- a/mil_dev_tools/CMakeLists.txt +++ b/mil_tools/CMakeLists.txt @@ -1,5 +1,5 @@ cmake_minimum_required(VERSION 2.8.3) -project(mil_dev_tools) +project(mil_tools) find_package(catkin REQUIRED COMPONENTS rostest std_msgs diff --git a/mil_tools/mil_misc_tools/__init__.py b/mil_tools/mil_misc_tools/__init__.py new file mode 100644 index 00000000..fb9f9b64 --- /dev/null +++ b/mil_tools/mil_misc_tools/__init__.py @@ -0,0 +1,2 @@ +from download import * +from text_effects import * diff --git a/mil_dev_tools/mil_dev_tools/download.py b/mil_tools/mil_misc_tools/download.py similarity index 100% rename from mil_dev_tools/mil_dev_tools/download.py rename to mil_tools/mil_misc_tools/download.py diff --git a/mil_dev_tools/mil_dev_tools/text_effects.py b/mil_tools/mil_misc_tools/text_effects.py similarity index 100% rename from mil_dev_tools/mil_dev_tools/text_effects.py rename to mil_tools/mil_misc_tools/text_effects.py diff --git a/mil_tools/mil_ros_tools/__init__.py b/mil_tools/mil_ros_tools/__init__.py new file mode 100644 index 00000000..b52b2b05 --- /dev/null +++ b/mil_tools/mil_ros_tools/__init__.py @@ -0,0 +1,8 @@ +from init_helpers import * +from image_helpers import * +from msg_helpers import * +from threading_helpers import * +from geometry_helpers import * +from rviz_helpers import * +from cv_debug import * +from bag_crawler import * diff --git a/mil_dev_tools/mil_dev_tools/bag_crawler.py b/mil_tools/mil_ros_tools/bag_crawler.py similarity index 100% rename from mil_dev_tools/mil_dev_tools/bag_crawler.py rename to mil_tools/mil_ros_tools/bag_crawler.py diff --git a/mil_dev_tools/mil_dev_tools/cv_debug.py b/mil_tools/mil_ros_tools/cv_debug.py similarity index 100% rename from mil_dev_tools/mil_dev_tools/cv_debug.py rename to mil_tools/mil_ros_tools/cv_debug.py diff --git a/mil_dev_tools/mil_dev_tools/func_helpers.py b/mil_tools/mil_ros_tools/func_helpers.py similarity index 100% rename from mil_dev_tools/mil_dev_tools/func_helpers.py rename to mil_tools/mil_ros_tools/func_helpers.py diff --git a/mil_dev_tools/mil_dev_tools/geometry_helpers.py b/mil_tools/mil_ros_tools/geometry_helpers.py similarity index 100% rename from 
mil_dev_tools/mil_dev_tools/geometry_helpers.py rename to mil_tools/mil_ros_tools/geometry_helpers.py diff --git a/mil_dev_tools/mil_dev_tools/image_helpers.py b/mil_tools/mil_ros_tools/image_helpers.py similarity index 98% rename from mil_dev_tools/mil_dev_tools/image_helpers.py rename to mil_tools/mil_ros_tools/image_helpers.py index 281f71de..d40b2fe7 100644 --- a/mil_dev_tools/mil_dev_tools/image_helpers.py +++ b/mil_tools/mil_ros_tools/image_helpers.py @@ -9,7 +9,7 @@ from os import path from cv_bridge import CvBridge, CvBridgeError from sensor_msgs.msg import Image, CameraInfo -from mil_dev_tools.init_helpers import wait_for_param +from mil_ros_tools import wait_for_param def get_parameter_range(parameter_root): diff --git a/mil_dev_tools/mil_dev_tools/init_helpers.py b/mil_tools/mil_ros_tools/init_helpers.py similarity index 100% rename from mil_dev_tools/mil_dev_tools/init_helpers.py rename to mil_tools/mil_ros_tools/init_helpers.py diff --git a/mil_dev_tools/mil_dev_tools/msg_helpers.py b/mil_tools/mil_ros_tools/msg_helpers.py similarity index 100% rename from mil_dev_tools/mil_dev_tools/msg_helpers.py rename to mil_tools/mil_ros_tools/msg_helpers.py diff --git a/mil_dev_tools/mil_dev_tools/rviz_helpers.py b/mil_tools/mil_ros_tools/rviz_helpers.py similarity index 86% rename from mil_dev_tools/mil_dev_tools/rviz_helpers.py rename to mil_tools/mil_ros_tools/rviz_helpers.py index 34f00171..61bb0381 100644 --- a/mil_dev_tools/mil_dev_tools/rviz_helpers.py +++ b/mil_tools/mil_ros_tools/rviz_helpers.py @@ -8,20 +8,20 @@ from geometry_msgs.msg import Pose, Vector3, Point from std_msgs.msg import ColorRGBA -import mil_dev_tools +import mil_ros_tools rviz_pub = rospy.Publisher("visualization", visualization_msgs.Marker, queue_size=3) def draw_sphere(position, color, scaling=(0.11, 0.11, 0.11), m_id=4, frame='/base_link'): pose = Pose( - position=mil_dev_tools.numpy_to_point(position), - orientation=mil_dev_tools.numpy_to_quaternion([0.0, 0.0, 0.0, 1.0]) + position=mil_ros_tools.numpy_to_point(position), + orientation=mil_ros_tools.numpy_to_quaternion([0.0, 0.0, 0.0, 1.0]) ) marker = visualization_msgs.Marker( ns='wamv', id=m_id, - header=mil_dev_tools.make_header(frame=frame), + header=mil_ros_tools.make_header(frame=frame), type=visualization_msgs.Marker.SPHERE, action=visualization_msgs.Marker.ADD, pose=pose, @@ -48,7 +48,7 @@ def make_ray(base, direction, length, color, frame='/base_link', m_id=0, **kwarg marker = visualization_msgs.Marker( ns='wamv', id=m_id, - header=mil_dev_tools.make_header(frame=frame), + header=mil_ros_tools.make_header(frame=frame), type=visualization_msgs.Marker.LINE_STRIP, action=visualization_msgs.Marker.ADD, color=ColorRGBA(*color), diff --git a/mil_dev_tools/mil_dev_tools/threading_helpers.py b/mil_tools/mil_ros_tools/threading_helpers.py similarity index 100% rename from mil_dev_tools/mil_dev_tools/threading_helpers.py rename to mil_tools/mil_ros_tools/threading_helpers.py diff --git a/mil_tools/mil_tools/__init__.py b/mil_tools/mil_tools/__init__.py new file mode 100644 index 00000000..ebd6cc7f --- /dev/null +++ b/mil_tools/mil_tools/__init__.py @@ -0,0 +1,2 @@ +from mil_ros_tools import * +from mil_misc_tools import * diff --git a/mil_dev_tools/nodes/clicked_point_recorder.py b/mil_tools/nodes/clicked_point_recorder.py similarity index 100% rename from mil_dev_tools/nodes/clicked_point_recorder.py rename to mil_tools/nodes/clicked_point_recorder.py diff --git a/mil_dev_tools/nodes/ogrid_draw.py b/mil_tools/nodes/ogrid_draw.py similarity index 100% rename 
from mil_dev_tools/nodes/ogrid_draw.py rename to mil_tools/nodes/ogrid_draw.py diff --git a/mil_dev_tools/nodes/rviz_waypoint.py b/mil_tools/nodes/rviz_waypoint.py similarity index 100% rename from mil_dev_tools/nodes/rviz_waypoint.py rename to mil_tools/nodes/rviz_waypoint.py diff --git a/mil_dev_tools/nodes/tf_fudger.py b/mil_tools/nodes/tf_fudger.py similarity index 100% rename from mil_dev_tools/nodes/tf_fudger.py rename to mil_tools/nodes/tf_fudger.py diff --git a/mil_dev_tools/nodes/tf_to_gazebo.py b/mil_tools/nodes/tf_to_gazebo.py similarity index 100% rename from mil_dev_tools/nodes/tf_to_gazebo.py rename to mil_tools/nodes/tf_to_gazebo.py diff --git a/mil_dev_tools/nodes/video_player b/mil_tools/nodes/video_player similarity index 100% rename from mil_dev_tools/nodes/video_player rename to mil_tools/nodes/video_player diff --git a/mil_dev_tools/package.xml b/mil_tools/package.xml similarity index 94% rename from mil_dev_tools/package.xml rename to mil_tools/package.xml index e807bd88..c80f0061 100644 --- a/mil_dev_tools/package.xml +++ b/mil_tools/package.xml @@ -1,6 +1,6 @@ - mil_dev_tools + mil_tools 1.0.0 Development tools used in MIL projects Kevin Allen diff --git a/mil_dev_tools/readme.md b/mil_tools/readme.md similarity index 100% rename from mil_dev_tools/readme.md rename to mil_tools/readme.md diff --git a/mil_dev_tools/setup.py b/mil_tools/setup.py similarity index 80% rename from mil_dev_tools/setup.py rename to mil_tools/setup.py index 4139c044..0c9533d6 100644 --- a/mil_dev_tools/setup.py +++ b/mil_tools/setup.py @@ -5,7 +5,7 @@ # fetch values from package.xml setup_args = generate_distutils_setup( - packages=['mil_dev_tools'], + packages=['mil_ros_tools', 'mil_misc_tools', 'mil_tools'], ) setup(**setup_args) diff --git a/mil_dev_tools/test/math_helpers_test.py b/mil_tools/test/math_helpers_test.py similarity index 95% rename from mil_dev_tools/test/math_helpers_test.py rename to mil_tools/test/math_helpers_test.py index 5ba9027b..cbf79480 100644 --- a/mil_dev_tools/test/math_helpers_test.py +++ b/mil_tools/test/math_helpers_test.py @@ -2,8 +2,8 @@ import unittest import numpy as np from geometry_msgs.msg import Quaternion -from mil_dev_tools import quat_to_euler, euler_to_quat, normalize -from mil_dev_tools import compose_transformation +from mil_ros_tools import quat_to_euler, euler_to_quat, normalize +from mil_ros_tools import compose_transformation class TestROSTools(unittest.TestCase): diff --git a/mil_dev_tools/test/test_ros_tools.py b/mil_tools/test/test_ros_tools.py similarity index 94% rename from mil_dev_tools/test/test_ros_tools.py rename to mil_tools/test/test_ros_tools.py index 221d2dfb..6c4a518c 100644 --- a/mil_dev_tools/test/test_ros_tools.py +++ b/mil_tools/test/test_ros_tools.py @@ -3,10 +3,10 @@ import numpy as np from geometry_msgs.msg import Quaternion, Vector3, Pose2D from sensor_msgs.msg import Image -from mil_dev_tools.image_helpers import make_image_msg, get_image_msg -from mil_dev_tools.msg_helpers import rosmsg_to_numpy, make_wrench_stamped -from mil_dev_tools.threading_helpers import thread_lock -from mil_dev_tools.geometry_helpers import skew_symmetric_cross, make_rotation, normalize +from mil_ros_tools import make_image_msg, get_image_msg +from mil_ros_tools import rosmsg_to_numpy, make_wrench_stamped +from mil_ros_tools import thread_lock +from mil_ros_tools import skew_symmetric_cross, make_rotation, normalize class TestROSTools(unittest.TestCase): From 143e366e5e12aecc63b073f1fce5e5803eda719c Mon Sep 17 00:00:00 2001 From: Kevin Allen 
Date: Mon, 20 Mar 2017 20:45:11 -0400 Subject: [PATCH 227/267] REPO: add CATKIN_IGNORE to greatmerge temp dir --- .great_merge/CATKIN_IGNORE | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 .great_merge/CATKIN_IGNORE diff --git a/.great_merge/CATKIN_IGNORE b/.great_merge/CATKIN_IGNORE new file mode 100644 index 00000000..e69de29b From e417de141e07050ef7881ee70341dc69c34b0238 Mon Sep 17 00:00:00 2001 From: kingkevlar Date: Tue, 7 Mar 2017 00:21:33 -0500 Subject: [PATCH 228/267] UTILS: Add online_bagger --- .../online_bagger/CMakeLists.txt | 10 ++ .../config/topics_to_stream.yaml | 13 ++ .../online_bagger/launch/online_bagger.launch | 4 + .../online_bagger/nodes/online_bagger.py | 162 ++++++++++++++++++ utils/mil_ros_tools/online_bagger/package.xml | 13 ++ .../scripts/online_bagging_client.py | 20 +++ 6 files changed, 222 insertions(+) create mode 100644 utils/mil_ros_tools/online_bagger/CMakeLists.txt create mode 100644 utils/mil_ros_tools/online_bagger/config/topics_to_stream.yaml create mode 100644 utils/mil_ros_tools/online_bagger/launch/online_bagger.launch create mode 100755 utils/mil_ros_tools/online_bagger/nodes/online_bagger.py create mode 100644 utils/mil_ros_tools/online_bagger/package.xml create mode 100755 utils/mil_ros_tools/online_bagger/scripts/online_bagging_client.py diff --git a/utils/mil_ros_tools/online_bagger/CMakeLists.txt b/utils/mil_ros_tools/online_bagger/CMakeLists.txt new file mode 100644 index 00000000..9cfd4e99 --- /dev/null +++ b/utils/mil_ros_tools/online_bagger/CMakeLists.txt @@ -0,0 +1,10 @@ +cmake_minimum_required(VERSION 2.8.3) +project(mil_ros_tools) + +find_package(catkin REQUIRED COMPONENTS + rospy + std_srvs +) + +catkin_package() + diff --git a/utils/mil_ros_tools/online_bagger/config/topics_to_stream.yaml b/utils/mil_ros_tools/online_bagger/config/topics_to_stream.yaml new file mode 100644 index 00000000..697befb8 --- /dev/null +++ b/utils/mil_ros_tools/online_bagger/config/topics_to_stream.yaml @@ -0,0 +1,13 @@ +# list an arbitrary set of topics to subscribe to and stream: + # ram_limit is not yet implemented + +stream_time: 30 # seconds + # down_stream_rate: 10 Hz +ram_limit: 3 # gb +message_topics: ["/odom", + "/absodom", + "/stereo/left/camera_info", + "/stereo/left/image_raw", + "/stereo/right/camera_info", + "/stereo/right/image_raw", + "/velodyne_points"] diff --git a/utils/mil_ros_tools/online_bagger/launch/online_bagger.launch b/utils/mil_ros_tools/online_bagger/launch/online_bagger.launch new file mode 100644 index 00000000..76d6da6e --- /dev/null +++ b/utils/mil_ros_tools/online_bagger/launch/online_bagger.launch @@ -0,0 +1,4 @@ + + + + diff --git a/utils/mil_ros_tools/online_bagger/nodes/online_bagger.py b/utils/mil_ros_tools/online_bagger/nodes/online_bagger.py new file mode 100755 index 00000000..6de84b72 --- /dev/null +++ b/utils/mil_ros_tools/online_bagger/nodes/online_bagger.py @@ -0,0 +1,162 @@ +#!/usr/bin/env python + +import rospy +import os +import rosbag +import rostopic +from collections import deque +import yaml +import rospkg +from std_srvs.srv import SetBool + +''' +To call this service call the '/online_bagger' service with the SetBool Parameter set to True. 
+Doing this will generate a rosbag with the last x number of seconds saved
+
+for example:
+
+    bagging_server = rospy.ServiceProxy('/online_bagger', SetBool)
+    bag_status = bagging_server(True)
+'''
+
+
+class OnlineBagger(object):
+    def __init__(self):
+
+        '''
+        - makes dictionary of deques
+        - subscribes to set of topics defined by the yaml file in directory
+        - have a streaming option that can be turned on and off in the callback
+        - option to bag data
+        - add a re-subscribing option for anything that failed? maybe...?
+        '''
+
+        self.failed_topics = []
+        self.n = 0  # number of iterations
+        self.streaming = True
+        self.dumping = False
+        # self.resubscribe
+
+        self.get_params()
+        self.make_dicts()
+        self.subscribe()
+
+
+        self.bagging_service = rospy.Service('/online_bagger', SetBool, self.start_bagging)
+
+    def get_params(self):
+
+        self.topics = rospy.get_param('/online_bagger/message_topics')
+        self.ram_limit = rospy.get_param('/online_bagger/ram_limit')
+        self.stream_time = rospy.get_param('/online_bagger/stream_time')
+
+        rospy.loginfo('stream_time: %s', self.stream_time)
+        # rospy.loginfo('ram_limit: %s', self.ram_limit)
+
+    def make_dicts(self):
+        '''
+        make dictionaries with deques() that will be filled with topics
+        with all of the topics defined by the yaml
+        '''
+
+        self.topic_list = {}
+
+        for topic in self.topics:
+            self.topic_list[topic] = deque()
+
+        rospy.loginfo('topics: %s', self.topic_list.keys())
+    def subscribe(self):
+        '''
+        Immediately initiated with instance of the BagStream class:
+        subscribes to the set of topics defined in the yaml configuration file
+        '''
+        # use failed topics list that can be resubscribed to at a later time?
+
+        for topic in self.topic_list.keys():
+            msg_class = rostopic.get_topic_class(topic)
+            if msg_class[1] is None:
+                self.failed_topics.append(topic)
+            else:
+                rospy.Subscriber(topic, msg_class[0], lambda msg, _topic=topic: self.callback(msg, _topic))
+
+        rospy.loginfo('failed topics: %s', self.failed_topics)
+
+    def get_oldest_topic_time(self, topic):
+        '''
+        returns oldest timestamp for a given topic
+        '''
+        return self.topic_list[topic][0][0]
+
+    def get_newest_topic_time(self, topic):
+        '''
+        returns newest timestamp for a given topic
+        '''
+        return self.topic_list[topic][-1][0]
+
+
+    def callback(self, msg, topic):
+        '''
+        streaming callback function, stops streaming during bagging process
+        also pops msgs off the deque if length of stream is longer than
+        stream time specified in yaml parameters
+
+        stores all data in a dictionary.
+        the keys are topic names
+        the values are deques of tuples
+        each tuple is a time stamp of rostime and the message for that topic
+
+        if any errors are occurring you can uncomment the lines with
+        if self.n % 50 == 0 to display every 50th message, time diff and topic
+        this will not give every 50th message on each topic, it provides every
+        50th message across all topics.
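+
+        a worked illustration (hypothetical numbers, not from a real run):
+        with stream_time = 30, a message arriving at rostime t = 100.0 while
+        the oldest tuple for that topic is stamped t = 69.0 gives a spread of
+        31.0 s > 30 s, so one message is popped off the left of that deque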
+
+        stream, callback function does nothing if streaming is not active
+        '''
+
+        if not(self.streaming):
+            return
+
+        self.n = self.n + 1
+        self.topic_list[topic].append((rospy.get_rostime(), msg))
+
+        time_diff = self.get_newest_topic_time(topic) - self.get_oldest_topic_time(topic)
+
+        # verify streaming is popping off and recording topics
+        # if self.n % 50 == 0:
+        #     rospy.loginfo('time_diff: %s', time_diff.to_sec())
+        #     rospy.loginfo('topic: %s', topic)
+
+        if time_diff > rospy.Duration(self.stream_time):
+            self.topic_list[topic].popleft()
+        else:
+            pass
+
+    def start_bagging(self, req):
+        '''
+        dumps all data in dictionary to bags, temporarily stops streaming
+        during the bagging process, resumes streaming when over
+        '''
+        self.dumping = req.data
+        self.streaming = False
+        bag = rosbag.Bag('ooka.bag', 'w')
+
+        rospy.loginfo('dumping value: %s', self.dumping)
+        rospy.loginfo('bagging commencing!')
+
+        for topic in self.topic_list.keys():
+            rospy.loginfo('topic: %s', topic)
+            for msgs in self.topic_list[topic]:
+
+                bag.write(topic, msgs[1], t=msgs[0])
+                # rospy.loginfo('topic: %s, message type: %s, time: %s', topic, type(msgs[1]), type(msgs[0]))
+
+        bag.close()
+        rospy.loginfo('bagging finished!')
+        self.streaming = True
+
+        message = 'bagging finished'
+        return (True, message)
+
+if __name__ == "__main__":
+    rospy.init_node('online_bagger')
+    stream = OnlineBagger()
+    rospy.spin()
diff --git a/utils/mil_ros_tools/online_bagger/package.xml b/utils/mil_ros_tools/online_bagger/package.xml
new file mode 100644
index 00000000..93456a41
--- /dev/null
+++ b/utils/mil_ros_tools/online_bagger/package.xml
@@ -0,0 +1,13 @@
+
+
+  mil_ros_tools
+  1.0.0
+  The mil_ros_tools package
+  Israelle Widjaja
+  MIT
+
+  catkin
+  rospy
+  rospy
+
+
diff --git a/utils/mil_ros_tools/online_bagger/scripts/online_bagging_client.py b/utils/mil_ros_tools/online_bagger/scripts/online_bagging_client.py
new file mode 100755
index 00000000..8c80d270
--- /dev/null
+++ b/utils/mil_ros_tools/online_bagger/scripts/online_bagging_client.py
@@ -0,0 +1,20 @@
+#!/usr/bin/env python
+
+import rospy
+from std_srvs.srv import SetBool
+
+'''
+Bags the current stream of topics defined in the
+online_bagger using the service '/online_bagger'
+will return a success message along with the list of topics that were bagged
+'''
+def online_bagging_client():
+    rospy.wait_for_service('/online_bagger')
+    try:
+        bagging_server = rospy.ServiceProxy('/online_bagger', SetBool)
+        bag_status = bagging_server(True)
+    except rospy.ServiceException, e:
+        print "/online_bagger service failed: %s" % e
+
+
+online_bagging_client()
From c5d1f96695125618786825cd4f0a36622897ad78 Mon Sep 17 00:00:00 2001
From: kingkevlar
Date: Tue, 7 Mar 2017 18:17:02 -0500
Subject: [PATCH 229/267] ONLINE_BAGGER: Add resubscribe on failed topics to
 callback

---
 .../config/topics_to_stream.yaml       |  2 +-
 .../online_bagger/nodes/online_bagger.py | 74 +++++++++++++------
 .../scripts/online_bagging_client.py    |  4 +-
 3 files changed, 53 insertions(+), 27 deletions(-)

diff --git a/utils/mil_ros_tools/online_bagger/config/topics_to_stream.yaml b/utils/mil_ros_tools/online_bagger/config/topics_to_stream.yaml
index 697befb8..5b154e5d 100644
--- a/utils/mil_ros_tools/online_bagger/config/topics_to_stream.yaml
+++ b/utils/mil_ros_tools/online_bagger/config/topics_to_stream.yaml
@@ -3,7 +3,7 @@
 stream_time: 30 # seconds
   # down_stream_rate: 10 Hz
-ram_limit: 3 # gb
+ram_limit: 0.5 # gb
 message_topics: ["/odom",
                  "/absodom",
                  "/stereo/left/camera_info",
diff --git
a/utils/mil_ros_tools/online_bagger/nodes/online_bagger.py b/utils/mil_ros_tools/online_bagger/nodes/online_bagger.py
index 6de84b72..423cbfea 100755
--- a/utils/mil_ros_tools/online_bagger/nodes/online_bagger.py
+++ b/utils/mil_ros_tools/online_bagger/nodes/online_bagger.py
@@ -1,5 +1,6 @@
 #!/usr/bin/env python
 
+import resource
 import rospy
 import os
 import rosbag
@@ -19,9 +20,9 @@
     bag_status = bagging_server(True)
 '''
 
+
 class OnlineBagger(object):
     def __init__(self):
-
         '''
         - makes dictionary of deques
         - subscribes to set of topics defined by the yaml file in directory
@@ -29,8 +30,7 @@ def __init__(self):
         - option to bag data
         - add a re-subscribing option for anything that failed? maybe...?
         '''
-
-        self.failed_topics = []
+
         self.n = 0  # number of iterations
         self.streaming = True
         self.dumping = False
@@ -39,18 +39,26 @@ def __init__(self):
         self.get_params()
         self.make_dicts()
         self.subscribe()
+        rospy.loginfo('subscriber success: %s', self.subscriber_list)
 
-        self.bagging_service = rospy.Service('/online_bagger', SetBool, self.start_bagging)
+        self.bagging_service = rospy.Service('/online_bagger/dump', SetBool, self.start_bagging)
 
     def get_params(self):
 
         self.topics = rospy.get_param('/online_bagger/message_topics')
-        self.ram_limit = rospy.get_param('/online_bagger/ram_limit')
+        self.ram_limit = rospy.get_param('/online_bagger/ram_limit')*1.073e9
         self.stream_time = rospy.get_param('/online_bagger/stream_time')
 
-        rospy.loginfo('stream_time: %s', self.stream_time)
-        # rospy.loginfo('ram_limit: %s', self.ram_limit)
+        rospy.loginfo('stream_time: %s seconds', self.stream_time)
+        rospy.loginfo('ram_limit: %s gb', self.ram_limit)
+
+        # def set_ram_limit(self)
+
+        #     # http://stackoverflow.com/questions/30269238/limit-memory-usage
+        #     # _, hard = resource.getrlimit(resource.RLIMIT_DATA)
+        #     resource.setrlimit(resource.RLIMIT_DATA, (self.ram_limit, hard))
+
     def make_dicts(self):
         '''
@@ -59,26 +67,37 @@ def make_dicts(self):
         '''
 
         self.topic_list = {}
-
+        self.subscriber_list = []
        for topic in self.topics:
             self.topic_list[topic] = deque()
+            self.subscriber_list.append([topic, False])
+
+        rospy.loginfo('failed topics: %s', self.subscriber_list)
 
-        rospy.loginfo('topics: %s', self.topic_list.keys())
     def subscribe(self):
         '''
         Immediately initiated with instance of the BagStream class:
         subscribes to the set of topics defined in the yaml configuration file
+
+        Function checks subscription status True/False of each topic
+        if True: topic has already been successfully subscribed to
+        if False: topic still needs to be subscribed to and
+        subscriber will be run
+
+        Each element in self.subscriber_list is a list [topic, Bool]
+        where the Bool tells the current status of the subscriber (success/failure)
         '''
-        # use failed topics list that can be resubscribed to at a later time?
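+
+        # note on the lambda used in the subscription below: the default argument
+        # (_topic=topic[0]) pins the topic name at definition time; a bare closure
+        # over `topic` would late-bind, and every callback would see the loop's
+        # final topic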
- for topic in self.topic_list.keys(): - msg_class = rostopic.get_topic_class(topic) - if msg_class[1] is None: - self.failed_topics.append(topic) + for topic in self.subscriber_list: + if topic[1] is True: + pass else: - rospy.Subscriber(topic, msg_class[0], lambda msg, _topic=topic: self.callback(msg, _topic)) - - rospy.loginfo('failed topics: %s', self.failed_topics) + msg_class = rostopic.get_topic_class(topic[0]) + if msg_class[1] is None: + pass + else: + rospy.Subscriber(topic[0], msg_class[0], lambda msg, _topic=topic[0]: self.callback(msg, _topic)) + topic[1] = True # successful subscription def get_oldest_topic_time(self, topic): ''' @@ -92,7 +111,6 @@ def get_newest_topic_time(self, topic): ''' return self.topic_list[topic][-1][0] - def callback(self, msg, topic): ''' streaming callback function, stops streaming during bagging process @@ -108,8 +126,10 @@ def callback(self, msg, topic): if self.n % 50 == 0 to display every 50th message, time diff and topic this will not give every 50th message on each topic, it provides every 50th message accross all topics. - + stream, callback function does nothing if streaming is not active + + Continually resubscribes to any failed topics. ''' if not(self.streaming): @@ -121,15 +141,19 @@ def callback(self, msg, topic): time_diff = self.get_newest_topic_time(topic) - self.get_oldest_topic_time(topic) # verify streaming is popping off and recording topics - # if self.n % 50 == 0: - # rospy.loginfo('time_diff: %s', time_diff.to_sec()) - # rospy.loginfo('topic: %s', topic) + if self.n % 50 == 0: + rospy.loginfo('time_diff: %s', time_diff.to_sec()) + rospy.loginfo('topic: %s', topic) if time_diff > rospy.Duration(self.stream_time): self.topic_list[topic].popleft() else: pass + self.subscribe() + + + def start_bagging(self, req): ''' dumps all data in dictionary to bags, temporarily stops streaming @@ -144,10 +168,12 @@ def start_bagging(self, req): for topic in self.topic_list.keys(): rospy.loginfo('topic: %s', topic) + self.m = 0 # message number in a given topic for msgs in self.topic_list[topic]: - + self.m = self.m + 1 bag.write(topic, msgs[1], t=msgs[0]) - # rospy.loginfo('topic: %s, message type: %s, time: %s', topic, type(msgs[1]), type(msgs[0])) + if self.m % 50 == 0: # print every 50th topic, type and time + rospy.loginfo('topic: %s, message type: %s, time: %s', topic, type(msgs[1]), type(msgs[0])) bag.close() rospy.loginfo('bagging finished!') diff --git a/utils/mil_ros_tools/online_bagger/scripts/online_bagging_client.py b/utils/mil_ros_tools/online_bagger/scripts/online_bagging_client.py index 8c80d270..5974cdb0 100755 --- a/utils/mil_ros_tools/online_bagger/scripts/online_bagging_client.py +++ b/utils/mil_ros_tools/online_bagger/scripts/online_bagging_client.py @@ -9,9 +9,9 @@ will return a success message along with the list of topics that were bagged ''' def online_bagging_client(): - rospy.wait_for_service('/online_bagger') + rospy.wait_for_service('/online_bagger/dump') try: - bagging_server = rospy.ServiceProxy('/online_bagger', SetBool) + bagging_server = rospy.ServiceProxy('/online_bagger/dump', SetBool) bag_status = bagging_server(True) except rospy.ServiceException, e: print "/online_bagger service failed: %s" % e From 9aed2d2eb44a7b1e616406833df0bd016ccab3cb Mon Sep 17 00:00:00 2001 From: kingkevlar Date: Tue, 21 Mar 2017 03:26:12 -0400 Subject: [PATCH 230/267] ONLINE BAGGER: Add ram limit --- .../online_bagger/nodes/online_bagger.py | 241 ++++++++++++------ 1 file changed, 170 insertions(+), 71 deletions(-) diff --git 
a/utils/mil_ros_tools/online_bagger/nodes/online_bagger.py b/utils/mil_ros_tools/online_bagger/nodes/online_bagger.py index 423cbfea..ecf1ce87 100755 --- a/utils/mil_ros_tools/online_bagger/nodes/online_bagger.py +++ b/utils/mil_ros_tools/online_bagger/nodes/online_bagger.py @@ -1,16 +1,19 @@ #!/usr/bin/env python -import resource +from __future__ import division import rospy -import os import rosbag import rostopic -from collections import deque -import yaml -import rospkg from std_srvs.srv import SetBool +import os +import rospkg +import time +import math +import resource +from collections import deque +import itertools -''' +""" To call this service call the '/online_bagger' service with the SetBool Parameter set to True. Doing this will generate a rosbag with the last x number of seconds saved @@ -18,101 +21,134 @@ bagging_server = rospy.ServiceProxy('/online_bagger', SetBool) bag_status = bagging_server(True) -''' +""" class OnlineBagger(object): + def __init__(self): - ''' - - makes dictionary of dequeues - - subscribes to set of topics defined by the yaml file in directory - - have a streaming option that can be turned on and off in the callback - - option to bag data - - add a re subscribing option for anything that failed? maybe...? - ''' + """ + - Make dictionary of dequeues. + - Subscribe to set of topics defined by the yaml file in directory + - Have a streaming option that can be turned on and off in the callback + - Bag last x seconds of data with client + """ self.n = 0 # number of iterations self.streaming = True self.dumping = False - # self.resubscribe self.get_params() self.make_dicts() self.subscribe() rospy.loginfo('subscriber success: %s', self.subscriber_list) - - self.bagging_service = rospy.Service('/online_bagger/dump', SetBool, self.start_bagging) + self.bagging_service = rospy.Service('/online_bagger/dump', SetBool, + self.start_bagging) def get_params(self): + """Retrieve parameters from param server.""" self.topics = rospy.get_param('/online_bagger/message_topics') - self.ram_limit = rospy.get_param('/online_bagger/ram_limit')*1.073e9 + self.ram_limit = rospy.get_param('/online_bagger/ram_limit') * 1.073e9 # gigabytes to bytes self.stream_time = rospy.get_param('/online_bagger/stream_time') + self.dir = rospy.get_param('/online_bagger/bag_package_path') rospy.loginfo('stream_time: %s seconds', self.stream_time) - rospy.loginfo('ram_limit: %s gb', self.ram_limit) - - # def set_ram_limit(self) - - # # http://stackoverflow.com/questions/30269238/limit-memory-usage - # # _, hard = resource.getrlimit(resource.RLIMIT_DATA) - # resource.setrlimit(resource.RLIMIT_DATA, (self.ram_limit, hard)) - + rospy.loginfo('ram_limit: %f gb', rospy.get_param('/online_bagger/ram_limit')) def make_dicts(self): - ''' + """ make dictionaries with deques() that will be filled with topics - with all of the 5topics defined by the yaml - ''' + with all of the topics defined by the yaml. 
+ """ + class sliceable_deque(deque): + def __getitem__(self, index): + if isinstance(index, slice): + return type(self)(itertools.islice(self, index.start, + index.stop, index.step)) + return deque.__getitem__(self, index) + + + self.topic_messages = {} + self.subscriber_list = [] - self.topic_list = {} - self.subscriber_list = [] for topic in self.topics: - self.topic_list[topic] = deque() + self.topic_messages[topic] = sliceable_deque(deque()) self.subscriber_list.append([topic, False]) rospy.loginfo('failed topics: %s', self.subscriber_list) def subscribe(self): - ''' + """ Immediately initiated with instance of the BagStream class: subscribes to the set of topics defined in the yaml configuration file Function checks subscription status True/False of each topic if True: topic has already been sucessfully subscribed to - if False: topic still needs to be subscribed to and - subscriber will be run + if False: topic still needs to be subscribed to and + subscriber will be run. Each element in self.subscriber list is a list [topic, Bool] - where the Bool tells the current status of the subscriber (sucess/failure) - ''' + where the Bool tells the current status of the subscriber (sucess/failure). + """ for topic in self.subscriber_list: - if topic[1] is True: + if topic[1]: pass else: msg_class = rostopic.get_topic_class(topic[0]) - if msg_class[1] is None: + if msg_class[1] == None: pass else: - rospy.Subscriber(topic[0], msg_class[0], lambda msg, _topic=topic[0]: self.callback(msg, _topic)) - topic[1] = True # successful subscription + rospy.Subscriber(topic[0], msg_class[0], + lambda msg, _topic=topic[0]: self.bagger_callback(msg, _topic)) + + topic[1] = True # successful subscription def get_oldest_topic_time(self, topic): - ''' - returns oldest timestamp for a given topic - ''' - return self.topic_list[topic][0][0] + """ + Returns oldest timestamp for a given topic as a rospy.Time type + """ + return self.topic_messages[topic][0][0] def get_newest_topic_time(self, topic): - ''' - returns newest timestamp for a given topic - ''' - return self.topic_list[topic][-1][0] + """ + Returns newest timestamp for a given topic as a rospy.Time type + """ + return self.topic_messages[topic][-1][0] + + def get_topic_duration(self, topic): + """ + Returns current time duration of topic + """ + + return self.get_newest_topic_time(topic) - self.get_oldest_topic_time(topic) + + def get_header_time(self, msg): + if hasattr(msg, 'header'): + return msg.header.stamp + else: + return rospy.get_rostime() + + def get_ram_usage(self): + """ + return current ram usage + """ + self.usage = resource.getrusage(resource.RUSAGE_SELF) + self.ram_usage = getattr(self.usage, 'ru_maxrss') + - def callback(self, msg, topic): - ''' + return self.ram_usage*1e3 + + def set_ram_limit(self, topic): + ''' Limit ram usage by removing oldest messages ''' + + if self.get_ram_usage() > self.ram_limit: + self.topic_messages[topic].popleft() + + def bagger_callback(self, msg, topic): + """ streaming callback function, stops streaming during bagging process also pops off msgs from dequeue if length of stream is longer than stream time specified in yaml parameters @@ -130,51 +166,114 @@ def callback(self, msg, topic): stream, callback function does nothing if streaming is not active Continually resubscribes to any failed topics. 
- ''' + """ + if not(self.streaming): return self.n = self.n + 1 - self.topic_list[topic].append((rospy.get_rostime(), msg)) + time = self.get_header_time(msg) - time_diff = self.get_newest_topic_time(topic) - self.get_oldest_topic_time(topic) + self.topic_messages[topic].append((time, msg)) # verify streaming is popping off and recording topics - if self.n % 50 == 0: - rospy.loginfo('time_diff: %s', time_diff.to_sec()) - rospy.loginfo('topic: %s', topic) + # if not self.n % 100: + # rospy.loginfo('time_diff: %s', time_diff.to_sec()) + # rospy.loginfo('topic: %s', topic) + # rospy.loginfo('topic type: %s', type(msg)) + # rospy.loginfo('ram size: %s', self.ram_usage/1e3) + # rospy.loginfo('number of messages: %s', self.get_topic_message_count(topic)) - if time_diff > rospy.Duration(self.stream_time): - self.topic_list[topic].popleft() - else: - pass + self.set_ram_limit(topic) self.subscribe() + def get_subscribed_topics(self): + + return 'ooka' + + def get_topic_message_count(self, topic): + """ + Return number of messages available in a topic + """ + return len(self.topic_messages[topic]) + + def get_all_message_count(self): + # returns total number of messages across all topics + message_count = 0 + for topic in self.topic_messages.keys(): + message_count = message_count + get_topic_message_count(topic) + return message_count + + def set_bag_directory(self): + """ + create ros bag save directory + """ + rospack = rospkg.RosPack() + self.directory = os.path.join(rospack.get_path(self.dir), 'bags/') + rospy.loginfo('bag directory: %s', self.directory) + if not os.path.exists(self.directory): + os.makedirs(self.directory) + + def get_time_index(self, topic, requested_seconds): + """ + Returns the index for the time index for a topic at 'n' seconds from the end of the dequeue + + For example, to bag the last 10 seconds of data, the index for 10 seconds back from the most + recent message can be obtained with this function + + If the desired time length of the bag is greater than the available messages it will output a + message and return how ever many seconds of data are avaiable at the moment. + + seconds is of a number type (not a rospy.Time type) (ie. int, float) + """ + + message_count = self.get_topic_message_count(topic) + + topic_duration = self.get_topic_duration(topic).to_sec() + + ratio = requested_seconds / topic_duration + + index = math.floor(message_count * (1 - min(ratio, 1))) + + self.bag_report = ('The requested %s seconds were bagged', requested_seconds) + + if not index: + self.bag_report = ('Only %s seconds of the request %s seconds were available, all messages were bagged', topic_duration, requested_seconds) + return int(index) + def start_bagging(self, req): - ''' - dumps all data in dictionary to bags, temporarily stops streaming - during the bagging process, resumes streaming when over - ''' + """ + Dump all data in dictionary to bags, temporarily stops streaming + during the bagging process, resumes streaming when over. 
+ """ self.dumping = req.data self.streaming = False - bag = rosbag.Bag('ooka.bag', 'w') + + self.set_bag_directory() + + bag = rosbag.Bag(os.path.join(self.directory, str(int(time.time())) + '.bag'), 'w') rospy.loginfo('dumping value: %s', self.dumping) rospy.loginfo('bagging commencing!') - for topic in self.topic_list.keys(): + self.requested_seconds = int(10) + + for topic in self.topic_messages.keys(): rospy.loginfo('topic: %s', topic) - self.m = 0 # message number in a given topic - for msgs in self.topic_list[topic]: + self.i = self.get_time_index(topic, self.requested_seconds) + self.m = 0 # message number in a given topic + for msgs in self.topic_messages[topic][self.i:]: self.m = self.m + 1 bag.write(topic, msgs[1], t=msgs[0]) - if self.m % 50 == 0: # print every 50th topic, type and time - rospy.loginfo('topic: %s, message type: %s, time: %s', topic, type(msgs[1]), type(msgs[0])) + if self.m % 100 == 0: # print every 100th topic, type and time + rospy.loginfo('topic: %s, message type: %s, time: %s', + topic, type(msgs[1]), type(msgs[0])) + rospy.loginfo('Bag Report: %s', self.bag_report) bag.close() rospy.loginfo('bagging finished!') self.streaming = True From 18b0605ea86f475b21815fb0a47461b27a35460b Mon Sep 17 00:00:00 2001 From: kingkevlar Date: Tue, 21 Mar 2017 08:55:51 -0400 Subject: [PATCH 231/267] ONLINE_BAGGER: Enforce PEP8 --- .../online_bagger/nodes/online_bagger.py | 184 ++++++++++++------ 1 file changed, 124 insertions(+), 60 deletions(-) diff --git a/utils/mil_ros_tools/online_bagger/nodes/online_bagger.py b/utils/mil_ros_tools/online_bagger/nodes/online_bagger.py index ecf1ce87..ab3bf948 100755 --- a/utils/mil_ros_tools/online_bagger/nodes/online_bagger.py +++ b/utils/mil_ros_tools/online_bagger/nodes/online_bagger.py @@ -1,6 +1,6 @@ #!/usr/bin/env python -from __future__ import division +from __future__ import division import rospy import rosbag import rostopic @@ -12,10 +12,12 @@ import resource from collections import deque import itertools +import datetime """ -To call this service call the '/online_bagger' service with the SetBool Parameter set to True. -Doing this will generate a rosbag with the last x number of seconds saved +To call call service call the '/online_bagger' service with the SetBool +Parameter set to True. Doing this will generate a rosbag with the +last x number of seconds saved for example: @@ -28,10 +30,10 @@ class OnlineBagger(object): def __init__(self): """ - - Make dictionary of dequeues. - - Subscribe to set of topics defined by the yaml file in directory - - Have a streaming option that can be turned on and off in the callback - - Bag last x seconds of data with client + Make dictionary of dequeues. + Subscribe to set of topics defined by the yaml file in directory + Stream topics up to a given ram limit, dump oldest messages when limit is reached + Set up service to bag n seconds of data default to all of available data """ self.n = 0 # number of iterations @@ -43,25 +45,56 @@ def __init__(self): self.subscribe() rospy.loginfo('subscriber success: %s', self.subscriber_list) - self.bagging_service = rospy.Service('/online_bagger/dump', SetBool, - self.start_bagging) + self.bagging_service = rospy.Service('/online_bagger/dump', SetBool, + self.start_bagging) def get_params(self): - """Retrieve parameters from param server.""" + + """ + Retrieve parameters from param server. 
+ """ self.topics = rospy.get_param('/online_bagger/message_topics') self.ram_limit = rospy.get_param('/online_bagger/ram_limit') * 1.073e9 # gigabytes to bytes - self.stream_time = rospy.get_param('/online_bagger/stream_time') + # self.stream_time = rospy.get_param('/online_bagger/stream_time') self.dir = rospy.get_param('/online_bagger/bag_package_path') + self.bag_name = rospy.get_param('/online_bagger/bag_name') + + if not self.bag_name: + self.bag_name = str(datetime.date.today()) + '-' + str(datetime.datetime.now().time())[0:8] - rospy.loginfo('stream_time: %s seconds', self.stream_time) + # rospy.loginfo('stream_time: %s seconds', self.stream_time) rospy.loginfo('ram_limit: %f gb', rospy.get_param('/online_bagger/ram_limit')) def make_dicts(self): + """ - make dictionaries with deques() that will be filled with topics - with all of the topics defined by the yaml. + Make dictionary with sliceable deques() that will be filled with messages and time stamps. + + Subscriber list contains all of the topics and their subscription status: + A True status for a given topic corresponds to a successful subscription + A False status indicates a failed subscription + + For Example: + self.subscriber_list[0] = [['/odom', False], ['/absodom', True]] + + Indicates that '/odom' has not been subscribed to, but '/absodom' has been subscribed to + + self.topic_messages is a dictionary of deques containing a list of tuples. + Dictionary Keys contain topic names + Each value for each topic contains a deque + Each deque contains a list of tuples + Each tuple contains a message and its associated time stamp + + For example: + '/odom' is a potential topic name + self.topic_message['/odom'] is a deque + self.topic_message['/odom'][0] is the oldest message available in the deque + and its time stamp if available """ + + # Courtesy Kindall + # http://stackoverflow.com/questions/10003143/how-to-slice-a-deque class sliceable_deque(deque): def __getitem__(self, index): if isinstance(index, slice): @@ -69,9 +102,9 @@ def __getitem__(self, index): index.stop, index.step)) return deque.__getitem__(self, index) - self.topic_messages = {} - self.subscriber_list = [] + self.subscriber_list = [] + for topic in self.topics: self.topic_messages[topic] = sliceable_deque(deque()) @@ -80,9 +113,9 @@ def __getitem__(self, index): rospy.loginfo('failed topics: %s', self.subscriber_list) def subscribe(self): + """ - Immediately initiated with instance of the BagStream class: - subscribes to the set of topics defined in the yaml configuration file + Subscribe to the topics defined in the yaml configuration file Function checks subscription status True/False of each topic if True: topic has already been sucessfully subscribed to @@ -94,55 +127,69 @@ def subscribe(self): """ for topic in self.subscriber_list: - if topic[1]: - pass - else: + if not topic[1]: msg_class = rostopic.get_topic_class(topic[0]) - if msg_class[1] == None: + if msg_class[1] is None: pass else: - rospy.Subscriber(topic[0], msg_class[0], - lambda msg, _topic=topic[0]: self.bagger_callback(msg, _topic)) + rospy.Subscriber(topic[0], msg_class[0], + lambda msg, _topic=topic[0]: self.bagger_callback(msg, _topic)) topic[1] = True # successful subscription def get_oldest_topic_time(self, topic): + """ - Returns oldest timestamp for a given topic as a rospy.Time type + Return oldest timestamp for a given topic as a rospy.Time type """ + return self.topic_messages[topic][0][0] def get_newest_topic_time(self, topic): + """ - Returns newest timestamp for a given topic 
as a rospy.Time type + Return newest timestamp for a given topic as a rospy.Time type """ + return self.topic_messages[topic][-1][0] def get_topic_duration(self, topic): + """ - Returns current time duration of topic + Return current time duration of topic """ return self.get_newest_topic_time(topic) - self.get_oldest_topic_time(topic) def get_header_time(self, msg): + + """ + Retrieve header time if available + """ + if hasattr(msg, 'header'): return msg.header.stamp else: return rospy.get_rostime() - + def get_ram_usage(self): + """ - return current ram usage + Return current ram usage in bytes """ + self.usage = resource.getrusage(resource.RUSAGE_SELF) - self.ram_usage = getattr(self.usage, 'ru_maxrss') + # getattr returns ram usage in kilobytes + self.ram_usage = getattr(self.usage, 'ru_maxrss') return self.ram_usage*1e3 def set_ram_limit(self, topic): - ''' Limit ram usage by removing oldest messages ''' + + """ + Limit ram usage by removing oldest messages + """ if self.get_ram_usage() > self.ram_limit: self.topic_messages[topic].popleft() @@ -168,8 +215,7 @@ def bagger_callback(self, msg, topic): Continually resubscribes to any failed topics. """ - - if not(self.streaming): + if not self.streaming: return self.n = self.n + 1 @@ -177,74 +223,94 @@ def bagger_callback(self, msg, topic): self.topic_messages[topic].append((time, msg)) + self.set_ram_limit(topic) + # verify streaming is popping off and recording topics - # if not self.n % 100: + if not self.n % 100: # rospy.loginfo('time_diff: %s', time_diff.to_sec()) # rospy.loginfo('topic: %s', topic) # rospy.loginfo('topic type: %s', type(msg)) - # rospy.loginfo('ram size: %s', self.ram_usage/1e3) + rospy.loginfo('ram size: %s', self.ram_usage/1e3) # rospy.loginfo('number of messages: %s', self.get_topic_message_count(topic)) - self.set_ram_limit(topic) - self.subscribe() + # self.subscribe() # loop def get_subscribed_topics(self): - return 'ooka' + """ + Return subscribed and failed topics + """ + + return self.subscriber_list + + def get_number_subscribed_topics(self): + pass def get_topic_message_count(self, topic): + """ Return number of messages available in a topic """ + return len(self.topic_messages[topic]) def get_all_message_count(self): - # returns total number of messages across all topics + + """ + Returns total number of messages across all topics + """ + message_count = 0 for topic in self.topic_messages.keys(): message_count = message_count + get_topic_message_count(topic) return message_count def set_bag_directory(self): + """ - create ros bag save directory + Create ros bag save directory """ + rospack = rospkg.RosPack() self.directory = os.path.join(rospack.get_path(self.dir), 'bags/') rospy.loginfo('bag directory: %s', self.directory) + rospy.loginfo('bag name: %s', self.bag_name) if not os.path.exists(self.directory): os.makedirs(self.directory) + self.bag = rosbag.Bag(os.path.join(self.directory, self.bag_name + '.bag'), 'w') + def get_time_index(self, topic, requested_seconds): """ - Returns the index for the time index for a topic at 'n' seconds from the end of the dequeue - + Return the index for the time index for a topic at 'n' seconds from the end of the dequeue + For example, to bag the last 10 seconds of data, the index for 10 seconds back from the most - recent message can be obtained with this function - - If the desired time length of the bag is greater than the available messages it will output a + recent message can be obtained with this function. 
+
+        The number of requested seconds should be the number of seconds desired from
+        the end of the deque (i.e. requested_seconds = 10).
+
+        If the desired time length of the bag is greater than the available messages, it will output a
         message and return however many seconds of data are available at the moment.

-        seconds is of a number type (not a rospy.Time type) (ie. int, float)
+        Seconds is of a number type (not a rospy.Time type) (i.e. int, float)
         """

-        message_count = self.get_topic_message_count(topic)
-
         topic_duration = self.get_topic_duration(topic).to_sec()

         ratio = requested_seconds / topic_duration
-
-        index = math.floor(message_count * (1 - min(ratio, 1)))
+        index = math.floor(self.get_topic_message_count(topic) * (1 - min(ratio, 1)))

-        self.bag_report = ('The requested %s seconds were bagged', requested_seconds)
+        self.bag_report = 'The requested %s seconds were bagged' % requested_seconds

         if not index:
-            self.bag_report = ('Only %s seconds of the request %s seconds were available, all messages were bagged', topic_duration, requested_seconds)
+            self.bag_report = 'Only %s seconds of the requested %s seconds were available, all \
+                messages were bagged' % (topic_duration, requested_seconds)

         return int(index)
-
+
     def start_bagging(self, req):
         """
         Dump all data in dictionary to bags, temporarily stops streaming
@@ -255,8 +321,6 @@ def start_bagging(self, req):

         self.set_bag_directory()

-        bag = rosbag.Bag(os.path.join(self.directory, str(int(time.time())) + '.bag'), 'w')
-
         rospy.loginfo('dumping value: %s', self.dumping)
         rospy.loginfo('bagging commencing!')

@@ -268,13 +332,13 @@ def start_bagging(self, req):
             self.m = 0  # message number in a given topic
             for msgs in self.topic_messages[topic][self.i:]:
                 self.m = self.m + 1
-                bag.write(topic, msgs[1], t=msgs[0])
-                if self.m % 100 == 0:  # print every 100th topic, type and time
-                    rospy.loginfo('topic: %s, message type: %s, time: %s',
-                                  topic, type(msgs[1]), type(msgs[0]))
+                self.bag.write(topic, msgs[1], t=msgs[0])
+                if not self.m % 100:  # print every 100th topic, type and time
+                    rospy.loginfo('topic: %s, message type: %s, time: %s',
+                                  topic, type(msgs[1]), type(msgs[0]))

         rospy.loginfo('Bag Report: %s', self.bag_report)
-        bag.close()
+        self.bag.close()

         rospy.loginfo('bagging finished!')
         self.streaming = True

From dd3237f20d2b726d1ff70e7d0209c9f350a75bf4 Mon Sep 17 00:00:00 2001
From: kingkevlar
Date: Tue, 21 Mar 2017 18:09:53 -0400
Subject: [PATCH 232/267] ONLINE_BAGGER: Add custom service BaggerCommands.srv

---
 .../online_bagger/CMakeLists.txt              |   9 +-
 .../config/online_bagger_example.yaml         |  16 ++
 .../config/topics_to_stream.yaml              |  13 --
 .../online_bagger/launch/online_bagger.launch |   4 -
 .../launch/online_bagger_example.launch       |   4 +
 .../online_bagger/nodes/online_bagger.py      | 153 ++++++++++++------
 .../scripts/online_bagger_client_example.py   |  38 +++++
 .../scripts/online_bagging_client.py          |  20 ---
 .../online_bagger/srv/BaggerCommands.srv      |   4 +
 9 files changed, 171 insertions(+), 90 deletions(-)
 create mode 100644 utils/mil_ros_tools/online_bagger/config/online_bagger_example.yaml
 delete mode 100644 utils/mil_ros_tools/online_bagger/config/topics_to_stream.yaml
 delete mode 100644 utils/mil_ros_tools/online_bagger/launch/online_bagger.launch
 create mode 100644 utils/mil_ros_tools/online_bagger/launch/online_bagger_example.launch
 create mode 100755 utils/mil_ros_tools/online_bagger/scripts/online_bagger_client_example.py
 delete mode 100755 utils/mil_ros_tools/online_bagger/scripts/online_bagging_client.py
 create mode 100644
utils/mil_ros_tools/online_bagger/srv/BaggerCommands.srv diff --git a/utils/mil_ros_tools/online_bagger/CMakeLists.txt b/utils/mil_ros_tools/online_bagger/CMakeLists.txt index 9cfd4e99..782eb679 100644 --- a/utils/mil_ros_tools/online_bagger/CMakeLists.txt +++ b/utils/mil_ros_tools/online_bagger/CMakeLists.txt @@ -3,8 +3,15 @@ project(mil_ros_tools) find_package(catkin REQUIRED COMPONENTS rospy - std_srvs + message_generation + +) +add_service_files( + DIRECTORY srv + FILES + BaggerCommands.srv ) +generate_messages() catkin_package() diff --git a/utils/mil_ros_tools/online_bagger/config/online_bagger_example.yaml b/utils/mil_ros_tools/online_bagger/config/online_bagger_example.yaml new file mode 100644 index 00000000..ce268b78 --- /dev/null +++ b/utils/mil_ros_tools/online_bagger/config/online_bagger_example.yaml @@ -0,0 +1,16 @@ +# list an arbitrary set of topics to subscribe to and stream: + + # down_stream_rate: 10 Hz +ram_limit: 3.0 # gb +topics: ["/odom", + "/absodom", + "/stereo/left/camera_info", + "/stereo/left/image_raw", + "/stereo/right/camera_info", + "/stereo/right/image_raw", + "/velodyne_points"] + +bag_package_path: 'mil_ros_tools' + +# leave bag name empty if default +bag_name: '' diff --git a/utils/mil_ros_tools/online_bagger/config/topics_to_stream.yaml b/utils/mil_ros_tools/online_bagger/config/topics_to_stream.yaml deleted file mode 100644 index 5b154e5d..00000000 --- a/utils/mil_ros_tools/online_bagger/config/topics_to_stream.yaml +++ /dev/null @@ -1,13 +0,0 @@ -# list an arbitrary set of topics to subscribe to and stream: - # ram_limit is not yet implemented - -stream_time: 30 # seconds - # down_stream_rate: 10 Hz -ram_limit: 0.5 # gb -message_topics: ["/odom", - "/absodom", - "/stereo/left/camera_info", - "/stereo/left/image_raw", - "/stereo/right/camera_info", - "/stereo/right/image_raw", - "/velodyne_points"] diff --git a/utils/mil_ros_tools/online_bagger/launch/online_bagger.launch b/utils/mil_ros_tools/online_bagger/launch/online_bagger.launch deleted file mode 100644 index 76d6da6e..00000000 --- a/utils/mil_ros_tools/online_bagger/launch/online_bagger.launch +++ /dev/null @@ -1,4 +0,0 @@ - - - - diff --git a/utils/mil_ros_tools/online_bagger/launch/online_bagger_example.launch b/utils/mil_ros_tools/online_bagger/launch/online_bagger_example.launch new file mode 100644 index 00000000..e693f616 --- /dev/null +++ b/utils/mil_ros_tools/online_bagger/launch/online_bagger_example.launch @@ -0,0 +1,4 @@ + + + + diff --git a/utils/mil_ros_tools/online_bagger/nodes/online_bagger.py b/utils/mil_ros_tools/online_bagger/nodes/online_bagger.py index ab3bf948..87bdba21 100755 --- a/utils/mil_ros_tools/online_bagger/nodes/online_bagger.py +++ b/utils/mil_ros_tools/online_bagger/nodes/online_bagger.py @@ -4,7 +4,6 @@ import rospy import rosbag import rostopic -from std_srvs.srv import SetBool import os import rospkg import time @@ -13,18 +12,21 @@ from collections import deque import itertools import datetime +from mil_ros_tools.srv import BaggerCommands """ -To call call service call the '/online_bagger' service with the SetBool -Parameter set to True. Doing this will generate a rosbag with the +To call call service call the '/online_bagger/dump' service with the BaggerCommands.srv +Amount and Unit. 
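
For reference, a catkin-generated service like BaggerCommands.srv is invoked from
Python roughly as below. This is a hedged sketch assuming the request fields
`amount` and `unit` and the response field `success` from the srv file added at
the end of this patch; the node name is invented:

    import rospy
    from mil_ros_tools.srv import BaggerCommands  # generated from BaggerCommands.srv

    rospy.init_node('bagger_client_sketch')
    rospy.wait_for_service('/online_bagger/dump')
    dump = rospy.ServiceProxy('/online_bagger/dump', BaggerCommands)
    result = dump(amount=10.0, unit='s')  # request fields above the '---'
    print(result.success)                 # response field below the '---'
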
Doing this will generate a rosbag with the last x number of seconds saved -for example: - - bagging_server = rospy.ServiceProxy('/online_bagger', SetBool) - bag_status = bagging_server(True) -""" +For example: + bagging_server = rospy.ServiceProxy('/online_bagger/dump', BaggerCommands) + bag_status = bagging_server(x, 's') + bag_status = bagging_server(Amount, Unit) + amount = float32 (leave blank to dump entire bag) + unit = string (select units 's' or 'm') + """ class OnlineBagger(object): @@ -39,14 +41,14 @@ def __init__(self): self.n = 0 # number of iterations self.streaming = True self.dumping = False - self.get_params() self.make_dicts() - self.subscribe() - rospy.loginfo('subscriber success: %s', self.subscriber_list) - self.bagging_service = rospy.Service('/online_bagger/dump', SetBool, - self.start_bagging) + self.bagging_service = rospy.Service('/online_bagger/dump', BaggerCommands, + self.check_action) + + self.subscribe_loop() + rospy.loginfo('subscriber success: %s', self.subscriber_list) def get_params(self): @@ -54,7 +56,7 @@ def get_params(self): Retrieve parameters from param server. """ - self.topics = rospy.get_param('/online_bagger/message_topics') + self.topics = rospy.get_param('/online_bagger/topics') self.ram_limit = rospy.get_param('/online_bagger/ram_limit') * 1.073e9 # gigabytes to bytes # self.stream_time = rospy.get_param('/online_bagger/stream_time') self.dir = rospy.get_param('/online_bagger/bag_package_path') @@ -112,6 +114,19 @@ def __getitem__(self, index): rospy.loginfo('failed topics: %s', self.subscriber_list) + def subscribe_loop(self): + + """ + Continue to subscribe until no topics fail + """ + + i = 0 + while self.subscribe(): + i = i + 1 + self.subscribe + if not i % 1000: + rospy.loginfo('still subscribing!') + def subscribe(self): """ @@ -124,19 +139,25 @@ def subscribe(self): Each element in self.subscriber list is a list [topic, Bool] where the Bool tells the current status of the subscriber (sucess/failure). + + Return number of topics that failed subscription """ + i = 0 # failed subscriptions + for topic in self.subscriber_list: if not topic[1]: msg_class = rostopic.get_topic_class(topic[0]) if msg_class[1] is None: - pass + i = i + 1 else: rospy.Subscriber(topic[0], msg_class[0], lambda msg, _topic=topic[0]: self.bagger_callback(msg, _topic)) topic[1] = True # successful subscription + return i + def get_oldest_topic_time(self, topic): """ @@ -180,10 +201,10 @@ def get_ram_usage(self): self.usage = resource.getrusage(resource.RUSAGE_SELF) - # getattr returns ram usage in kilobytes - self.ram_usage = getattr(self.usage, 'ru_maxrss') + # getattr returns ram usage in kilobytes multiply by 1e3 to obtain megabytes + self.ram_usage = getattr(self.usage, 'ru_maxrss')*1e3 - return self.ram_usage*1e3 + return self.ram_usage def set_ram_limit(self, topic): @@ -195,24 +216,12 @@ def set_ram_limit(self, topic): self.topic_messages[topic].popleft() def bagger_callback(self, msg, topic): + """ streaming callback function, stops streaming during bagging process - also pops off msgs from dequeue if length of stream is longer than - stream time specified in yaml parameters - - stores all data in a dictionary. 
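
The sliceable deque that make_dicts builds is what later lets start_bagging slice
messages from a time index onward. A minimal self-contained version of the same
idea (non-negative indices only, which is all the bagger needs):

    from collections import deque
    import itertools

    class SliceableDeque(deque):
        # deque only supports integer indexing; translate slices to islice.
        def __getitem__(self, index):
            if isinstance(index, slice):
                return type(self)(itertools.islice(self, index.start,
                                                   index.stop, index.step))
            return deque.__getitem__(self, index)

    d = SliceableDeque(range(10))
    print(list(d[7:]))  # [7, 8, 9] -> only the newest entries
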
- the keys are topic names - the values are dequeues of tuples - each tuple is a time stamp of rostime and the message for that topic - - if any errors are occuring you can uncomment the lines with - if self.n % 50 == 0 to display every 50th message, time diff and topic - this will not give every 50th message on each topic, it provides every - 50th message accross all topics. + also pops off msgs from dequeue if stream size is greater than specified ram limit stream, callback function does nothing if streaming is not active - - Continually resubscribes to any failed topics. """ if not self.streaming: @@ -226,26 +235,26 @@ def bagger_callback(self, msg, topic): self.set_ram_limit(topic) # verify streaming is popping off and recording topics - if not self.n % 100: - # rospy.loginfo('time_diff: %s', time_diff.to_sec()) - # rospy.loginfo('topic: %s', topic) - # rospy.loginfo('topic type: %s', type(msg)) - rospy.loginfo('ram size: %s', self.ram_usage/1e3) - # rospy.loginfo('number of messages: %s', self.get_topic_message_count(topic)) - - - # self.subscribe() # loop + if not self.n % 50: + rospy.logdebug('time_difference: %s', self.get_topic_duration(topic).to_sec()) + rospy.logdebug('topic: %s', topic) + rospy.logdebug('topic type: %s', type(msg)) + rospy.logdebug('ram usage: %s mb', self.get_ram_usage()/1e6) + rospy.logdebug('number of topic messages: %s', self.get_topic_message_count(topic)) def get_subscribed_topics(self): """ - Return subscribed and failed topics + Return subscribed topics """ return self.subscriber_list def get_number_subscribed_topics(self): - pass + """ + Return number of subscribed topics + """ + return len(self.subscriber_list) def get_topic_message_count(self, topic): @@ -255,16 +264,17 @@ def get_topic_message_count(self, topic): return len(self.topic_messages[topic]) - def get_all_message_count(self): + def get_total_message_count(self): """ Returns total number of messages across all topics """ - message_count = 0 + self.total_message_count = 0 for topic in self.topic_messages.keys(): - message_count = message_count + get_topic_message_count(topic) - return message_count + self.total_message_count = self.total_message_count + self.get_topic_message_count(topic) + + return self.total_message_count def set_bag_directory(self): @@ -311,12 +321,48 @@ def get_time_index(self, topic, requested_seconds): messages were bagged' % (topic_duration, requested_seconds) return int(index) + def check_action(self, req): + + """ + Determine units for OnlineBagger + Typos result in assuming minutes instead of seconds, which is more conservative when bagging + + Print Status Topic if Requested: + """ + + if req.unit == 'status': + self.display_status() + self.message = "Status Displayed: " + return self.message + elif not req.amount: + self.start_bagging(req) + elif req.unit[0] == 's': + self.requested_seconds = req.amount + else: + self.requested_seconds = 60*req.amount + + self.start_bagging(req) + + return self.message + + def display_status(self): + print '\n' + rospy.loginfo('Number of Topics Subscribed To: %s', self.get_number_subscribed_topics()) + rospy.loginfo('Topics Subscribed to: %s', self.subscriber_list) + rospy.loginfo('Message Count: %s', self.get_total_message_count()) + rospy.loginfo('Memory Usage: %s Mb\n', self.get_ram_usage()/1e6) + print '\n' + def start_bagging(self, req): + """ Dump all data in dictionary to bags, temporarily stops streaming during the bagging process, resumes streaming when over. 
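
The unit dispatch in check_action is easy to test in isolation. A sketch of the
same rule, where anything not starting with 's' is treated as minutes, the
conservative fallback for typos:

    def requested_seconds(amount, unit):
        # Seconds if the unit starts with 's'; otherwise assume minutes.
        if unit and unit[0] == 's':
            return amount
        return 60 * amount

    assert requested_seconds(90, 'sec') == 90
    assert requested_seconds(5, 'minutes') == 300
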
""" - self.dumping = req.data + + rospy.loginfo('bagging %s %s', req.amount, req.unit) + + self.dumping = True self.streaming = False self.set_bag_directory() @@ -324,11 +370,14 @@ def start_bagging(self, req): rospy.loginfo('dumping value: %s', self.dumping) rospy.loginfo('bagging commencing!') - self.requested_seconds = int(10) for topic in self.topic_messages.keys(): rospy.loginfo('topic: %s', topic) - self.i = self.get_time_index(topic, self.requested_seconds) + if not req.amount: + self.i = 0 + self.bag_report = 'All messages were bagged' + else: + self.i = self.get_time_index(topic, self.requested_seconds) self.m = 0 # message number in a given topic for msgs in self.topic_messages[topic][self.i:]: self.m = self.m + 1 @@ -342,8 +391,8 @@ def start_bagging(self, req): rospy.loginfo('bagging finished!') self.streaming = True - message = 'bagging finished' - return (True, message) + self.message = 'bagging finished' + if __name__ == "__main__": rospy.init_node('online_bagger') diff --git a/utils/mil_ros_tools/online_bagger/scripts/online_bagger_client_example.py b/utils/mil_ros_tools/online_bagger/scripts/online_bagger_client_example.py new file mode 100755 index 00000000..d0473274 --- /dev/null +++ b/utils/mil_ros_tools/online_bagger/scripts/online_bagger_client_example.py @@ -0,0 +1,38 @@ +#!/usr/bin/env python + +import rospy +from mil_ros_tools.srv import BaggerCommands + +''' +Bags the current stream of topics defined in the +online_bagger using the service '/bagging server' +will return a success message along with the list of topics that were bagged + +For example: + bag_status = bagging_server(#, 'units') will bag # units as long as units is seconds/minutes + bag_status = bagging_server() will bag everything + bag_status = bagging_server(10, 'seconds') will bag the last 10 seconds + bag_status = bagging_server(5, 'minutes') will bag last 5 minutes + bag_status = bagging_server(#, 'status') will display the current status - No bagging: + + Units for seconds may be 's', 'sec', 'second', 'seconds' + Units for minutes may be 'm', 'min', 'minute', 'minutes' + Units for # is any float number + +In Particular the current status will be: +- Number of Topics Subscribed To +- Name of Topics Subscribed To +- Total Message Count Across all Topics +- Current Memory Usage + +''' +def online_bagging_client(amount=0, unit=''): + rospy.wait_for_service('/online_bagger/dump') + try: + bagging_server = rospy.ServiceProxy('/online_bagger/dump', BaggerCommands) + bag_status = bagging_server(amount, unit) + except rospy.ServiceException, e: + print "/online_bagger service failed: %s" % e + + +# online_bagging_client(30, 'm') diff --git a/utils/mil_ros_tools/online_bagger/scripts/online_bagging_client.py b/utils/mil_ros_tools/online_bagger/scripts/online_bagging_client.py deleted file mode 100755 index 5974cdb0..00000000 --- a/utils/mil_ros_tools/online_bagger/scripts/online_bagging_client.py +++ /dev/null @@ -1,20 +0,0 @@ -#!/usr/bin/env python - -import rospy -from std_srvs.srv import SetBool - -''' -Bags the current stream of topics defined in the -online_bagger using the service '/bagging server' -will return a success message along with the list of topics that were bagged -''' -def online_bagging_client(): - rospy.wait_for_service('/online_bagger/dump') - try: - bagging_server = rospy.ServiceProxy('/online_bagger/dump', SetBool) - bag_status = bagging_server(True) - except rospy.ServiceException, e: - print "/online_bagger service failed: %s" % e - - -online_bagging_client() diff --git 
a/utils/mil_ros_tools/online_bagger/srv/BaggerCommands.srv b/utils/mil_ros_tools/online_bagger/srv/BaggerCommands.srv new file mode 100644 index 00000000..892f4471 --- /dev/null +++ b/utils/mil_ros_tools/online_bagger/srv/BaggerCommands.srv @@ -0,0 +1,4 @@ +float32 amount +string unit +--- +string success From 89f3ea59c61d4b55ca714eccdca233abf8d1f4d1 Mon Sep 17 00:00:00 2001 From: kingkevlar Date: Tue, 21 Mar 2017 18:14:04 -0400 Subject: [PATCH 233/267] ONLINE_BAGGER: Remove Client Example --- .../scripts/online_bagger_client_example.py | 38 ------------------- 1 file changed, 38 deletions(-) delete mode 100755 utils/mil_ros_tools/online_bagger/scripts/online_bagger_client_example.py diff --git a/utils/mil_ros_tools/online_bagger/scripts/online_bagger_client_example.py b/utils/mil_ros_tools/online_bagger/scripts/online_bagger_client_example.py deleted file mode 100755 index d0473274..00000000 --- a/utils/mil_ros_tools/online_bagger/scripts/online_bagger_client_example.py +++ /dev/null @@ -1,38 +0,0 @@ -#!/usr/bin/env python - -import rospy -from mil_ros_tools.srv import BaggerCommands - -''' -Bags the current stream of topics defined in the -online_bagger using the service '/bagging server' -will return a success message along with the list of topics that were bagged - -For example: - bag_status = bagging_server(#, 'units') will bag # units as long as units is seconds/minutes - bag_status = bagging_server() will bag everything - bag_status = bagging_server(10, 'seconds') will bag the last 10 seconds - bag_status = bagging_server(5, 'minutes') will bag last 5 minutes - bag_status = bagging_server(#, 'status') will display the current status - No bagging: - - Units for seconds may be 's', 'sec', 'second', 'seconds' - Units for minutes may be 'm', 'min', 'minute', 'minutes' - Units for # is any float number - -In Particular the current status will be: -- Number of Topics Subscribed To -- Name of Topics Subscribed To -- Total Message Count Across all Topics -- Current Memory Usage - -''' -def online_bagging_client(amount=0, unit=''): - rospy.wait_for_service('/online_bagger/dump') - try: - bagging_server = rospy.ServiceProxy('/online_bagger/dump', BaggerCommands) - bag_status = bagging_server(amount, unit) - except rospy.ServiceException, e: - print "/online_bagger service failed: %s" % e - - -# online_bagging_client(30, 'm') From 674fcc76e8f9215221e220b9b28e014acc2060c2 Mon Sep 17 00:00:00 2001 From: kingkevlar Date: Tue, 21 Mar 2017 20:18:40 -0400 Subject: [PATCH 234/267] ONLINE_BAGGER: Fix failed topic subscription --- .../online_bagger/nodes/online_bagger.py | 45 +++++++++++++++---- 1 file changed, 36 insertions(+), 9 deletions(-) diff --git a/utils/mil_ros_tools/online_bagger/nodes/online_bagger.py b/utils/mil_ros_tools/online_bagger/nodes/online_bagger.py index 87bdba21..a33c7e7c 100755 --- a/utils/mil_ros_tools/online_bagger/nodes/online_bagger.py +++ b/utils/mil_ros_tools/online_bagger/nodes/online_bagger.py @@ -31,6 +31,7 @@ class OnlineBagger(object): def __init__(self): + """ Make dictionary of dequeues. 
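
This patch returns to the subscriber lambdas, so it is worth noting why they bind
`_topic=topic[0]` as a default argument: without it, Python's late binding would
hand every callback the loop's final topic. A minimal demonstration with
hypothetical topic names:

    names = ['/odom', '/absodom']

    bad = [lambda msg: (name, msg) for name in names]
    good = [lambda msg, _topic=name: (_topic, msg) for name in names]

    print(bad[0]('m'))   # ('/absodom', 'm') -- late binding bites
    print(good[0]('m'))  # ('/odom', 'm')    -- default arg captures the value
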
Subscribe to set of topics defined by the yaml file in directory @@ -38,6 +39,7 @@ def __init__(self): Set up service to bag n seconds of data default to all of available data """ + self.i = 0 # successful subscriptions self.n = 0 # number of iterations self.streaming = True self.dumping = False @@ -117,15 +119,15 @@ def __getitem__(self, index): def subscribe_loop(self): """ - Continue to subscribe until no topics fail + Continue to subscribe until at least one topic is successful """ i = 0 - while self.subscribe(): + while not self.subscribe(): i = i + 1 self.subscribe if not i % 1000: - rospy.loginfo('still subscribing!') + rospy.logdebug('still subscribing!') def subscribe(self): @@ -143,20 +145,20 @@ def subscribe(self): Return number of topics that failed subscription """ - i = 0 # failed subscriptions for topic in self.subscriber_list: if not topic[1]: msg_class = rostopic.get_topic_class(topic[0]) if msg_class[1] is None: - i = i + 1 + pass else: + self.i = self.i + 1 rospy.Subscriber(topic[0], msg_class[0], lambda msg, _topic=topic[0]: self.bagger_callback(msg, _topic)) topic[1] = True # successful subscription - return i + return self.i def get_oldest_topic_time(self, topic): @@ -245,16 +247,26 @@ def bagger_callback(self, msg, topic): def get_subscribed_topics(self): """ - Return subscribed topics + Return subscribed and failed topic list """ return self.subscriber_list def get_number_subscribed_topics(self): + """ Return number of subscribed topics """ - return len(self.subscriber_list) + + return self.subscribe() + + def get_number_failed_topics(self): + + """ + Return number of failed topics for subscription + """ + + return len(self.subscriber_list) - self.i def get_topic_message_count(self, topic): @@ -294,6 +306,7 @@ def set_bag_directory(self): self.bag = rosbag.Bag(os.path.join(self.directory, self.bag_name + '.bag'), 'w') def get_time_index(self, topic, requested_seconds): + """ Return the index for the time index for a topic at 'n' seconds from the end of the dequeue @@ -346,8 +359,14 @@ def check_action(self, req): return self.message def display_status(self): + + """ + Print status of online bagger + + """ print '\n' rospy.loginfo('Number of Topics Subscribed To: %s', self.get_number_subscribed_topics()) + rospy.loginfo('Number of Failed Topics: %s', self.get_number_failed_topics()) rospy.loginfo('Topics Subscribed to: %s', self.subscriber_list) rospy.loginfo('Message Count: %s', self.get_total_message_count()) rospy.loginfo('Memory Usage: %s Mb\n', self.get_ram_usage()/1e6) @@ -370,14 +389,22 @@ def start_bagging(self, req): rospy.loginfo('dumping value: %s', self.dumping) rospy.loginfo('bagging commencing!') + for topic in self.subscriber_list: + if topic[1] == False: + continue - for topic in self.topic_messages.keys(): + topic = topic[0] rospy.loginfo('topic: %s', topic) + + # if no number is provided or a zero is given, bag all messages if not req.amount: self.i = 0 self.bag_report = 'All messages were bagged' + + # get time index the normal way else: self.i = self.get_time_index(topic, self.requested_seconds) + self.m = 0 # message number in a given topic for msgs in self.topic_messages[topic][self.i:]: self.m = self.m + 1 From b90eec439f3c3996a107fe02241da6d953e469e1 Mon Sep 17 00:00:00 2001 From: kingkevlar Date: Thu, 23 Mar 2017 16:26:06 -0400 Subject: [PATCH 235/267] ONLINE_BAGGER: Add bag naming feature to client --- .../config/online_bagger_example.yaml | 14 +- .../online_bagger/nodes/online_bagger.py | 236 ++++++------------ 
.../online_bagger/srv/BaggerCommands.srv | 6 +- 3 files changed, 82 insertions(+), 174 deletions(-) diff --git a/utils/mil_ros_tools/online_bagger/config/online_bagger_example.yaml b/utils/mil_ros_tools/online_bagger/config/online_bagger_example.yaml index ce268b78..5c62e291 100644 --- a/utils/mil_ros_tools/online_bagger/config/online_bagger_example.yaml +++ b/utils/mil_ros_tools/online_bagger/config/online_bagger_example.yaml @@ -1,16 +1,18 @@ # list an arbitrary set of topics to subscribe to and stream: - # down_stream_rate: 10 Hz -ram_limit: 3.0 # gb +# the more conservative between the two, stream_time and ram_limit are always in place +# the more conservative will be enforced +stream_time: 10 # seconds +# ram_limit: 0.25 # gb soft limit not hard enforced topics: ["/odom", "/absodom", "/stereo/left/camera_info", "/stereo/left/image_raw", "/stereo/right/camera_info", "/stereo/right/image_raw", - "/velodyne_points"] + "/velodyne_points", + "/ooka"] -bag_package_path: 'mil_ros_tools' +# comment out if default home/user/online_bagger is desired +# bag_package_path: '' -# leave bag name empty if default -bag_name: '' diff --git a/utils/mil_ros_tools/online_bagger/nodes/online_bagger.py b/utils/mil_ros_tools/online_bagger/nodes/online_bagger.py index a33c7e7c..9a22ef17 100755 --- a/utils/mil_ros_tools/online_bagger/nodes/online_bagger.py +++ b/utils/mil_ros_tools/online_bagger/nodes/online_bagger.py @@ -5,14 +5,11 @@ import rosbag import rostopic import os -import rospkg -import time -import math import resource from collections import deque import itertools import datetime -from mil_ros_tools.srv import BaggerCommands +from mil_ros_tools.srv import BaggerCommands """ To call call service call the '/online_bagger/dump' service with the BaggerCommands.srv @@ -22,13 +19,13 @@ For example: bagging_server = rospy.ServiceProxy('/online_bagger/dump', BaggerCommands) - bag_status = bagging_server(x, 's') - bag_status = bagging_server(Amount, Unit) - amount = float32 (leave blank to dump entire bag) - unit = string (select units 's' or 'm') - """ + bag_status = bagging_server(x, 'name') + bag_status = bagging_server(bag_time, bag_name) + bag_time = float32 (leave blank to dump entire bag) + bag_name = name of bag (leave blank to use default name: current date and time) +""" -class OnlineBagger(object): +class OnlineBagger(): def __init__(self): @@ -39,15 +36,14 @@ def __init__(self): Set up service to bag n seconds of data default to all of available data """ - self.i = 0 # successful subscriptions - self.n = 0 # number of iterations + self.successful_subscription_count = 0 # successful subscriptions + self.iteration_count = 0 # number of iterations self.streaming = True - self.dumping = False self.get_params() self.make_dicts() self.bagging_service = rospy.Service('/online_bagger/dump', BaggerCommands, - self.check_action) + self.start_bagging) self.subscribe_loop() rospy.loginfo('subscriber success: %s', self.subscriber_list) @@ -59,16 +55,10 @@ def get_params(self): """ self.topics = rospy.get_param('/online_bagger/topics') - self.ram_limit = rospy.get_param('/online_bagger/ram_limit') * 1.073e9 # gigabytes to bytes - # self.stream_time = rospy.get_param('/online_bagger/stream_time') - self.dir = rospy.get_param('/online_bagger/bag_package_path') - self.bag_name = rospy.get_param('/online_bagger/bag_name') + self.dir = rospy.get_param('/online_bagger/bag_package_path', default=os.environ['HOME']) + self.stream_time = rospy.get_param('/online_bagger/stream_time', default=30) # seconds - if not 
self.bag_name: - self.bag_name = str(datetime.date.today()) + '-' + str(datetime.datetime.now().time())[0:8] - - # rospy.loginfo('stream_time: %s seconds', self.stream_time) - rospy.loginfo('ram_limit: %f gb', rospy.get_param('/online_bagger/ram_limit')) + rospy.loginfo('stream_time: %s seconds', self.stream_time) def make_dicts(self): @@ -94,12 +84,12 @@ def make_dicts(self): '/odom' is a potential topic name self.topic_message['/odom'] is a deque self.topic_message['/odom'][0] is the oldest message available in the deque - and its time stamp if available + and its time stamp if available. It is a tuple with each element: (msg, time_stamp) """ # Courtesy Kindall # http://stackoverflow.com/questions/10003143/how-to-slice-a-deque - class sliceable_deque(deque): + class SliceableDeque(deque): def __getitem__(self, index): if isinstance(index, slice): return type(self)(itertools.islice(self, index.start, @@ -109,9 +99,8 @@ def __getitem__(self, index): self.topic_messages = {} self.subscriber_list = [] - for topic in self.topics: - self.topic_messages[topic] = sliceable_deque(deque()) + self.topic_messages[topic] = SliceableDeque(deque()) self.subscriber_list.append([topic, False]) rospy.loginfo('failed topics: %s', self.subscriber_list) @@ -119,14 +108,17 @@ def __getitem__(self, index): def subscribe_loop(self): """ - Continue to subscribe until at least one topic is successful + Continue to subscribe until at least one topic is successful, + then break out of loop and be called in the callback function to prevent the function + from locking up. """ i = 0 - while not self.subscribe(): + while self.successful_subscription_count == 0 and not rospy.is_shutdown(): + self.subscribe() + rospy.sleep(0.1) i = i + 1 - self.subscribe - if not i % 1000: + if i % 1000 == 0: rospy.logdebug('still subscribing!') def subscribe(self): @@ -145,44 +137,23 @@ def subscribe(self): Return number of topics that failed subscription """ - for topic in self.subscriber_list: if not topic[1]: msg_class = rostopic.get_topic_class(topic[0]) - if msg_class[1] is None: - pass - else: - self.i = self.i + 1 + if msg_class[1] is not None: + self.successful_subscription_count = self.successful_subscription_count + 1 rospy.Subscriber(topic[0], msg_class[0], lambda msg, _topic=topic[0]: self.bagger_callback(msg, _topic)) topic[1] = True # successful subscription - return self.i - - def get_oldest_topic_time(self, topic): - - """ - Return oldest timestamp for a given topic as a rospy.Time type - """ - - return self.topic_messages[topic][0][0] - - def get_newest_topic_time(self, topic): - - """ - Return newest timestamp for a given topic as a rospy.Time type - """ - - return self.topic_messages[topic][-1][0] - def get_topic_duration(self, topic): """ Return current time duration of topic """ - return self.get_newest_topic_time(topic) - self.get_oldest_topic_time(topic) + return self.topic_messages[topic][-1][0] - self.topic_messages[topic][0][0] def get_header_time(self, msg): @@ -195,78 +166,34 @@ def get_header_time(self, msg): else: return rospy.get_rostime() - def get_ram_usage(self): - - """ - Return current ram usage in bytes - """ - - self.usage = resource.getrusage(resource.RUSAGE_SELF) - - # getattr returns ram usage in kilobytes multiply by 1e3 to obtain megabytes - self.ram_usage = getattr(self.usage, 'ru_maxrss')*1e3 - - return self.ram_usage - - def set_ram_limit(self, topic): - - """ - Limit ram usage by removing oldest messages - """ - - if self.get_ram_usage() > self.ram_limit: - 
self.topic_messages[topic].popleft() - def bagger_callback(self, msg, topic): """ - streaming callback function, stops streaming during bagging process + Streaming callback function, stops streaming during bagging process also pops off msgs from dequeue if stream size is greater than specified ram limit - stream, callback function does nothing if streaming is not active + Stream, callback function does nothing if streaming is not active """ if not self.streaming: return - self.n = self.n + 1 + self.iteration_count = self.iteration_count + 1 time = self.get_header_time(msg) self.topic_messages[topic].append((time, msg)) - self.set_ram_limit(topic) + time_diff = self.get_topic_duration(topic) # verify streaming is popping off and recording topics - if not self.n % 50: - rospy.logdebug('time_difference: %s', self.get_topic_duration(topic).to_sec()) + if self.iteration_count % 100 == 0: + rospy.logdebug('time_difference: %s', time_diff.to_sec()) rospy.logdebug('topic: %s', topic) rospy.logdebug('topic type: %s', type(msg)) - rospy.logdebug('ram usage: %s mb', self.get_ram_usage()/1e6) rospy.logdebug('number of topic messages: %s', self.get_topic_message_count(topic)) - def get_subscribed_topics(self): - - """ - Return subscribed and failed topic list - """ - - return self.subscriber_list - - def get_number_subscribed_topics(self): - - """ - Return number of subscribed topics - """ - - return self.subscribe() - - def get_number_failed_topics(self): - - """ - Return number of failed topics for subscription - """ - - return len(self.subscriber_list) - self.i + if time_diff > rospy.Duration(self.stream_time): + self.topic_messages[topic].popleft() def get_topic_message_count(self, topic): @@ -282,28 +209,34 @@ def get_total_message_count(self): Returns total number of messages across all topics """ - self.total_message_count = 0 + total_message_count = 0 for topic in self.topic_messages.keys(): - self.total_message_count = self.total_message_count + self.get_topic_message_count(topic) + total_message_count = total_message_count + self.get_topic_message_count(topic) - return self.total_message_count + return total_message_count - def set_bag_directory(self): + def set_bag_directory(self,bag_name=''): """ Create ros bag save directory + + If no bag name is provided, the current date/time is used as default. 
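
The time-window pruning added to bagger_callback above keeps only the trailing
stream_time seconds of each deque. The same idea without ROS types, pruning fully
rather than one message per callback (a sketch, not the node's code):

    from collections import deque

    def append_windowed(buf, stamp, msg, window):
        # Keep only messages from the trailing `window` seconds.
        buf.append((stamp, msg))
        while (buf[-1][0] - buf[0][0]) > window:
            buf.popleft()

    buf = deque()
    for t in range(100):
        append_windowed(buf, float(t), 'msg', 30.0)
    print(buf[0][0], buf[-1][0])  # 69.0 99.0 -> a 30 second window
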
""" - rospack = rospkg.RosPack() - self.directory = os.path.join(rospack.get_path(self.dir), 'bags/') + types = ('', None) + + if bag_name in types: + bag_name = str(datetime.date.today()) + '-' + str(datetime.datetime.now().time())[0:8] + + directory = os.path.join(self.dir, 'online_bagger/') - rospy.loginfo('bag directory: %s', self.directory) - rospy.loginfo('bag name: %s', self.bag_name) + rospy.loginfo('bag directory: %s', directory) + rospy.loginfo('bag name: %s', bag_name) - if not os.path.exists(self.directory): - os.makedirs(self.directory) + if not os.path.exists(directory): + os.makedirs(directory) - self.bag = rosbag.Bag(os.path.join(self.directory, self.bag_name + '.bag'), 'w') + self.bag = rosbag.Bag(os.path.join(directory, bag_name + '.bag'), 'w') def get_time_index(self, topic, requested_seconds): @@ -325,52 +258,23 @@ def get_time_index(self, topic, requested_seconds): topic_duration = self.get_topic_duration(topic).to_sec() ratio = requested_seconds / topic_duration - index = math.floor(self.get_topic_message_count(topic) * (1 - min(ratio, 1))) + index = int(math.floor(self.get_topic_message_count(topic) * (1 - min(ratio, 1)))) self.bag_report = 'The requested %s seconds were bagged' % requested_seconds - if not index: + if index == 0: self.bag_report = 'Only %s seconds of the request %s seconds were available, all \ messages were bagged' % (topic_duration, requested_seconds) - return int(index) + return index - def check_action(self, req): - - """ - Determine units for OnlineBagger - Typos result in assuming minutes instead of seconds, which is more conservative when bagging - - Print Status Topic if Requested: - """ - - if req.unit == 'status': - self.display_status() - self.message = "Status Displayed: " - return self.message - elif not req.amount: - self.start_bagging(req) - elif req.unit[0] == 's': - self.requested_seconds = req.amount - else: - self.requested_seconds = 60*req.amount - - self.start_bagging(req) - - return self.message - - def display_status(self): + def set_bagger_status(self): """ Print status of online bagger - """ - print '\n' - rospy.loginfo('Number of Topics Subscribed To: %s', self.get_number_subscribed_topics()) - rospy.loginfo('Number of Failed Topics: %s', self.get_number_failed_topics()) - rospy.loginfo('Topics Subscribed to: %s', self.subscriber_list) - rospy.loginfo('Message Count: %s', self.get_total_message_count()) - rospy.loginfo('Memory Usage: %s Mb\n', self.get_ram_usage()/1e6) - print '\n' + + self.bagger_status = ('Subscriber List: ' + str(self.subscriber_list) + ' Message Count: ' + + str(self.get_total_message_count())) def start_bagging(self, req): @@ -379,15 +283,13 @@ def start_bagging(self, req): during the bagging process, resumes streaming when over. 
""" - rospy.loginfo('bagging %s %s', req.amount, req.unit) + rospy.loginfo('bagging %s s', req.bag_time) - self.dumping = True self.streaming = False + self.set_bag_directory(req.bag_name) + requested_seconds = req.bag_time - self.set_bag_directory() - - rospy.loginfo('dumping value: %s', self.dumping) - rospy.loginfo('bagging commencing!') + self.set_bagger_status() for topic in self.subscriber_list: if topic[1] == False: @@ -397,13 +299,14 @@ def start_bagging(self, req): rospy.loginfo('topic: %s', topic) # if no number is provided or a zero is given, bag all messages - if not req.amount: + types = (0, 0.0, None) + if req.bag_time in types: self.i = 0 self.bag_report = 'All messages were bagged' # get time index the normal way else: - self.i = self.get_time_index(topic, self.requested_seconds) + self.i = self.get_time_index(topic, requested_seconds) self.m = 0 # message number in a given topic for msgs in self.topic_messages[topic][self.i:]: @@ -413,13 +316,16 @@ def start_bagging(self, req): rospy.loginfo('topic: %s, message type: %s, time: %s', topic, type(msgs[1]), type(msgs[0])) - rospy.loginfo('Bag Report: %s', self.bag_report) + # empty deque when done writing to bag + self.topic_messages[topic].clear() + + rospy.loginfo('Bag Report: %s', self.bagger_status) self.bag.close() rospy.loginfo('bagging finished!') - self.streaming = True - self.message = 'bagging finished' + self.streaming = True + return self.bagger_status if __name__ == "__main__": rospy.init_node('online_bagger') diff --git a/utils/mil_ros_tools/online_bagger/srv/BaggerCommands.srv b/utils/mil_ros_tools/online_bagger/srv/BaggerCommands.srv index 892f4471..cc52d84e 100644 --- a/utils/mil_ros_tools/online_bagger/srv/BaggerCommands.srv +++ b/utils/mil_ros_tools/online_bagger/srv/BaggerCommands.srv @@ -1,4 +1,4 @@ -float32 amount -string unit +string bag_name +float32 bag_time --- -string success +string status From 1a550bb01aa02bcb9eaf345a80c3834ace53a74d Mon Sep 17 00:00:00 2001 From: kingkevlar Date: Thu, 23 Mar 2017 16:41:59 -0400 Subject: [PATCH 236/267] ONLINE_BAGGER: Fix variable naming conventions --- .../online_bagger/nodes/online_bagger.py | 21 +++++++++---------- 1 file changed, 10 insertions(+), 11 deletions(-) diff --git a/utils/mil_ros_tools/online_bagger/nodes/online_bagger.py b/utils/mil_ros_tools/online_bagger/nodes/online_bagger.py index 9a22ef17..ac7f9ecd 100755 --- a/utils/mil_ros_tools/online_bagger/nodes/online_bagger.py +++ b/utils/mil_ros_tools/online_bagger/nodes/online_bagger.py @@ -19,13 +19,12 @@ For example: bagging_server = rospy.ServiceProxy('/online_bagger/dump', BaggerCommands) - bag_status = bagging_server(x, 'name') - bag_status = bagging_server(bag_time, bag_name) - bag_time = float32 (leave blank to dump entire bag) + bag_status = bagging_server(bag_name, bag_time) bag_name = name of bag (leave blank to use default name: current date and time) + bag_time = float32 (leave blank to dump entire bag) """ -class OnlineBagger(): +class OnlineBagger(object): def __init__(self): @@ -258,7 +257,7 @@ def get_time_index(self, topic, requested_seconds): topic_duration = self.get_topic_duration(topic).to_sec() ratio = requested_seconds / topic_duration - index = int(math.floor(self.get_topic_message_count(topic) * (1 - min(ratio, 1)))) + index = int(self.get_topic_message_count(topic) * (1 - min(ratio, 1))) self.bag_report = 'The requested %s seconds were bagged' % requested_seconds @@ -301,18 +300,18 @@ def start_bagging(self, req): # if no number is provided or a zero is given, bag all messages 
types = (0, 0.0, None) if req.bag_time in types: - self.i = 0 + bag_index = 0 self.bag_report = 'All messages were bagged' # get time index the normal way else: - self.i = self.get_time_index(topic, requested_seconds) + bag_index = self.get_time_index(topic, requested_seconds) - self.m = 0 # message number in a given topic - for msgs in self.topic_messages[topic][self.i:]: - self.m = self.m + 1 + messages = 0 # message number in a given topic + for msgs in self.topic_messages[topic][bag_index:]: + messages = messages + 1 self.bag.write(topic, msgs[1], t=msgs[0]) - if not self.m % 100: # print every 100th topic, type and time + if messages % 100 == 0: # print every 100th topic, type and time rospy.loginfo('topic: %s, message type: %s, time: %s', topic, type(msgs[1]), type(msgs[0])) From 34d1710763a612927e611122fa2e0f117d0869f1 Mon Sep 17 00:00:00 2001 From: David Soto Date: Fri, 17 Mar 2017 19:24:51 -0400 Subject: [PATCH 237/267] GREAT MERGE: rename navigator_vision to mil_vision navigator_vision is moved from the hidden '.great_merge' directory to the top-level 'perception' directory. navigator_vision was a superset of sub8_perception, 2 tools need to be added and vehicle particular code should be removed. --- .../mil_vision}/CMakeLists.txt | 0 .../mil_vision}/README.md | 0 .../mil_vision}/calibration.xml | 0 .../config/underwater_shape_detection.yaml | 0 .../mil_vision}/exFAST_SparseStereo/GPL.txt | 0 .../mil_vision}/exFAST_SparseStereo/README.txt | 0 .../example-data/calibration.xml | 0 .../exFAST_SparseStereo/example-data/left.png | Bin .../exFAST_SparseStereo/example-data/right.png | Bin .../exFAST_SparseStereo/src/example/example.cpp | 0 .../src/sparsestereo/CMakeLists.txt | 0 .../src/sparsestereo/calibrationresult.cpp | 0 .../src/sparsestereo/calibrationresult.h | 0 .../src/sparsestereo/census-inl.h | 0 .../exFAST_SparseStereo/src/sparsestereo/census.cpp | 0 .../exFAST_SparseStereo/src/sparsestereo/census.h | 0 .../src/sparsestereo/censuswindow.h | 0 .../src/sparsestereo/exception.h | 0 .../src/sparsestereo/extendedfast-inl.h | 0 .../src/sparsestereo/extendedfast.cpp | 0 .../src/sparsestereo/extendedfast.h | 0 .../src/sparsestereo/fast9-inl.h | 0 .../exFAST_SparseStereo/src/sparsestereo/fast9.h | 0 .../src/sparsestereo/hammingdistance.cpp | 0 .../src/sparsestereo/hammingdistance.h | 0 .../src/sparsestereo/imageconversion.cpp | 0 .../src/sparsestereo/imageconversion.h | 0 .../exFAST_SparseStereo/src/sparsestereo/simd.cpp | 0 .../exFAST_SparseStereo/src/sparsestereo/simd.h | 0 .../src/sparsestereo/sparsematch.h | 0 .../src/sparsestereo/sparserectification.cpp | 0 .../src/sparsestereo/sparserectification.h | 0 .../src/sparsestereo/sparsestereo-inl.h | 0 .../src/sparsestereo/sparsestereo.h | 0 .../src/sparsestereo/stereorectification.cpp | 0 .../src/sparsestereo/stereorectification.h | 0 .../missions/underwater_shape_identification.hpp | 0 .../navigator_vision_lib/active_contours.hpp | 0 .../colorizer/camera_observer.hpp | 0 .../colorizer/color_observation.hpp | 0 .../navigator_vision_lib/colorizer/common.hpp | 0 .../colorizer/pcd_colorizer.hpp | 0 .../colorizer/single_cloud_processor.hpp | 0 .../include/navigator_vision_lib/cv_tools.hpp | 0 .../image_acquisition/camera_frame.hpp | 0 .../image_acquisition/camera_frame_sequence.hpp | 0 .../image_acquisition/camera_model.hpp | 0 .../image_acquisition/ros_camera_stream.hpp | 0 .../navigator_vision_lib/image_filtering.hpp | 0 .../navigator_vision_lib/pcd_sub_pub_algorithm.hpp | 0 .../navigator_vision_lib/point_cloud_algorithms.hpp | 0 
.../include/navigator_vision_lib/visualization.hpp | 0 .../mil_vision}/nodes/A_C_test_vision.cpp | 0 .../mil_vision}/nodes/camera_stream_demo.cpp | 0 .../camera_to_lidar/CameraLidarTransformer.cpp | 0 .../camera_to_lidar/CameraLidarTransformer.hpp | 0 .../mil_vision}/nodes/camera_to_lidar/main.cpp | 0 .../mil_vision}/nodes/database_color.py | 0 .../mil_vision}/nodes/object_classifier.py | 0 .../mil_vision}/nodes/pinger_finder.py | 0 .../mil_vision}/nodes/pinger_locator.py | 0 .../nodes/shape_identification/DockShapeVision.cpp | 0 .../nodes/shape_identification/DockShapeVision.h | 0 .../GrayscaleContour/GrayscaleContour.cpp | 0 .../GrayscaleContour/GrayscaleContour.h | 0 .../nodes/shape_identification/ShapeTracker.cpp | 0 .../nodes/shape_identification/ShapeTracker.h | 0 .../nodes/shape_identification/TrackedShape.cpp | 0 .../nodes/shape_identification/TrackedShape.h | 0 .../mil_vision}/nodes/shape_identification/main.cpp | 0 .../mil_vision}/nodes/shooter_finder.py | 0 .../mil_vision}/nodes/start_gate_manual.py | 0 .../mil_vision}/nodes/stereo_point_cloud_driver.cpp | 0 .../underwater_shape_identification_vision.cpp | 0 .../mil_vision}/nodes/velodyne_pcd_colorizer.cpp | 0 .../object_classification/HOG_descriptor.py | 0 .../object_classification/SVM_classifier.py | 0 .../mil_vision}/object_classification/__init__.py | 0 .../mil_vision}/object_classification/depickler.py | 0 .../mil_vision}/object_classification/depicklify.py | 0 .../object_classification/lidar_to_image.py | 0 .../object_classification/median_flow.py | 0 .../object_classification/object_classification.py | 0 .../object_classification/roi_generator.py | 0 .../mil_vision}/object_classification/stc_train.py | 0 .../mil_vision}/package.xml | 0 .../mil_vision}/ros_tools/on_the_fly_thresholder.py | 0 .../mil_vision}/setup.py | 0 .../missions/underwater_shape_identification.cpp | 0 .../src/navigator_vision_lib/active_contours.cpp | 0 .../colorizer/camera_observer.cpp | 0 .../colorizer/color_observation.cpp | 0 .../colorizer/pcd_colorizer.cpp | 0 .../colorizer/single_cloud_processor.cpp | 0 .../src/navigator_vision_lib/cv_utils.cc | 0 .../src/navigator_vision_lib/image_filtering.cpp | 0 .../navigator_vision_lib/point_cloud_algorithms.cc | 0 .../src/navigator_vision_lib/visualization.cc | 0 .../underwater_shape_identification/circle.png | Bin .../underwater_shape_identification/cross.png | Bin .../underwater_shape_identification/triangle.png | Bin 101 files changed, 0 insertions(+), 0 deletions(-) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/CMakeLists.txt (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/README.md (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/calibration.xml (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/config/underwater_shape_detection.yaml (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/exFAST_SparseStereo/GPL.txt (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/exFAST_SparseStereo/README.txt (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/exFAST_SparseStereo/example-data/calibration.xml (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/exFAST_SparseStereo/example-data/left.png (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/exFAST_SparseStereo/example-data/right.png (100%) rename 
{.great_merge/perception/navigator_vision => perception/mil_vision}/exFAST_SparseStereo/src/example/example.cpp (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/exFAST_SparseStereo/src/sparsestereo/CMakeLists.txt (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/exFAST_SparseStereo/src/sparsestereo/calibrationresult.cpp (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/exFAST_SparseStereo/src/sparsestereo/calibrationresult.h (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/exFAST_SparseStereo/src/sparsestereo/census-inl.h (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/exFAST_SparseStereo/src/sparsestereo/census.cpp (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/exFAST_SparseStereo/src/sparsestereo/census.h (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/exFAST_SparseStereo/src/sparsestereo/censuswindow.h (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/exFAST_SparseStereo/src/sparsestereo/exception.h (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/exFAST_SparseStereo/src/sparsestereo/extendedfast-inl.h (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/exFAST_SparseStereo/src/sparsestereo/extendedfast.cpp (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/exFAST_SparseStereo/src/sparsestereo/extendedfast.h (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/exFAST_SparseStereo/src/sparsestereo/fast9-inl.h (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/exFAST_SparseStereo/src/sparsestereo/fast9.h (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/exFAST_SparseStereo/src/sparsestereo/hammingdistance.cpp (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/exFAST_SparseStereo/src/sparsestereo/hammingdistance.h (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/exFAST_SparseStereo/src/sparsestereo/imageconversion.cpp (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/exFAST_SparseStereo/src/sparsestereo/imageconversion.h (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/exFAST_SparseStereo/src/sparsestereo/simd.cpp (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/exFAST_SparseStereo/src/sparsestereo/simd.h (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/exFAST_SparseStereo/src/sparsestereo/sparsematch.h (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/exFAST_SparseStereo/src/sparsestereo/sparserectification.cpp (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/exFAST_SparseStereo/src/sparsestereo/sparserectification.h (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/exFAST_SparseStereo/src/sparsestereo/sparsestereo-inl.h (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/exFAST_SparseStereo/src/sparsestereo/sparsestereo.h (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/exFAST_SparseStereo/src/sparsestereo/stereorectification.cpp (100%) rename {.great_merge/perception/navigator_vision => 
perception/mil_vision}/exFAST_SparseStereo/src/sparsestereo/stereorectification.h (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/include/missions/underwater_shape_identification.hpp (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/include/navigator_vision_lib/active_contours.hpp (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/include/navigator_vision_lib/colorizer/camera_observer.hpp (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/include/navigator_vision_lib/colorizer/color_observation.hpp (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/include/navigator_vision_lib/colorizer/common.hpp (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/include/navigator_vision_lib/colorizer/pcd_colorizer.hpp (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/include/navigator_vision_lib/colorizer/single_cloud_processor.hpp (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/include/navigator_vision_lib/cv_tools.hpp (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/include/navigator_vision_lib/image_acquisition/camera_frame.hpp (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/include/navigator_vision_lib/image_acquisition/camera_frame_sequence.hpp (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/include/navigator_vision_lib/image_acquisition/camera_model.hpp (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/include/navigator_vision_lib/image_acquisition/ros_camera_stream.hpp (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/include/navigator_vision_lib/image_filtering.hpp (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/include/navigator_vision_lib/pcd_sub_pub_algorithm.hpp (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/include/navigator_vision_lib/point_cloud_algorithms.hpp (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/include/navigator_vision_lib/visualization.hpp (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/nodes/A_C_test_vision.cpp (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/nodes/camera_stream_demo.cpp (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/nodes/camera_to_lidar/CameraLidarTransformer.cpp (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/nodes/camera_to_lidar/CameraLidarTransformer.hpp (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/nodes/camera_to_lidar/main.cpp (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/nodes/database_color.py (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/nodes/object_classifier.py (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/nodes/pinger_finder.py (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/nodes/pinger_locator.py (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/nodes/shape_identification/DockShapeVision.cpp (100%) rename {.great_merge/perception/navigator_vision => 
perception/mil_vision}/nodes/shape_identification/DockShapeVision.h (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/nodes/shape_identification/GrayscaleContour/GrayscaleContour.cpp (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/nodes/shape_identification/GrayscaleContour/GrayscaleContour.h (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/nodes/shape_identification/ShapeTracker.cpp (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/nodes/shape_identification/ShapeTracker.h (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/nodes/shape_identification/TrackedShape.cpp (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/nodes/shape_identification/TrackedShape.h (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/nodes/shape_identification/main.cpp (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/nodes/shooter_finder.py (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/nodes/start_gate_manual.py (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/nodes/stereo_point_cloud_driver.cpp (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/nodes/underwater_shape_identification_vision.cpp (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/nodes/velodyne_pcd_colorizer.cpp (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/object_classification/HOG_descriptor.py (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/object_classification/SVM_classifier.py (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/object_classification/__init__.py (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/object_classification/depickler.py (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/object_classification/depicklify.py (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/object_classification/lidar_to_image.py (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/object_classification/median_flow.py (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/object_classification/object_classification.py (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/object_classification/roi_generator.py (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/object_classification/stc_train.py (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/package.xml (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/ros_tools/on_the_fly_thresholder.py (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/setup.py (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/src/missions/underwater_shape_identification.cpp (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/src/navigator_vision_lib/active_contours.cpp (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/src/navigator_vision_lib/colorizer/camera_observer.cpp (100%) rename {.great_merge/perception/navigator_vision => 
perception/mil_vision}/src/navigator_vision_lib/colorizer/color_observation.cpp (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/src/navigator_vision_lib/colorizer/pcd_colorizer.cpp (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/src/navigator_vision_lib/colorizer/single_cloud_processor.cpp (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/src/navigator_vision_lib/cv_utils.cc (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/src/navigator_vision_lib/image_filtering.cpp (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/src/navigator_vision_lib/point_cloud_algorithms.cc (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/src/navigator_vision_lib/visualization.cc (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/templates/underwater_shape_identification/circle.png (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/templates/underwater_shape_identification/cross.png (100%) rename {.great_merge/perception/navigator_vision => perception/mil_vision}/templates/underwater_shape_identification/triangle.png (100%) diff --git a/.great_merge/perception/navigator_vision/CMakeLists.txt b/perception/mil_vision/CMakeLists.txt similarity index 100% rename from .great_merge/perception/navigator_vision/CMakeLists.txt rename to perception/mil_vision/CMakeLists.txt diff --git a/.great_merge/perception/navigator_vision/README.md b/perception/mil_vision/README.md similarity index 100% rename from .great_merge/perception/navigator_vision/README.md rename to perception/mil_vision/README.md diff --git a/.great_merge/perception/navigator_vision/calibration.xml b/perception/mil_vision/calibration.xml similarity index 100% rename from .great_merge/perception/navigator_vision/calibration.xml rename to perception/mil_vision/calibration.xml diff --git a/.great_merge/perception/navigator_vision/config/underwater_shape_detection.yaml b/perception/mil_vision/config/underwater_shape_detection.yaml similarity index 100% rename from .great_merge/perception/navigator_vision/config/underwater_shape_detection.yaml rename to perception/mil_vision/config/underwater_shape_detection.yaml diff --git a/.great_merge/perception/navigator_vision/exFAST_SparseStereo/GPL.txt b/perception/mil_vision/exFAST_SparseStereo/GPL.txt similarity index 100% rename from .great_merge/perception/navigator_vision/exFAST_SparseStereo/GPL.txt rename to perception/mil_vision/exFAST_SparseStereo/GPL.txt diff --git a/.great_merge/perception/navigator_vision/exFAST_SparseStereo/README.txt b/perception/mil_vision/exFAST_SparseStereo/README.txt similarity index 100% rename from .great_merge/perception/navigator_vision/exFAST_SparseStereo/README.txt rename to perception/mil_vision/exFAST_SparseStereo/README.txt diff --git a/.great_merge/perception/navigator_vision/exFAST_SparseStereo/example-data/calibration.xml b/perception/mil_vision/exFAST_SparseStereo/example-data/calibration.xml similarity index 100% rename from .great_merge/perception/navigator_vision/exFAST_SparseStereo/example-data/calibration.xml rename to perception/mil_vision/exFAST_SparseStereo/example-data/calibration.xml diff --git a/.great_merge/perception/navigator_vision/exFAST_SparseStereo/example-data/left.png b/perception/mil_vision/exFAST_SparseStereo/example-data/left.png similarity index 100% rename from 
.great_merge/perception/navigator_vision/exFAST_SparseStereo/example-data/left.png rename to perception/mil_vision/exFAST_SparseStereo/example-data/left.png diff --git a/.great_merge/perception/navigator_vision/exFAST_SparseStereo/example-data/right.png b/perception/mil_vision/exFAST_SparseStereo/example-data/right.png similarity index 100% rename from .great_merge/perception/navigator_vision/exFAST_SparseStereo/example-data/right.png rename to perception/mil_vision/exFAST_SparseStereo/example-data/right.png diff --git a/.great_merge/perception/navigator_vision/exFAST_SparseStereo/src/example/example.cpp b/perception/mil_vision/exFAST_SparseStereo/src/example/example.cpp similarity index 100% rename from .great_merge/perception/navigator_vision/exFAST_SparseStereo/src/example/example.cpp rename to perception/mil_vision/exFAST_SparseStereo/src/example/example.cpp diff --git a/.great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/CMakeLists.txt b/perception/mil_vision/exFAST_SparseStereo/src/sparsestereo/CMakeLists.txt similarity index 100% rename from .great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/CMakeLists.txt rename to perception/mil_vision/exFAST_SparseStereo/src/sparsestereo/CMakeLists.txt diff --git a/.great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/calibrationresult.cpp b/perception/mil_vision/exFAST_SparseStereo/src/sparsestereo/calibrationresult.cpp similarity index 100% rename from .great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/calibrationresult.cpp rename to perception/mil_vision/exFAST_SparseStereo/src/sparsestereo/calibrationresult.cpp diff --git a/.great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/calibrationresult.h b/perception/mil_vision/exFAST_SparseStereo/src/sparsestereo/calibrationresult.h similarity index 100% rename from .great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/calibrationresult.h rename to perception/mil_vision/exFAST_SparseStereo/src/sparsestereo/calibrationresult.h diff --git a/.great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/census-inl.h b/perception/mil_vision/exFAST_SparseStereo/src/sparsestereo/census-inl.h similarity index 100% rename from .great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/census-inl.h rename to perception/mil_vision/exFAST_SparseStereo/src/sparsestereo/census-inl.h diff --git a/.great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/census.cpp b/perception/mil_vision/exFAST_SparseStereo/src/sparsestereo/census.cpp similarity index 100% rename from .great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/census.cpp rename to perception/mil_vision/exFAST_SparseStereo/src/sparsestereo/census.cpp diff --git a/.great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/census.h b/perception/mil_vision/exFAST_SparseStereo/src/sparsestereo/census.h similarity index 100% rename from .great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/census.h rename to perception/mil_vision/exFAST_SparseStereo/src/sparsestereo/census.h diff --git a/.great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/censuswindow.h b/perception/mil_vision/exFAST_SparseStereo/src/sparsestereo/censuswindow.h similarity index 100% rename from .great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/censuswindow.h rename to 
perception/mil_vision/exFAST_SparseStereo/src/sparsestereo/censuswindow.h diff --git a/.great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/exception.h b/perception/mil_vision/exFAST_SparseStereo/src/sparsestereo/exception.h similarity index 100% rename from .great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/exception.h rename to perception/mil_vision/exFAST_SparseStereo/src/sparsestereo/exception.h diff --git a/.great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/extendedfast-inl.h b/perception/mil_vision/exFAST_SparseStereo/src/sparsestereo/extendedfast-inl.h similarity index 100% rename from .great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/extendedfast-inl.h rename to perception/mil_vision/exFAST_SparseStereo/src/sparsestereo/extendedfast-inl.h diff --git a/.great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/extendedfast.cpp b/perception/mil_vision/exFAST_SparseStereo/src/sparsestereo/extendedfast.cpp similarity index 100% rename from .great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/extendedfast.cpp rename to perception/mil_vision/exFAST_SparseStereo/src/sparsestereo/extendedfast.cpp diff --git a/.great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/extendedfast.h b/perception/mil_vision/exFAST_SparseStereo/src/sparsestereo/extendedfast.h similarity index 100% rename from .great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/extendedfast.h rename to perception/mil_vision/exFAST_SparseStereo/src/sparsestereo/extendedfast.h diff --git a/.great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/fast9-inl.h b/perception/mil_vision/exFAST_SparseStereo/src/sparsestereo/fast9-inl.h similarity index 100% rename from .great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/fast9-inl.h rename to perception/mil_vision/exFAST_SparseStereo/src/sparsestereo/fast9-inl.h diff --git a/.great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/fast9.h b/perception/mil_vision/exFAST_SparseStereo/src/sparsestereo/fast9.h similarity index 100% rename from .great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/fast9.h rename to perception/mil_vision/exFAST_SparseStereo/src/sparsestereo/fast9.h diff --git a/.great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/hammingdistance.cpp b/perception/mil_vision/exFAST_SparseStereo/src/sparsestereo/hammingdistance.cpp similarity index 100% rename from .great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/hammingdistance.cpp rename to perception/mil_vision/exFAST_SparseStereo/src/sparsestereo/hammingdistance.cpp diff --git a/.great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/hammingdistance.h b/perception/mil_vision/exFAST_SparseStereo/src/sparsestereo/hammingdistance.h similarity index 100% rename from .great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/hammingdistance.h rename to perception/mil_vision/exFAST_SparseStereo/src/sparsestereo/hammingdistance.h diff --git a/.great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/imageconversion.cpp b/perception/mil_vision/exFAST_SparseStereo/src/sparsestereo/imageconversion.cpp similarity index 100% rename from .great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/imageconversion.cpp rename 
to perception/mil_vision/exFAST_SparseStereo/src/sparsestereo/imageconversion.cpp diff --git a/.great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/imageconversion.h b/perception/mil_vision/exFAST_SparseStereo/src/sparsestereo/imageconversion.h similarity index 100% rename from .great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/imageconversion.h rename to perception/mil_vision/exFAST_SparseStereo/src/sparsestereo/imageconversion.h diff --git a/.great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/simd.cpp b/perception/mil_vision/exFAST_SparseStereo/src/sparsestereo/simd.cpp similarity index 100% rename from .great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/simd.cpp rename to perception/mil_vision/exFAST_SparseStereo/src/sparsestereo/simd.cpp diff --git a/.great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/simd.h b/perception/mil_vision/exFAST_SparseStereo/src/sparsestereo/simd.h similarity index 100% rename from .great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/simd.h rename to perception/mil_vision/exFAST_SparseStereo/src/sparsestereo/simd.h diff --git a/.great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/sparsematch.h b/perception/mil_vision/exFAST_SparseStereo/src/sparsestereo/sparsematch.h similarity index 100% rename from .great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/sparsematch.h rename to perception/mil_vision/exFAST_SparseStereo/src/sparsestereo/sparsematch.h diff --git a/.great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/sparserectification.cpp b/perception/mil_vision/exFAST_SparseStereo/src/sparsestereo/sparserectification.cpp similarity index 100% rename from .great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/sparserectification.cpp rename to perception/mil_vision/exFAST_SparseStereo/src/sparsestereo/sparserectification.cpp diff --git a/.great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/sparserectification.h b/perception/mil_vision/exFAST_SparseStereo/src/sparsestereo/sparserectification.h similarity index 100% rename from .great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/sparserectification.h rename to perception/mil_vision/exFAST_SparseStereo/src/sparsestereo/sparserectification.h diff --git a/.great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/sparsestereo-inl.h b/perception/mil_vision/exFAST_SparseStereo/src/sparsestereo/sparsestereo-inl.h similarity index 100% rename from .great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/sparsestereo-inl.h rename to perception/mil_vision/exFAST_SparseStereo/src/sparsestereo/sparsestereo-inl.h diff --git a/.great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/sparsestereo.h b/perception/mil_vision/exFAST_SparseStereo/src/sparsestereo/sparsestereo.h similarity index 100% rename from .great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/sparsestereo.h rename to perception/mil_vision/exFAST_SparseStereo/src/sparsestereo/sparsestereo.h diff --git a/.great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/stereorectification.cpp b/perception/mil_vision/exFAST_SparseStereo/src/sparsestereo/stereorectification.cpp similarity index 100% rename from 
.great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/stereorectification.cpp rename to perception/mil_vision/exFAST_SparseStereo/src/sparsestereo/stereorectification.cpp diff --git a/.great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/stereorectification.h b/perception/mil_vision/exFAST_SparseStereo/src/sparsestereo/stereorectification.h similarity index 100% rename from .great_merge/perception/navigator_vision/exFAST_SparseStereo/src/sparsestereo/stereorectification.h rename to perception/mil_vision/exFAST_SparseStereo/src/sparsestereo/stereorectification.h diff --git a/.great_merge/perception/navigator_vision/include/missions/underwater_shape_identification.hpp b/perception/mil_vision/include/missions/underwater_shape_identification.hpp similarity index 100% rename from .great_merge/perception/navigator_vision/include/missions/underwater_shape_identification.hpp rename to perception/mil_vision/include/missions/underwater_shape_identification.hpp diff --git a/.great_merge/perception/navigator_vision/include/navigator_vision_lib/active_contours.hpp b/perception/mil_vision/include/navigator_vision_lib/active_contours.hpp similarity index 100% rename from .great_merge/perception/navigator_vision/include/navigator_vision_lib/active_contours.hpp rename to perception/mil_vision/include/navigator_vision_lib/active_contours.hpp diff --git a/.great_merge/perception/navigator_vision/include/navigator_vision_lib/colorizer/camera_observer.hpp b/perception/mil_vision/include/navigator_vision_lib/colorizer/camera_observer.hpp similarity index 100% rename from .great_merge/perception/navigator_vision/include/navigator_vision_lib/colorizer/camera_observer.hpp rename to perception/mil_vision/include/navigator_vision_lib/colorizer/camera_observer.hpp diff --git a/.great_merge/perception/navigator_vision/include/navigator_vision_lib/colorizer/color_observation.hpp b/perception/mil_vision/include/navigator_vision_lib/colorizer/color_observation.hpp similarity index 100% rename from .great_merge/perception/navigator_vision/include/navigator_vision_lib/colorizer/color_observation.hpp rename to perception/mil_vision/include/navigator_vision_lib/colorizer/color_observation.hpp diff --git a/.great_merge/perception/navigator_vision/include/navigator_vision_lib/colorizer/common.hpp b/perception/mil_vision/include/navigator_vision_lib/colorizer/common.hpp similarity index 100% rename from .great_merge/perception/navigator_vision/include/navigator_vision_lib/colorizer/common.hpp rename to perception/mil_vision/include/navigator_vision_lib/colorizer/common.hpp diff --git a/.great_merge/perception/navigator_vision/include/navigator_vision_lib/colorizer/pcd_colorizer.hpp b/perception/mil_vision/include/navigator_vision_lib/colorizer/pcd_colorizer.hpp similarity index 100% rename from .great_merge/perception/navigator_vision/include/navigator_vision_lib/colorizer/pcd_colorizer.hpp rename to perception/mil_vision/include/navigator_vision_lib/colorizer/pcd_colorizer.hpp diff --git a/.great_merge/perception/navigator_vision/include/navigator_vision_lib/colorizer/single_cloud_processor.hpp b/perception/mil_vision/include/navigator_vision_lib/colorizer/single_cloud_processor.hpp similarity index 100% rename from .great_merge/perception/navigator_vision/include/navigator_vision_lib/colorizer/single_cloud_processor.hpp rename to perception/mil_vision/include/navigator_vision_lib/colorizer/single_cloud_processor.hpp diff --git 
a/.great_merge/perception/navigator_vision/include/navigator_vision_lib/cv_tools.hpp b/perception/mil_vision/include/navigator_vision_lib/cv_tools.hpp similarity index 100% rename from .great_merge/perception/navigator_vision/include/navigator_vision_lib/cv_tools.hpp rename to perception/mil_vision/include/navigator_vision_lib/cv_tools.hpp diff --git a/.great_merge/perception/navigator_vision/include/navigator_vision_lib/image_acquisition/camera_frame.hpp b/perception/mil_vision/include/navigator_vision_lib/image_acquisition/camera_frame.hpp similarity index 100% rename from .great_merge/perception/navigator_vision/include/navigator_vision_lib/image_acquisition/camera_frame.hpp rename to perception/mil_vision/include/navigator_vision_lib/image_acquisition/camera_frame.hpp diff --git a/.great_merge/perception/navigator_vision/include/navigator_vision_lib/image_acquisition/camera_frame_sequence.hpp b/perception/mil_vision/include/navigator_vision_lib/image_acquisition/camera_frame_sequence.hpp similarity index 100% rename from .great_merge/perception/navigator_vision/include/navigator_vision_lib/image_acquisition/camera_frame_sequence.hpp rename to perception/mil_vision/include/navigator_vision_lib/image_acquisition/camera_frame_sequence.hpp diff --git a/.great_merge/perception/navigator_vision/include/navigator_vision_lib/image_acquisition/camera_model.hpp b/perception/mil_vision/include/navigator_vision_lib/image_acquisition/camera_model.hpp similarity index 100% rename from .great_merge/perception/navigator_vision/include/navigator_vision_lib/image_acquisition/camera_model.hpp rename to perception/mil_vision/include/navigator_vision_lib/image_acquisition/camera_model.hpp diff --git a/.great_merge/perception/navigator_vision/include/navigator_vision_lib/image_acquisition/ros_camera_stream.hpp b/perception/mil_vision/include/navigator_vision_lib/image_acquisition/ros_camera_stream.hpp similarity index 100% rename from .great_merge/perception/navigator_vision/include/navigator_vision_lib/image_acquisition/ros_camera_stream.hpp rename to perception/mil_vision/include/navigator_vision_lib/image_acquisition/ros_camera_stream.hpp diff --git a/.great_merge/perception/navigator_vision/include/navigator_vision_lib/image_filtering.hpp b/perception/mil_vision/include/navigator_vision_lib/image_filtering.hpp similarity index 100% rename from .great_merge/perception/navigator_vision/include/navigator_vision_lib/image_filtering.hpp rename to perception/mil_vision/include/navigator_vision_lib/image_filtering.hpp diff --git a/.great_merge/perception/navigator_vision/include/navigator_vision_lib/pcd_sub_pub_algorithm.hpp b/perception/mil_vision/include/navigator_vision_lib/pcd_sub_pub_algorithm.hpp similarity index 100% rename from .great_merge/perception/navigator_vision/include/navigator_vision_lib/pcd_sub_pub_algorithm.hpp rename to perception/mil_vision/include/navigator_vision_lib/pcd_sub_pub_algorithm.hpp diff --git a/.great_merge/perception/navigator_vision/include/navigator_vision_lib/point_cloud_algorithms.hpp b/perception/mil_vision/include/navigator_vision_lib/point_cloud_algorithms.hpp similarity index 100% rename from .great_merge/perception/navigator_vision/include/navigator_vision_lib/point_cloud_algorithms.hpp rename to perception/mil_vision/include/navigator_vision_lib/point_cloud_algorithms.hpp diff --git a/.great_merge/perception/navigator_vision/include/navigator_vision_lib/visualization.hpp b/perception/mil_vision/include/navigator_vision_lib/visualization.hpp similarity index 100% 
rename from .great_merge/perception/navigator_vision/include/navigator_vision_lib/visualization.hpp rename to perception/mil_vision/include/navigator_vision_lib/visualization.hpp diff --git a/.great_merge/perception/navigator_vision/nodes/A_C_test_vision.cpp b/perception/mil_vision/nodes/A_C_test_vision.cpp similarity index 100% rename from .great_merge/perception/navigator_vision/nodes/A_C_test_vision.cpp rename to perception/mil_vision/nodes/A_C_test_vision.cpp diff --git a/.great_merge/perception/navigator_vision/nodes/camera_stream_demo.cpp b/perception/mil_vision/nodes/camera_stream_demo.cpp similarity index 100% rename from .great_merge/perception/navigator_vision/nodes/camera_stream_demo.cpp rename to perception/mil_vision/nodes/camera_stream_demo.cpp diff --git a/.great_merge/perception/navigator_vision/nodes/camera_to_lidar/CameraLidarTransformer.cpp b/perception/mil_vision/nodes/camera_to_lidar/CameraLidarTransformer.cpp similarity index 100% rename from .great_merge/perception/navigator_vision/nodes/camera_to_lidar/CameraLidarTransformer.cpp rename to perception/mil_vision/nodes/camera_to_lidar/CameraLidarTransformer.cpp diff --git a/.great_merge/perception/navigator_vision/nodes/camera_to_lidar/CameraLidarTransformer.hpp b/perception/mil_vision/nodes/camera_to_lidar/CameraLidarTransformer.hpp similarity index 100% rename from .great_merge/perception/navigator_vision/nodes/camera_to_lidar/CameraLidarTransformer.hpp rename to perception/mil_vision/nodes/camera_to_lidar/CameraLidarTransformer.hpp diff --git a/.great_merge/perception/navigator_vision/nodes/camera_to_lidar/main.cpp b/perception/mil_vision/nodes/camera_to_lidar/main.cpp similarity index 100% rename from .great_merge/perception/navigator_vision/nodes/camera_to_lidar/main.cpp rename to perception/mil_vision/nodes/camera_to_lidar/main.cpp diff --git a/.great_merge/perception/navigator_vision/nodes/database_color.py b/perception/mil_vision/nodes/database_color.py similarity index 100% rename from .great_merge/perception/navigator_vision/nodes/database_color.py rename to perception/mil_vision/nodes/database_color.py diff --git a/.great_merge/perception/navigator_vision/nodes/object_classifier.py b/perception/mil_vision/nodes/object_classifier.py similarity index 100% rename from .great_merge/perception/navigator_vision/nodes/object_classifier.py rename to perception/mil_vision/nodes/object_classifier.py diff --git a/.great_merge/perception/navigator_vision/nodes/pinger_finder.py b/perception/mil_vision/nodes/pinger_finder.py similarity index 100% rename from .great_merge/perception/navigator_vision/nodes/pinger_finder.py rename to perception/mil_vision/nodes/pinger_finder.py diff --git a/.great_merge/perception/navigator_vision/nodes/pinger_locator.py b/perception/mil_vision/nodes/pinger_locator.py similarity index 100% rename from .great_merge/perception/navigator_vision/nodes/pinger_locator.py rename to perception/mil_vision/nodes/pinger_locator.py diff --git a/.great_merge/perception/navigator_vision/nodes/shape_identification/DockShapeVision.cpp b/perception/mil_vision/nodes/shape_identification/DockShapeVision.cpp similarity index 100% rename from .great_merge/perception/navigator_vision/nodes/shape_identification/DockShapeVision.cpp rename to perception/mil_vision/nodes/shape_identification/DockShapeVision.cpp diff --git a/.great_merge/perception/navigator_vision/nodes/shape_identification/DockShapeVision.h b/perception/mil_vision/nodes/shape_identification/DockShapeVision.h similarity index 100% rename from 
.great_merge/perception/navigator_vision/nodes/shape_identification/DockShapeVision.h rename to perception/mil_vision/nodes/shape_identification/DockShapeVision.h diff --git a/.great_merge/perception/navigator_vision/nodes/shape_identification/GrayscaleContour/GrayscaleContour.cpp b/perception/mil_vision/nodes/shape_identification/GrayscaleContour/GrayscaleContour.cpp similarity index 100% rename from .great_merge/perception/navigator_vision/nodes/shape_identification/GrayscaleContour/GrayscaleContour.cpp rename to perception/mil_vision/nodes/shape_identification/GrayscaleContour/GrayscaleContour.cpp diff --git a/.great_merge/perception/navigator_vision/nodes/shape_identification/GrayscaleContour/GrayscaleContour.h b/perception/mil_vision/nodes/shape_identification/GrayscaleContour/GrayscaleContour.h similarity index 100% rename from .great_merge/perception/navigator_vision/nodes/shape_identification/GrayscaleContour/GrayscaleContour.h rename to perception/mil_vision/nodes/shape_identification/GrayscaleContour/GrayscaleContour.h diff --git a/.great_merge/perception/navigator_vision/nodes/shape_identification/ShapeTracker.cpp b/perception/mil_vision/nodes/shape_identification/ShapeTracker.cpp similarity index 100% rename from .great_merge/perception/navigator_vision/nodes/shape_identification/ShapeTracker.cpp rename to perception/mil_vision/nodes/shape_identification/ShapeTracker.cpp diff --git a/.great_merge/perception/navigator_vision/nodes/shape_identification/ShapeTracker.h b/perception/mil_vision/nodes/shape_identification/ShapeTracker.h similarity index 100% rename from .great_merge/perception/navigator_vision/nodes/shape_identification/ShapeTracker.h rename to perception/mil_vision/nodes/shape_identification/ShapeTracker.h diff --git a/.great_merge/perception/navigator_vision/nodes/shape_identification/TrackedShape.cpp b/perception/mil_vision/nodes/shape_identification/TrackedShape.cpp similarity index 100% rename from .great_merge/perception/navigator_vision/nodes/shape_identification/TrackedShape.cpp rename to perception/mil_vision/nodes/shape_identification/TrackedShape.cpp diff --git a/.great_merge/perception/navigator_vision/nodes/shape_identification/TrackedShape.h b/perception/mil_vision/nodes/shape_identification/TrackedShape.h similarity index 100% rename from .great_merge/perception/navigator_vision/nodes/shape_identification/TrackedShape.h rename to perception/mil_vision/nodes/shape_identification/TrackedShape.h diff --git a/.great_merge/perception/navigator_vision/nodes/shape_identification/main.cpp b/perception/mil_vision/nodes/shape_identification/main.cpp similarity index 100% rename from .great_merge/perception/navigator_vision/nodes/shape_identification/main.cpp rename to perception/mil_vision/nodes/shape_identification/main.cpp diff --git a/.great_merge/perception/navigator_vision/nodes/shooter_finder.py b/perception/mil_vision/nodes/shooter_finder.py similarity index 100% rename from .great_merge/perception/navigator_vision/nodes/shooter_finder.py rename to perception/mil_vision/nodes/shooter_finder.py diff --git a/.great_merge/perception/navigator_vision/nodes/start_gate_manual.py b/perception/mil_vision/nodes/start_gate_manual.py similarity index 100% rename from .great_merge/perception/navigator_vision/nodes/start_gate_manual.py rename to perception/mil_vision/nodes/start_gate_manual.py diff --git a/.great_merge/perception/navigator_vision/nodes/stereo_point_cloud_driver.cpp b/perception/mil_vision/nodes/stereo_point_cloud_driver.cpp similarity index 100% rename 
from .great_merge/perception/navigator_vision/nodes/stereo_point_cloud_driver.cpp rename to perception/mil_vision/nodes/stereo_point_cloud_driver.cpp diff --git a/.great_merge/perception/navigator_vision/nodes/underwater_shape_identification_vision.cpp b/perception/mil_vision/nodes/underwater_shape_identification_vision.cpp similarity index 100% rename from .great_merge/perception/navigator_vision/nodes/underwater_shape_identification_vision.cpp rename to perception/mil_vision/nodes/underwater_shape_identification_vision.cpp diff --git a/.great_merge/perception/navigator_vision/nodes/velodyne_pcd_colorizer.cpp b/perception/mil_vision/nodes/velodyne_pcd_colorizer.cpp similarity index 100% rename from .great_merge/perception/navigator_vision/nodes/velodyne_pcd_colorizer.cpp rename to perception/mil_vision/nodes/velodyne_pcd_colorizer.cpp diff --git a/.great_merge/perception/navigator_vision/object_classification/HOG_descriptor.py b/perception/mil_vision/object_classification/HOG_descriptor.py similarity index 100% rename from .great_merge/perception/navigator_vision/object_classification/HOG_descriptor.py rename to perception/mil_vision/object_classification/HOG_descriptor.py diff --git a/.great_merge/perception/navigator_vision/object_classification/SVM_classifier.py b/perception/mil_vision/object_classification/SVM_classifier.py similarity index 100% rename from .great_merge/perception/navigator_vision/object_classification/SVM_classifier.py rename to perception/mil_vision/object_classification/SVM_classifier.py diff --git a/.great_merge/perception/navigator_vision/object_classification/__init__.py b/perception/mil_vision/object_classification/__init__.py similarity index 100% rename from .great_merge/perception/navigator_vision/object_classification/__init__.py rename to perception/mil_vision/object_classification/__init__.py diff --git a/.great_merge/perception/navigator_vision/object_classification/depickler.py b/perception/mil_vision/object_classification/depickler.py similarity index 100% rename from .great_merge/perception/navigator_vision/object_classification/depickler.py rename to perception/mil_vision/object_classification/depickler.py diff --git a/.great_merge/perception/navigator_vision/object_classification/depicklify.py b/perception/mil_vision/object_classification/depicklify.py similarity index 100% rename from .great_merge/perception/navigator_vision/object_classification/depicklify.py rename to perception/mil_vision/object_classification/depicklify.py diff --git a/.great_merge/perception/navigator_vision/object_classification/lidar_to_image.py b/perception/mil_vision/object_classification/lidar_to_image.py similarity index 100% rename from .great_merge/perception/navigator_vision/object_classification/lidar_to_image.py rename to perception/mil_vision/object_classification/lidar_to_image.py diff --git a/.great_merge/perception/navigator_vision/object_classification/median_flow.py b/perception/mil_vision/object_classification/median_flow.py similarity index 100% rename from .great_merge/perception/navigator_vision/object_classification/median_flow.py rename to perception/mil_vision/object_classification/median_flow.py diff --git a/.great_merge/perception/navigator_vision/object_classification/object_classification.py b/perception/mil_vision/object_classification/object_classification.py similarity index 100% rename from .great_merge/perception/navigator_vision/object_classification/object_classification.py rename to 
perception/mil_vision/object_classification/object_classification.py diff --git a/.great_merge/perception/navigator_vision/object_classification/roi_generator.py b/perception/mil_vision/object_classification/roi_generator.py similarity index 100% rename from .great_merge/perception/navigator_vision/object_classification/roi_generator.py rename to perception/mil_vision/object_classification/roi_generator.py diff --git a/.great_merge/perception/navigator_vision/object_classification/stc_train.py b/perception/mil_vision/object_classification/stc_train.py similarity index 100% rename from .great_merge/perception/navigator_vision/object_classification/stc_train.py rename to perception/mil_vision/object_classification/stc_train.py diff --git a/.great_merge/perception/navigator_vision/package.xml b/perception/mil_vision/package.xml similarity index 100% rename from .great_merge/perception/navigator_vision/package.xml rename to perception/mil_vision/package.xml diff --git a/.great_merge/perception/navigator_vision/ros_tools/on_the_fly_thresholder.py b/perception/mil_vision/ros_tools/on_the_fly_thresholder.py similarity index 100% rename from .great_merge/perception/navigator_vision/ros_tools/on_the_fly_thresholder.py rename to perception/mil_vision/ros_tools/on_the_fly_thresholder.py diff --git a/.great_merge/perception/navigator_vision/setup.py b/perception/mil_vision/setup.py similarity index 100% rename from .great_merge/perception/navigator_vision/setup.py rename to perception/mil_vision/setup.py diff --git a/.great_merge/perception/navigator_vision/src/missions/underwater_shape_identification.cpp b/perception/mil_vision/src/missions/underwater_shape_identification.cpp similarity index 100% rename from .great_merge/perception/navigator_vision/src/missions/underwater_shape_identification.cpp rename to perception/mil_vision/src/missions/underwater_shape_identification.cpp diff --git a/.great_merge/perception/navigator_vision/src/navigator_vision_lib/active_contours.cpp b/perception/mil_vision/src/navigator_vision_lib/active_contours.cpp similarity index 100% rename from .great_merge/perception/navigator_vision/src/navigator_vision_lib/active_contours.cpp rename to perception/mil_vision/src/navigator_vision_lib/active_contours.cpp diff --git a/.great_merge/perception/navigator_vision/src/navigator_vision_lib/colorizer/camera_observer.cpp b/perception/mil_vision/src/navigator_vision_lib/colorizer/camera_observer.cpp similarity index 100% rename from .great_merge/perception/navigator_vision/src/navigator_vision_lib/colorizer/camera_observer.cpp rename to perception/mil_vision/src/navigator_vision_lib/colorizer/camera_observer.cpp diff --git a/.great_merge/perception/navigator_vision/src/navigator_vision_lib/colorizer/color_observation.cpp b/perception/mil_vision/src/navigator_vision_lib/colorizer/color_observation.cpp similarity index 100% rename from .great_merge/perception/navigator_vision/src/navigator_vision_lib/colorizer/color_observation.cpp rename to perception/mil_vision/src/navigator_vision_lib/colorizer/color_observation.cpp diff --git a/.great_merge/perception/navigator_vision/src/navigator_vision_lib/colorizer/pcd_colorizer.cpp b/perception/mil_vision/src/navigator_vision_lib/colorizer/pcd_colorizer.cpp similarity index 100% rename from .great_merge/perception/navigator_vision/src/navigator_vision_lib/colorizer/pcd_colorizer.cpp rename to perception/mil_vision/src/navigator_vision_lib/colorizer/pcd_colorizer.cpp diff --git 
a/.great_merge/perception/navigator_vision/src/navigator_vision_lib/colorizer/single_cloud_processor.cpp b/perception/mil_vision/src/navigator_vision_lib/colorizer/single_cloud_processor.cpp similarity index 100% rename from .great_merge/perception/navigator_vision/src/navigator_vision_lib/colorizer/single_cloud_processor.cpp rename to perception/mil_vision/src/navigator_vision_lib/colorizer/single_cloud_processor.cpp diff --git a/.great_merge/perception/navigator_vision/src/navigator_vision_lib/cv_utils.cc b/perception/mil_vision/src/navigator_vision_lib/cv_utils.cc similarity index 100% rename from .great_merge/perception/navigator_vision/src/navigator_vision_lib/cv_utils.cc rename to perception/mil_vision/src/navigator_vision_lib/cv_utils.cc diff --git a/.great_merge/perception/navigator_vision/src/navigator_vision_lib/image_filtering.cpp b/perception/mil_vision/src/navigator_vision_lib/image_filtering.cpp similarity index 100% rename from .great_merge/perception/navigator_vision/src/navigator_vision_lib/image_filtering.cpp rename to perception/mil_vision/src/navigator_vision_lib/image_filtering.cpp diff --git a/.great_merge/perception/navigator_vision/src/navigator_vision_lib/point_cloud_algorithms.cc b/perception/mil_vision/src/navigator_vision_lib/point_cloud_algorithms.cc similarity index 100% rename from .great_merge/perception/navigator_vision/src/navigator_vision_lib/point_cloud_algorithms.cc rename to perception/mil_vision/src/navigator_vision_lib/point_cloud_algorithms.cc diff --git a/.great_merge/perception/navigator_vision/src/navigator_vision_lib/visualization.cc b/perception/mil_vision/src/navigator_vision_lib/visualization.cc similarity index 100% rename from .great_merge/perception/navigator_vision/src/navigator_vision_lib/visualization.cc rename to perception/mil_vision/src/navigator_vision_lib/visualization.cc diff --git a/.great_merge/perception/navigator_vision/templates/underwater_shape_identification/circle.png b/perception/mil_vision/templates/underwater_shape_identification/circle.png similarity index 100% rename from .great_merge/perception/navigator_vision/templates/underwater_shape_identification/circle.png rename to perception/mil_vision/templates/underwater_shape_identification/circle.png diff --git a/.great_merge/perception/navigator_vision/templates/underwater_shape_identification/cross.png b/perception/mil_vision/templates/underwater_shape_identification/cross.png similarity index 100% rename from .great_merge/perception/navigator_vision/templates/underwater_shape_identification/cross.png rename to perception/mil_vision/templates/underwater_shape_identification/cross.png diff --git a/.great_merge/perception/navigator_vision/templates/underwater_shape_identification/triangle.png b/perception/mil_vision/templates/underwater_shape_identification/triangle.png similarity index 100% rename from .great_merge/perception/navigator_vision/templates/underwater_shape_identification/triangle.png rename to perception/mil_vision/templates/underwater_shape_identification/triangle.png From d1759a3dcf1cdddecf46d10abf847a559bdd35c1 Mon Sep 17 00:00:00 2001 From: David Soto Date: Fri, 17 Mar 2017 20:02:01 -0400 Subject: [PATCH 238/267] GREAT MERGE: remove files that were specific to a robot --- perception/mil_vision/README.md | 1 - perception/mil_vision/calibration.xml | 94 ---- .../config/underwater_shape_detection.yaml | 15 - .../underwater_shape_identification.hpp | 80 --- .../mil_vision/nodes/A_C_test_vision.cpp | 79 --- perception/mil_vision/nodes/pinger_locator.py | 45 
--
 .../shape_identification/DockShapeVision.cpp  |  16 -
 .../shape_identification/DockShapeVision.h    |  17 -
 .../GrayscaleContour/GrayscaleContour.cpp     | 342 ------------
 .../GrayscaleContour/GrayscaleContour.h       |  80 ---
 .../shape_identification/ShapeTracker.cpp     |  81 ---
 .../nodes/shape_identification/ShapeTracker.h |  24 -
 .../shape_identification/TrackedShape.cpp     |  84 ---
 .../nodes/shape_identification/TrackedShape.h |  27 -
 .../nodes/shape_identification/main.cpp       | 141 -----
 .../mil_vision/nodes/start_gate_manual.py     | 308 -----------
 .../nodes/stereo_point_cloud_driver.cpp       | 500 ------------
 ...underwater_shape_identification_vision.cpp |  28 -
 .../nodes/velodyne_pcd_colorizer.cpp          |  26 -
 .../object_classification/stc_train.py        | 208 --------
 perception/mil_vision/setup.py                |   2 +-
 .../underwater_shape_identification.cpp       | 108 ----
 .../circle.png                                | Bin 2590 -> 0 bytes
 .../underwater_shape_identification/cross.png | Bin 761 -> 0 bytes
 .../triangle.png                              | Bin 1961 -> 0 bytes
 25 files changed, 1 insertion(+), 2305 deletions(-)
 delete mode 100644 perception/mil_vision/README.md
 delete mode 100644 perception/mil_vision/calibration.xml
 delete mode 100644 perception/mil_vision/config/underwater_shape_detection.yaml
 delete mode 100644 perception/mil_vision/include/missions/underwater_shape_identification.hpp
 delete mode 100644 perception/mil_vision/nodes/A_C_test_vision.cpp
 delete mode 100644 perception/mil_vision/nodes/pinger_locator.py
 delete mode 100644 perception/mil_vision/nodes/shape_identification/DockShapeVision.cpp
 delete mode 100644 perception/mil_vision/nodes/shape_identification/DockShapeVision.h
 delete mode 100644 perception/mil_vision/nodes/shape_identification/GrayscaleContour/GrayscaleContour.cpp
 delete mode 100644 perception/mil_vision/nodes/shape_identification/GrayscaleContour/GrayscaleContour.h
 delete mode 100644 perception/mil_vision/nodes/shape_identification/ShapeTracker.cpp
 delete mode 100644 perception/mil_vision/nodes/shape_identification/ShapeTracker.h
 delete mode 100644 perception/mil_vision/nodes/shape_identification/TrackedShape.cpp
 delete mode 100644 perception/mil_vision/nodes/shape_identification/TrackedShape.h
 delete mode 100644 perception/mil_vision/nodes/shape_identification/main.cpp
 delete mode 100755 perception/mil_vision/nodes/start_gate_manual.py
 delete mode 100644 perception/mil_vision/nodes/stereo_point_cloud_driver.cpp
 delete mode 100644 perception/mil_vision/nodes/underwater_shape_identification_vision.cpp
 delete mode 100644 perception/mil_vision/nodes/velodyne_pcd_colorizer.cpp
 delete mode 100644 perception/mil_vision/object_classification/stc_train.py
 delete mode 100644 perception/mil_vision/src/missions/underwater_shape_identification.cpp
 delete mode 100644 perception/mil_vision/templates/underwater_shape_identification/circle.png
 delete mode 100644 perception/mil_vision/templates/underwater_shape_identification/cross.png
 delete mode 100644 perception/mil_vision/templates/underwater_shape_identification/triangle.png

diff --git a/perception/mil_vision/README.md b/perception/mil_vision/README.md
deleted file mode 100644
index 01d81d7c..00000000
--- a/perception/mil_vision/README.md
+++ /dev/null
@@ -1 +0,0 @@
-rosrun navigator_perception scan_the_code
diff --git a/perception/mil_vision/calibration.xml b/perception/mil_vision/calibration.xml
deleted file mode 100644
index fe6a79f6..00000000
--- a/perception/mil_vision/calibration.xml
+++ /dev/null
@@ -1,94 +0,0 @@
-<?xml version="1.0"?>
-<opencv_storage>
-<M1 type_id="opencv-matrix">
-  <rows>3</rows>
-  <cols>3</cols>
-  <dt>d</dt>
-  <data>
-    702.1922 0. 476.5935
-    0. 699.2620 298.8085
-    0. 0. 1.</data></M1>
-<D1 type_id="opencv-matrix">
-  <rows>1</rows>
-  <cols>5</cols>
-  <dt>d</dt>
-  <data>
-    -0.1701 0.1220 -8.5339e-04 -0.0012 -0.0304</data></D1>
-<M2 type_id="opencv-matrix">
-  <rows>3</rows>
-  <cols>3</cols>
-  <dt>d</dt>
-  <data>
-    704.5182 0 476.1918
-    0 701.7624 300.8320
-    0 0 1.0000</data></M2>
-<D2 type_id="opencv-matrix">
-  <rows>1</rows>
-  <cols>5</cols>
-  <dt>d</dt>
-  <data>
-    -0.1770 0.1433 -3.1992e-04 -0.0014 -0.0434</data></D2>
-<R1 type_id="opencv-matrix">
-  <rows>3</rows>
-  <cols>3</cols>
-  <dt>d</dt>
-  <data>
-    1 0 0
-    0 1 0
-    0 0 1</data></R1>
-<R2 type_id="opencv-matrix">
-  <rows>3</rows>
-  <cols>3</cols>
-  <dt>d</dt>
-  <data>
-    1 0 0
-    0 1 0
-    0 0 1</data></R2>
-<P1 type_id="opencv-matrix">
-  <rows>3</rows>
-  <cols>4</cols>
-  <dt>d</dt>
-  <data>
-    702.1922 0 476.5935 -140.4384
-    0 699.2620 298.8085 0
-    0 0 1.0000 0</data></P1>
-<P2 type_id="opencv-matrix">
-  <rows>3</rows>
-  <cols>4</cols>
-  <dt>d</dt>
-  <data>
-    704.5182 0 476.1918 0
-    0 701.7624 300.8320 0
-    0 0 1.0000 0</data></P2>
-<Q type_id="opencv-matrix">
-  <rows>4</rows>
-  <cols>4</cols>
-  <dt>d</dt>
-  <data>
-    1. 0. 0. -3.3994384384155273e+02 0. 1. 0. -2.2540025138854980e+02 0.
-    0. 0. 5.4581318104785521e+02 0. 0. -9.1620188198129178e+00 0.</data></Q>
-<size type_id="opencv-matrix">
-  <rows>2</rows>
-  <cols>1</cols>
-  <dt>i</dt>
-  <data>
-    960 600</data></size>
-</opencv_storage>
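The deleted calibration.xml above is an OpenCV FileStorage document holding a stereo pair's calibration: two 3x3 camera matrices, two 1x5 distortion vectors, rectification rotations, 3x4 projection matrices, the 4x4 disparity-to-depth matrix Q, and the 960x600 image size. The element names shown (M1/D1/M2/D2/R1/R2/P1/P2/Q/size) are an assumption following OpenCV's stereo_calib convention; the original markup was lost. A minimal Python sketch of how such a file would be consumed, assuming those node names and a recent OpenCV build whose Python bindings expose cv2.FileStorage:

    import cv2

    # Minimal sketch: load the stereo calibration and build undistort/rectify
    # maps for the left camera. Node names (M1, D1, R1, P1) are assumed, not
    # confirmed by this patch.
    fs = cv2.FileStorage('calibration.xml', cv2.FILE_STORAGE_READ)
    M1 = fs.getNode('M1').mat()  # 3x3 left camera matrix
    D1 = fs.getNode('D1').mat()  # 1x5 left distortion coefficients
    R1 = fs.getNode('R1').mat()  # 3x3 left rectification rotation
    P1 = fs.getNode('P1').mat()  # 3x4 left projection matrix
    fs.release()

    w, h = 960, 600  # from the <size> node
    map1, map2 = cv2.initUndistortRectifyMap(M1, D1, R1, P1, (w, h), cv2.CV_32FC1)
    # rectified_left = cv2.remap(raw_left, map1, map2, cv2.INTER_LINEAR)
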
diff --git a/perception/mil_vision/config/underwater_shape_detection.yaml b/perception/mil_vision/config/underwater_shape_detection.yaml
deleted file mode 100644
index 570ffd03..00000000
--- a/perception/mil_vision/config/underwater_shape_detection.yaml
+++ /dev/null
@@ -1,15 +0,0 @@
-# Underwater Shape Identification Challenge Parameters
-
-template_directory : "templates/underwater_shape_identification/"
-depth : 5.0
-depth_uncertainty : 1.0
-shape_area : 1.0
-min_detections : 10
-min_boat_displacement : 0.1
-is_target : {"circle":true, "cross":true, "triangle":true}
-detection_thresholds : {"circle":0.5, "cross":0.5, "triangle":0.5}
-
-camera_topic : "/down/image_raw"
-buffer_size : 5
-
-# calibration_images : "calib/underwater_shape_identification/"
diff --git a/perception/mil_vision/include/missions/underwater_shape_identification.hpp b/perception/mil_vision/include/missions/underwater_shape_identification.hpp
deleted file mode 100644
index b288fa17..00000000
--- a/perception/mil_vision/include/missions/underwater_shape_identification.hpp
+++ /dev/null
@@ -1,80 +0,0 @@
-#pragma once
-
-#include
-#include
-#include
-#include
-#include
-#include
-
-#include
-
-#include
-#include
-#include
-
-#include
-#include
-
-#include
-#include
-#include
-
-namespace nav
-{
-
-class ShapeDetection;
-
-class Shape
-{
-  std::string _name;
-  std::string _template_path;
-  float _pixels_per_meter;
-  float _radial_symmetry_angle;
-  cv::Mat _template;
-  bool _ok = false;
-public:
-  static std::vector<Shape> loadShapes(std::string directory, float shape_area);
-  void load(std::string path, float shape_area);
-  std::string name() const { return _name; }
-  std::string path() const { return _template_path; }
-  float scale() const { return _pixels_per_meter; }
-  float radialSymmetryAngle() const { return _radial_symmetry_angle; }
-  cv::Mat const * const image() const { return &_template; }
-  std::unique_ptr<ShapeDetection> detect(float threshold, bool try_rotation=false, int count=10) const;
-  bool ok() const { return _ok; }
-};
-
-struct ShapeDetection
-{
-  Shape* shape;
-  ros::Time time;
-  cv::Point2f location;
-  float depth;
-  float confidence;
-  std::shared_ptr<cv::Mat> image;
-  cv::RotatedRect roi;
-};
-
-class UnderwaterShapeDetector
-{
-  ros::NodeHandle _nh;
-  nav::ROSCameraStream _camera;
-  std::string _ns;
-  std::string _template_dir;
-  float _shape_area;
-  float _search_depth;
-  float _depth_uncertainty;
-  std::map<std::string, Shape> _shapes;
-  std::map<std::string, bool> _is_target;
-  std::map<std::string, float> _detection_thresholds;
-  std::map<std::string, std::vector<ShapeDetection>> _detections;
-  int _min_detections;
-  float _min_boat_displacement;
-public:
-  UnderwaterShapeDetector(ros::NodeHandle &nh, int img_buf_size, std::string name_space);
-  bool searchFor(std::vector<std::string> target_shapes);
-  void calibrateThresholds();
-};
-
-} // namespace nav
diff --git a/perception/mil_vision/nodes/A_C_test_vision.cpp b/perception/mil_vision/nodes/A_C_test_vision.cpp
deleted file mode 100644
index a93afdab..00000000
--- a/perception/mil_vision/nodes/A_C_test_vision.cpp
+++ /dev/null
@@ -1,79 +0,0 @@
-#include
-#include
-#include
-#include
-
-using namespace std;
-using namespace cv;
-
-bool img_stuff(Mat& img_color, int* lo, int* hi, int* kernel_size)
-{
-  cout << "Image Processing" << endl;
-  vector<Mat> hsv_split;
-  Mat hue;
-  cvtColor(img_color, hue, CV_BGR2HSV);
-  split(hue, hsv_split);
-  Mat blob_mask;
-  Mat mask;
-  inRange(hsv_split[0], Scalar(*lo), Scalar(*hi), blob_mask);
-  int morph_elem = MORPH_ELLIPSE;
-  int morph_size = *kernel_size;
-  Mat element = getStructuringElement( morph_elem, Size( 2*morph_size + 1, 2*morph_size+1 ),
Point( morph_size, morph_size ) ); - erode(blob_mask, mask, element); - dilate(mask, mask, element); - imshow("mask", mask); - vector> contours; - findContours(mask, contours, CV_RETR_LIST, CV_CHAIN_APPROX_NONE); - Mat curve_img = Mat::zeros(mask.size(), CV_8U); - Scalar color{255}; - nav::tools::sortContours(contours, false); - if(contours.size() != 0) - { - drawContours(curve_img, contours, 0, color); - cout << "Curve size: " << contours[0].size() << endl; - for(auto& curve : contours) - { - cout << "Curve of size " << curve.size() << " is " << (nav::ClosedCurve::validateCurve(curve)? "valid" : "invalid") << endl; - } - } - imshow("curve", curve_img); - return true; -} - - -int main(int argc, char** argv) -{ - cout << "This is Active Contours Test" << endl; - nav::Perturbations::initPerturbationCache(); - - int low_slider {0}; - int hi_slider {8}; - int low_max {255}; - int hi_max {255}; - int kernel_size {5}; - int k_sz_max {255}; - string win_name("Thresh"); - namedWindow(win_name); - createTrackbar("hue_low", win_name, &low_slider, low_max); - createTrackbar("hue_hi", win_name, &hi_slider, hi_max); - createTrackbar("kernel_size", win_name, &kernel_size, k_sz_max); - VideoCapture cap; - if(!cap.open(0)) - return 0; - for(;;) - { - Mat frame; - cap >> frame; - if(frame.empty()) - break; - resize(frame, frame, Size(0,0), 0.5, 0.5); - imshow("Thresh", frame); - uint8_t wk = waitKey(1); - cout << int(wk) << endl; - img_stuff(frame, &low_slider, &hi_slider, &kernel_size); - if(wk == 27) - break; - } - cout << "I finished!" << nav::Perturbations::perturbation_cache.size() << endl; - return 0; -} diff --git a/perception/mil_vision/nodes/pinger_locator.py b/perception/mil_vision/nodes/pinger_locator.py deleted file mode 100644 index a6be7103..00000000 --- a/perception/mil_vision/nodes/pinger_locator.py +++ /dev/null @@ -1,45 +0,0 @@ -#!/usr/bin/env python -import txros -import numpy as np -from hydrophones.msg import ProcessedPing -from geometry_msgs.msg import Point, PoseStamped, Pose -from navigator_alarm import single_alarm, AlarmListener, AlarmBroadcaster -from twisted.internet import defer -import navigator_tools -import tf -from tf import transformations as trans - -ping_sub = yield navigator.nh.subscribe("/hydrophones/processed", ProcessedPing) -yield ping_sub.get_next_message() -target_freq = 35000 -while True: - processed_ping = yield ping_sub.get_next_message() - print processed_ping - if isinstance(processed_ping, ProcessedPing): - print "Got processed ping message:\n{}".format(processed_ping) - if processed_ping.freq > 35000 and processed_ping.freq < 36000: - freq_dev = abs(target_freq - processed_ping.freq) - print "Trustworthy pinger heading" - hydrophones_enu_p, hydrophones_enu_q = tf.lookupTransform("/hydrophones", "/enu", processed_ping.header.stamp) - pinger_enu_p = navigator_tools.point_to_numpy(tf.transformPoint()) - dir_ = navigator_tools.point_to_numpy(processed_ping.position) - mv_mag = 2 - mv_hyd_frame = dir_ / np.linalg.norm(dir_) - pinger_move = navigator.move.set_position(navigator_tools.point_to_numpy(processed_ping.position)).go() - - print "Heading towards pinger" - else: - print "Untrustworthy pinger heading. 
Freq = {} kHZ".format(processed_ping.freq) - else: - print "Expected ProcessedPing, got {}".format(type(processed_ping)) - -# Hydrophone locate mission -@txros.util.cancellableInlineCallbacks -def main(navigator): - kill_alarm_broadcaster, kill_alarm = single_alarm('kill', action_required=True, problem_description="Killing wamv to listen to pinger") - df = defer.Deferred().addCallback(head_for_pinger) - df.callback(navigator) - try: - yield txros.util.wrap_timeout(df, 15, cancel_df_on_timeout=True) - except txros.util.TimeoutError: - print "Done heading towards pinger" \ No newline at end of file diff --git a/perception/mil_vision/nodes/shape_identification/DockShapeVision.cpp b/perception/mil_vision/nodes/shape_identification/DockShapeVision.cpp deleted file mode 100644 index ded453a2..00000000 --- a/perception/mil_vision/nodes/shape_identification/DockShapeVision.cpp +++ /dev/null @@ -1,16 +0,0 @@ -#include "DockShapeVision.h" -using namespace cv; -int DockShapeVision::fontFace = CV_FONT_HERSHEY_SIMPLEX; -double DockShapeVision::fontScale = .6; -DockShapeVision::DockShapeVision(ros::NodeHandle& nh) : nh(nh) {} -void DockShapeVision::DrawShapes(cv::Mat& frame, navigator_msgs::DockShapes& symbols) -{ - for (auto symbol : symbols.list) { - Scalar color(0,0,255); - if (symbol.Color == navigator_msgs::DockShape::RED) - color = Scalar(0,255,0); - putText(frame, symbol.Shape + " (" + symbol.Color + ")", - Point(symbol.CenterX, symbol.CenterY), fontFace, fontScale, - Scalar(0,0,255), 1,CV_AA); - } -} diff --git a/perception/mil_vision/nodes/shape_identification/DockShapeVision.h b/perception/mil_vision/nodes/shape_identification/DockShapeVision.h deleted file mode 100644 index af82d294..00000000 --- a/perception/mil_vision/nodes/shape_identification/DockShapeVision.h +++ /dev/null @@ -1,17 +0,0 @@ -#pragma once -#include -#include -#include -#include "opencv2/opencv.hpp" -class DockShapeVision { - protected: - ros::NodeHandle& nh; - DockShapeVision(ros::NodeHandle& nh); - - public: - virtual void GetShapes(cv::Mat& frame, navigator_msgs::DockShapes& symbols) = 0; - static void DrawShapes(cv::Mat& frame, navigator_msgs::DockShapes& symbols); - static int fontFace; - static double fontScale; - virtual void init() = 0; -}; diff --git a/perception/mil_vision/nodes/shape_identification/GrayscaleContour/GrayscaleContour.cpp b/perception/mil_vision/nodes/shape_identification/GrayscaleContour/GrayscaleContour.cpp deleted file mode 100644 index bdaee629..00000000 --- a/perception/mil_vision/nodes/shape_identification/GrayscaleContour/GrayscaleContour.cpp +++ /dev/null @@ -1,342 +0,0 @@ -#include "GrayscaleContour.h" -#include -GrayscaleContour::GrayscaleContour(ros::NodeHandle& nh) : DockShapeVision(nh) { - // Set ros params - nh.param("grayscale/canny/thresh1", cannyParams.thresh1, 60); - nh.param("grayscale/canny/thresh2", cannyParams.thresh2, 100); - nh.param("grayscale/blur_size", blur_size, 7); - nh.param("grayscale/min_contour_area", minArea, - 100.0 / (644.0 * 482.0)); - nh.param("grayscale/triangle/epsilon", triangleEpsilon, 0.1); - nh.param("grayscale/cross/epsilon", crossEpsilon, 0.03); - nh.param("grayscale/triangle/rect_error_threshold", - triRectErrorThreshold, 0.1); - nh.param("grayscale/triangle/side_error_threshold", - triSideErrorThreshold, 0.1); - nh.param("grayscale/triangle/angle_mean_error_threshold", - triAngleMeanErrorThreshold, 5); - nh.param("grayscale/triangle/angle_var_error_threshold", - triAngleVarErrorThreshold, 10); - nh.param("grayscale/cross/rect_error_threshold", - 
crossRectErrorThreshold, 0.2); - nh.param("grayscale/cross/side_error_threshold", - crossSideErrorThreshold, 0.2); - nh.param("grayscale/cross/angle_mean_error_threshold", - crossAngleMeanErrorThreshold, 5); - nh.param("grayscale/cross/angle_var_error_threshold", - crossAngleVarErrorThreshold, 5); - nh.param("grayscale/circle/enclosing_circle_error_threshold", - circleEnclosingErrorThreshold, 0.2); - nh.param("grayscale/red_hue_min",redHueMin,0); - nh.param("grayscale/red_hue_max",redHueMax,255); - nh.param("grayscale/blue_hue",blueHue,120); - nh.param("grayscale/green_hue",greenHue,60); - -#ifdef DO_DEBUG - namedWindow("Menu", CV_WINDOW_AUTOSIZE); - namedWindow("Result", CV_WINDOW_AUTOSIZE); - namedWindow("Grayscale", CV_WINDOW_AUTOSIZE); - namedWindow("Contours", CV_WINDOW_AUTOSIZE); - namedWindow("Edges", CV_WINDOW_AUTOSIZE); -#endif -#ifdef DO_ROS_DEBUG - image_transport.reset(new image_transport::ImageTransport(nh)); - color_debug_publisher = - image_transport->advertise("debug_color", 1); - contour_debug_publisher = - image_transport->advertise("debug_contours", 1); -#endif -} -void GrayscaleContour::init() { -#ifdef DO_DEBUG - createTrackbar("thresh1", "Menu", &cannyParams.thresh1, 500); - createTrackbar("thresh2", "Menu", &cannyParams.thresh2, 255); -#endif -} -void GrayscaleContour::GetShapes(cv::Mat& frame, navigator_msgs::DockShapes& symbols) { - if (frame.empty()) return; - - frame_width = frame.cols; - frame_height = frame.rows; - - colorFrame = frame; - Mat temp = colorFrame.clone(); - bilateralFilter(temp, colorFrame, blur_size, blur_size * 2, blur_size / 2); - ConvertToGrayscale(); - DetectEdges(); - FindContours(); - -#ifdef DO_DEBUG - Mat result = colorFrame.clone(); -#endif -#ifdef DO_ROS_DEBUG - cv_bridge::CvImage ros_color_debug; - ros_color_debug.encoding = "bgr8"; - ros_color_debug.image = colorFrame.clone(); -#endif - - for (size_t i = 0; i < contours.size(); i++) { - navigator_msgs::DockShape dockShape; - std::vector approx_tri; - std::vector approx_cross; - approxPolyDP(Mat(contours[i]), approx_tri, - arcLength(contours[i], true) * triangleEpsilon, true); - approxPolyDP(Mat(contours[i]), approx_cross, - arcLength(contours[i], true) * crossEpsilon, true); - if (isTriangle(approx_tri)) { - dockShape.Shape = navigator_msgs::DockShape::TRIANGLE; - setShapePoints(dockShape, approx_tri); - } else if (isCross(approx_cross)) { - dockShape.Shape = navigator_msgs::DockShape::CROSS; - setShapePoints(dockShape, approx_cross); - } else if (isCircle(contours[i])) { - dockShape.Shape = navigator_msgs::DockShape::CIRCLE; - setShapePoints(dockShape, contours[i]); - } else - continue; - - if (!GetColor(i, dockShape.Color,dockShape.color_confidence)) continue; - -#ifdef DO_DEBUG - drawContours(result, contours, i, Scalar(0, 0, 255)); -#endif -#ifdef DO_ROS_DEBUG - drawContours(ros_color_debug.image, contours, i, Scalar(0, 0, 255)); -#endif - - symbols.list.push_back(dockShape); - } - -#ifdef DO_DEBUG - DrawShapes(result,symbols); - imshow("Result", result); - imshow("Grayscale", grayscaleFrame); - imshow("Edges", edgesFrame); - imshow("Contours", contoursFrame); - waitKey(5); -#endif -#ifdef DO_ROS_DEBUG - DrawShapes(ros_color_debug.image,symbols); - color_debug_publisher.publish(ros_color_debug.toImageMsg()); - cv_bridge::CvImage ros_contours_debug; - ros_color_debug.encoding = "mono8"; - ros_color_debug.image = contoursFrame.clone(); - contour_debug_publisher.publish(ros_color_debug.toImageMsg()); -#endif -} -void GrayscaleContour::ConvertToGrayscale() { - cvtColor(colorFrame, 
grayscaleFrame, CV_BGR2GRAY); -} -void GrayscaleContour::DetectEdges() { - Canny(grayscaleFrame, edgesFrame, cannyParams.thresh1, cannyParams.thresh2); -} -void GrayscaleContour::FindContours() { - contours.clear(); - hierarchy.clear(); - findContours(edgesFrame, contours, hierarchy, CV_RETR_TREE, - CV_CHAIN_APPROX_SIMPLE, Point(0, 0)); - - // Filter out very small contours - auto cit = contours.begin(); - auto hit = hierarchy.begin(); - while (cit != contours.end()) { - if (filterArea(*cit)) { - cit = contours.erase(cit); - hit = hierarchy.erase(hit); - } else { - cit++; - hit++; - } - } - -#if defined(DO_DEBUG) || defined(DO_ROS_DEBUG) - contoursFrame = Mat(edgesFrame.size(), edgesFrame.type(), Scalar(0, 0, 0)); - for (size_t i = 0; i < contours.size(); i++) { - drawContours(contoursFrame, contours, i, Scalar(255, 255, 255)); - } -#endif -} - -bool GrayscaleContour::GetColor(int Index, std::string& color, float& confidence) { - Mat mask = Mat::zeros(colorFrame.rows, colorFrame.cols, CV_8UC1); - drawContours(mask, contours, Index, Scalar(255), CV_FILLED); - Mat meanBGR(1,1,colorFrame.type(), mean(colorFrame, mask)); - Mat mean_hsv_mat(1,1,colorFrame.type(), Scalar(0,0,0)); - cvtColor(meanBGR,mean_hsv_mat,CV_BGR2HSV,3); - Vec3b meanColor = mean_hsv_mat.at(0,0); - double hue = meanColor[0]; - double redness = 1.0 / ( fmin(redHueMax-hue,hue-redHueMin) + 1.0); - double blueness = 1.0 / ( fabs(blueHue-hue) + 1.0); - double greeness = 1.0 / ( fabs(greenHue-hue) + 1.0); - double max = 0; - if (blueness > max) { - max = blueness; - color = navigator_msgs::DockShape::BLUE; - confidence = blueness; - } - if (greeness > max) { - max = greeness; - color = navigator_msgs::DockShape::GREEN; - confidence = greeness; - } - if (redness > max) { - max = redness; - color = navigator_msgs::DockShape::RED; - confidence = redness; - } - // ~printf("%s Confidence: %f Colors: H=%d S=%d V=%d \n",color.c_str(),confidence,meanColor.val[0],meanColor.val[1],meanColor.val[2]); - return true; -} -bool GrayscaleContour::isTriangle(std::vector& points) { - // If contour is not 3 sided, is not a triangle - if (points.size() != 3) return false; - - double contour_area = contourArea(points); - double perimeter = arcLength(points, true); - double expected_area, expected_perimeter, error; - - // Draw bounding rect, find area of triangle with this height/width, compare - // to real area - cv::Rect boundingRect = cv::boundingRect(points); - expected_area = 0.5 * boundingRect.width * boundingRect.height; - error = fabs(contour_area / expected_area - 1.0); - if (error > triRectErrorThreshold) return false; - - // Find side length based on bounding rect width, compare to real perimeter of - // contour - expected_perimeter = boundingRect.width * 3; - error = fabs(perimeter / expected_perimeter - 1.0); - if (error > triSideErrorThreshold) return false; - - // Find angles (in degrees) of contour, compare to expected of 60 degrees with - // low variance - std::vector angles; - findAngles(points, angles); - using namespace boost::accumulators; - accumulator_set> acc; - for (double angle : angles) acc(angle); - auto mean = boost::accumulators::mean(acc); - auto variance = sqrt(boost::accumulators::variance(acc)); - if (fabs(mean - 60.0) > triAngleMeanErrorThreshold) return false; - if (variance > triAngleVarErrorThreshold) return false; - - return true; -} -bool GrayscaleContour::isCross(std::vector& points) { - // If polygon is not 12 sided, is not a cross - if (points.size() != 12) return false; - - double contour_area = 
-bool GrayscaleContour::isCross(std::vector<Point>& points) {
-  // If the polygon is not 12 sided, it is not a cross
-  if (points.size() != 12) return false;
-
-  double contour_area = contourArea(points);
-  double perimeter = arcLength(points, true);
-  double expected_area, error;
-
-  // Find the length of 1 side using perimeter/12, then compare the area of a cross with this side length to the real area
-  double side_length = perimeter / 12.0;
-  expected_area = 5.0 * pow(side_length, 2);
-  error = fabs(contour_area / expected_area - 1.0);
-  if (error > crossRectErrorThreshold) return false;
-
-  // Draw a bounding rect around the contour, determine what the area of a cross with this width should be, compare to the real area
-  Rect rect = boundingRect(points);
-  expected_area = 5 * pow(rect.width / 3.0, 2);
-  error = fabs(contour_area / expected_area - 1);
-  if (error > crossSideErrorThreshold) return false;
-
-  // Find all angles in the contour, then check that the mean angle is around 40 degrees and the variance is low
-  std::vector<double> angles;
-  findAngles(points, angles);
-  using namespace boost::accumulators;
-  accumulator_set<double, stats<tag::mean, tag::variance>> acc;
-  for (double angle : angles) acc(angle);
-  auto mean = boost::accumulators::mean(acc);
-  auto variance = sqrt(boost::accumulators::variance(acc));
-  // ~printf("CROSS SIDES=%d MEAN=%f VAR=%f\n", points.size(), mean, variance);
-  if (fabs(mean - 40.0) > crossAngleMeanErrorThreshold) return false;
-  if (variance > crossAngleVarErrorThreshold) return false;
-
-  return true;
-}
-
-const double pi = 3.1415926;
-bool GrayscaleContour::isCircle(std::vector<Point>& points) {
-  if (points.size() < 5) return false;
-  double contour_area = contourArea(points);
-  // ~double perimeter = arcLength(points, true);
-  double expected_area, error;
-
-  // Find a min enclosing circle for the contour, then compare the enclosing circle's area to the real area
-  Point2f center;
-  float radius;
-  minEnclosingCircle(points, center, radius);
-  expected_area = pi * pow(radius, 2);
-  error = fabs(contour_area / expected_area - 1);
-  if (error > circleEnclosingErrorThreshold) return false;
-
-  // ~Rect rect = boundingRect(points);
-  // ~double contour_area = contourArea(points); //Actual area of the contour
-  // ~double expected_area = pi*pow(rect.width/2.0,2); //What area should be if contour is a circle
-  // ~double error = fabs(contour_area/expected_area-1);
-  // ~if (error > 0.2) return false;
-  // ~
-  // ~double perimeter = arcLength(points, true);
-  // ~double radius = perimeter/(2.0*pi);
-  // ~expected_area = pi*pow(radius,2);
-  // ~error = fabs(contour_area/expected_area-1.0);
-  // ~if (error > 0.2) return false;
-
-  return true;
-}
-bool GrayscaleContour::filterArea(std::vector<Point> contour) {
-  return contourArea(contour) < (minArea * (frame_width * frame_height));
-}
-Point GrayscaleContour::findCenter(std::vector<Point>& points) {
-  double x = 0, y = 0;
-  for (size_t i = 0; i < points.size(); i++) {
-    x += points[i].x;
-    y += points[i].y;
-  }
-  x /= points.size();
-  y /= points.size();
-  return Point(x, y);
-}
-double GrayscaleContour::findAngle(cv::Point& p1, cv::Point& p2, cv::Point& p3) {
-  cv::Point v1 = p2 - p1;
-  cv::Point v2 = p3 - p1;
-  float m1 = sqrt(v1.x * v1.x + v1.y * v1.y);
-  float m2 = sqrt(v2.x * v2.x + v2.y * v2.y);
-  float thecos = v1.dot(v2) / (m1 * m2);
-
-  return acos(thecos) * 180.0 / pi;
-}
-void GrayscaleContour::findAngles(std::vector<Point>& points, std::vector<double>& angles) {
-  for (size_t i = 0; i < points.size() - 2; i++)
-    angles.push_back(findAngle(points[i], points[i + 1], points[i + 2]));
-  angles.push_back(findAngle(points[points.size() - 2], points[points.size() - 1], points[0]));
-
angles.push_back(findAngle(points[points.size() - 1], points[0], points[1])); -} -void GrayscaleContour::setShapePoints(navigator_msgs::DockShape& dockShape, - std::vector& points) { - Point center = findCenter(points); - dockShape.CenterX = center.x; - dockShape.CenterY = center.y; - dockShape.img_width = colorFrame.cols; - for (size_t j = 0; j < points.size(); j++) { - geometry_msgs::Point p; - p.x = points[j].x; - p.y = points[j].y; - p.z = 0; - dockShape.points.push_back(p); - } -} diff --git a/perception/mil_vision/nodes/shape_identification/GrayscaleContour/GrayscaleContour.h b/perception/mil_vision/nodes/shape_identification/GrayscaleContour/GrayscaleContour.h deleted file mode 100644 index 3fcef197..00000000 --- a/perception/mil_vision/nodes/shape_identification/GrayscaleContour/GrayscaleContour.h +++ /dev/null @@ -1,80 +0,0 @@ -#pragma once -#include -#include -#include -#include -#include "../DockShapeVision.h" -#include "opencv2/opencv.hpp" -using namespace cv; - -//#define DO_DEBUG -#define DO_ROS_DEBUG -#ifdef DO_ROS_DEBUG -#include -#include -#include -#endif -class GrayscaleContour : public DockShapeVision { - private: - Mat colorFrame, grayscaleFrame, edgesFrame; - std::vector > contours; - std::vector hierarchy; - std::vector > shapes; - Rect roi; - - void ConvertToGrayscale(); - void DetectEdges(); - void FindContours(); - bool GetColor(int shapeIndex, std::string& color, float& confidence); - Point findCenter(std::vector& points); - Mat contoursFrame; - int frame_height; - int frame_width; - - bool filterArea(std::vector contour); - - bool isTriangle(std::vector& points); - bool isCross(std::vector& points); - bool isCircle(std::vector& points); - - void setShapePoints(navigator_msgs::DockShape& dockShape, - std::vector& points); - - static double findAngle(cv::Point& p1, cv::Point& p2, cv::Point& p3); - static void findAngles(std::vector& points, - std::vector& angles); -#ifdef DO_ROS_DEBUG - std::unique_ptr image_transport; - image_transport::Publisher color_debug_publisher; - image_transport::Publisher contour_debug_publisher; -#endif - - // Constants to use ros params for - struct CannyParams { - int thresh1; - int thresh2; - }; - CannyParams cannyParams; - double minArea; - int blur_size; - double triangleEpsilon; - double crossEpsilon; - double triRectErrorThreshold; - double triSideErrorThreshold; - double triAngleMeanErrorThreshold; - double triAngleVarErrorThreshold; - double crossRectErrorThreshold; - double crossSideErrorThreshold; - double crossAngleMeanErrorThreshold; - double crossAngleVarErrorThreshold; - double circleEnclosingErrorThreshold; - - double redHueMin; - double redHueMax; - double blueHue; - double greenHue; - public: - GrayscaleContour(ros::NodeHandle& nh); - void GetShapes(cv::Mat& frame, navigator_msgs::DockShapes& symbols); - void init(); -}; diff --git a/perception/mil_vision/nodes/shape_identification/ShapeTracker.cpp b/perception/mil_vision/nodes/shape_identification/ShapeTracker.cpp deleted file mode 100644 index 2f18cac1..00000000 --- a/perception/mil_vision/nodes/shape_identification/ShapeTracker.cpp +++ /dev/null @@ -1,81 +0,0 @@ -#include "ShapeTracker.h" -ShapeTracker::ShapeTracker() : active(false) -{ - -} -void ShapeTracker::setActive(bool a) -{ - active = a; -} -void ShapeTracker::init(ros::NodeHandle& nh) -{ - std::string get_shapes_topic; - nh.param("get_shapes_topic",get_shapes_topic,"get_shapes"); - getShapesService = nh.advertiseService(get_shapes_topic, &ShapeTracker::getShapesCallback, this); - allFoundShapesPublish = 
nh.advertise("filtered_shapes", 10); -} -void ShapeTracker::addShape(navigator_msgs::DockShape& s) -{ - for (auto &tracked : shapes) - { - if (tracked.update(s)) return; - } - // ~printf("Inserting Shape \n"); - shapes.push_back(TrackedShape(s)); -} -void ShapeTracker::addShapes(navigator_msgs::DockShapes& newShapes) -{ - // ~printf("Adding %d shapes current size = %d\n",newShapes.list.size(),shapes.size()); - for (auto &shape : newShapes.list) - { - addShape(shape); - } - shapes.erase(std::remove_if(shapes.begin(), shapes.end(), [] (TrackedShape& s) { - // if (s.isStale()) printf("Shape is stale, removing\n"); - return s.isStale(); - }),shapes.end()); - // ~printf("Shapes size %d\n",shapes.size()); - navigator_msgs::DockShapes found_shapes; - for (auto &tracked : shapes) - { - if (tracked.isReady() ) - { - found_shapes.list.push_back(tracked.get()); - } - } - allFoundShapesPublish.publish(found_shapes); -} -bool ShapeTracker::getShapesCallback(navigator_msgs::GetDockShapes::Request &req, - navigator_msgs::GetDockShapes::Response &res) { - if (!active) { - res.found = false; - res.error = navigator_msgs::GetDockShapes::Response::NODE_DISABLED; - return true; - } - if (!validRequest(req.Color,req.Shape)) { - res.found = false; - res.error = navigator_msgs::GetDockShapes::Response::INVALID_REQUEST; - return true; - } - for (auto &tracked : shapes) - { - if (tracked.sameType(req.Color,req.Shape) && tracked.isReady() ) - { - res.shapes.list.push_back(tracked.get()); - } - } - if (res.shapes.list.size() > 0) res.found = true; - else res.error = navigator_msgs::GetDockShapes::Response::SHAPE_NOT_FOUND; - return true; -} -bool ShapeTracker::validRequest(std::string& color, std::string& shape) -{ -return (color == navigator_msgs::GetDockShape::Request::BLUE || - color == navigator_msgs::GetDockShape::Request::GREEN || - color == navigator_msgs::GetDockShape::Request::RED || - color == navigator_msgs::GetDockShape::Request::ANY) && - (shape == navigator_msgs::GetDockShape::Request::CIRCLE || - shape == navigator_msgs::GetDockShape::Request::CROSS || - shape == navigator_msgs::GetDockShape::Request::TRIANGLE || - shape == navigator_msgs::GetDockShape::Request::ANY); -} diff --git a/perception/mil_vision/nodes/shape_identification/ShapeTracker.h b/perception/mil_vision/nodes/shape_identification/ShapeTracker.h deleted file mode 100644 index 3126705d..00000000 --- a/perception/mil_vision/nodes/shape_identification/ShapeTracker.h +++ /dev/null @@ -1,24 +0,0 @@ -#include -#include -#include -#include -#include -#include -#include "TrackedShape.h" - -class ShapeTracker -{ - private: - ros::ServiceServer getShapesService; - ros::Publisher allFoundShapesPublish; - std::vector shapes; - bool active; - static bool validRequest(std::string& color, std::string& shape); - public: - ShapeTracker(); - void setActive(bool a); - bool getShapesCallback(navigator_msgs::GetDockShapes::Request &req,navigator_msgs::GetDockShapes::Response &res); - void init(ros::NodeHandle& nh); - void addShape(navigator_msgs::DockShape& s); - void addShapes(navigator_msgs::DockShapes& newShapes); -}; diff --git a/perception/mil_vision/nodes/shape_identification/TrackedShape.cpp b/perception/mil_vision/nodes/shape_identification/TrackedShape.cpp deleted file mode 100644 index 12c52847..00000000 --- a/perception/mil_vision/nodes/shape_identification/TrackedShape.cpp +++ /dev/null @@ -1,84 +0,0 @@ -#include "TrackedShape.h" -using namespace std; -int TrackedShape::MIN_COUNT = 20; -double TrackedShape::MAX_DISTANCE = 5; -ros::Duration 
TrackedShape::MAX_TIME_GAP = ros::Duration(0, 0.5 * 1E9); -TrackedShape::TrackedShape() : count(0) -{ - -} -TrackedShape::TrackedShape(navigator_msgs::DockShape& s) : - count(1), - latest(s) -{ - -} -void TrackedShape::init(ros::NodeHandle& nh) -{ - nh.param("tracking/min_count",MIN_COUNT,10); - nh.param("tracking/max_distance_gap",MAX_DISTANCE,15); - double seconds; - nh.param("tracking/max_seen_gap_seconds", seconds, 0.5); - double partial_secs = fmod(seconds, 1); - seconds -= partial_secs; - MAX_TIME_GAP = ros::Duration(seconds, partial_secs * 1E9); -} -double TrackedShape::centerDistance(navigator_msgs::DockShape& a, navigator_msgs::DockShape& b) -{ - // ~printf("A = (%d,%d) B= (%d,%d)\n",a.CenterX,a.CenterY,b.CenterX,b.CenterY); - return sqrtf( powf(double(a.CenterX)-double(b.CenterX), 2) + powf(double(a.CenterY)-double(b.CenterY),2) ); -} -bool TrackedShape::update(navigator_msgs::DockShape& s) -{ - double distance = centerDistance(latest,s); - if (distance > MAX_DISTANCE) - { - // printf(" %s %s distance too big to %s %s DISTANCE = %f \n",latest.Color.c_str(),latest.Shape.c_str(),s.Color.c_str(),s.Shape.c_str(),distance); - return false; - } - if (tooOld(s)) - { - // printf(" %s %s too old from %s %s\n",latest.Color.c_str(),latest.Shape.c_str(),s.Color.c_str(),s.Shape.c_str()); - return false; - } - if (latest.Color != s.Color) - { - //What to do if color is different with two nearby - if (latest.color_confidence > s.color_confidence) - { - s.Color = latest.Color; - s.color_confidence = latest.color_confidence; - } - } - // ~if (latest.Shape != s.Shape) - // ~{ - // ~if (s.shapeConfidence) - // ~} - latest = s; - count++; - // ~printf("Updating %s %s with %s %s COUNT=%d Distance=%f \n",latest.Color.c_str(),latest.Shape.c_str(),s.Color.c_str(),s.Shape.c_str(),count,distance); - return true; -} -bool TrackedShape::tooOld(navigator_msgs::DockShape& s) -{ - return s.header.stamp - latest.header.stamp > MAX_TIME_GAP; -} -bool TrackedShape::isStale() -{ - ros::Time now = ros::Time::now(); - // ~std::cout << "Time Diff: " << (now - latest.header.stamp).toSec() << std::endl; - return (now - latest.header.stamp) > MAX_TIME_GAP; -} -bool TrackedShape::isReady() -{ - // printf("Count=%d, TooOld=%d\n", count, isStale()); - return count >= MIN_COUNT && !isStale(); -} -navigator_msgs::DockShape TrackedShape::get() -{ - return latest; -} -bool TrackedShape::sameType(std::string& color, std::string& shape) -{ - return (color == navigator_msgs::GetDockShape::Request::ANY || color == latest.Color ) && (shape == navigator_msgs::GetDockShape::Request::ANY || shape == latest.Shape); -} diff --git a/perception/mil_vision/nodes/shape_identification/TrackedShape.h b/perception/mil_vision/nodes/shape_identification/TrackedShape.h deleted file mode 100644 index 1cc25272..00000000 --- a/perception/mil_vision/nodes/shape_identification/TrackedShape.h +++ /dev/null @@ -1,27 +0,0 @@ -#include -#include -#include -#include -#include -#include - -class TrackedShape -{ - private: - int count; - static int MIN_COUNT; - static double MAX_DISTANCE; - static ros::Duration MAX_TIME_GAP; - navigator_msgs::DockShape latest; - static double centerDistance(navigator_msgs::DockShape& a, navigator_msgs::DockShape& b); - bool tooOld(navigator_msgs::DockShape& s); - public: - TrackedShape(); - TrackedShape(navigator_msgs::DockShape& s); - bool sameType(std::string& color, std::string & shape); - bool update(navigator_msgs::DockShape& s); - bool isReady(); - bool isStale(); - navigator_msgs::DockShape get(); - static void 
init(ros::NodeHandle& nh); -}; diff --git a/perception/mil_vision/nodes/shape_identification/main.cpp b/perception/mil_vision/nodes/shape_identification/main.cpp deleted file mode 100644 index f48b75e2..00000000 --- a/perception/mil_vision/nodes/shape_identification/main.cpp +++ /dev/null @@ -1,141 +0,0 @@ -#include "std_msgs/String.h" -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include "GrayscaleContour/GrayscaleContour.h" -#include -#include -#include "DockShapeVision.h" -#include -#include -#include "ShapeTracker.h" - - -using namespace cv; - -class ShooterVision { - private: - ShapeTracker tracker; - std::unique_ptr vision; - navigator_msgs::DockShapes symbols; - ros::NodeHandle nh_; - image_transport::ImageTransport it_; - image_transport::Subscriber image_sub_; - ros::Publisher foundShapesPublisher; - - ros::ServiceServer runService; - ros::ServiceServer roiService; - std::string camera_topic; - bool active; - cv::Rect roi; - cv::Size size; - unsigned int width; - unsigned int height; - - public: - ShooterVision() : nh_(ros::this_node::getName()), it_(nh_) { - nh_.param("auto_start", active, false); - vision.reset(new GrayscaleContour(nh_)); - vision->init(); - nh_.param("symbol_camera", camera_topic, - "/right/right/image_raw"); - std::string get_shapes_topic; - nh_.param("get_shapes_topic",get_shapes_topic,"get_shapes"); - runService = nh_.advertiseService(get_shapes_topic+"/switch", &ShooterVision::runCallback, this); - roiService = nh_.advertiseService("setROI", - &ShooterVision::roiServiceCallback, this); - //#ifdef DO_DEBUG - // DebugWindow::init(); - //#endif - foundShapesPublisher = nh_.advertise( - "found_shapes", 1000); - image_sub_ = it_.subscribe(camera_topic, 1, &ShooterVision::run, this); - - int x_offset, y_offset, width, height; - nh_.param("roi/x_offset", x_offset, 73); - nh_.param("roi/y_offset", y_offset, 103); - nh_.param("roi/width", width, 499); - nh_.param("roi/height", height, 243); - nh_.param("size/height", size.height, 243); - nh_.param("size/width", size.width, 243); - roi = Rect(x_offset, y_offset, width, height); - TrackedShape::init(nh_); - tracker.init(nh_); - } - void fixPoint(geometry_msgs::Point& p) - { - p.x += roi.x; - p.y += roi.y; - p.x *= width/double(size.width); - p.y *= height/double(size.height); - } - bool runCallback(std_srvs::SetBool::Request &req, - std_srvs::SetBool::Response &res) { - active = req.data; - res.success = true; - tracker.setActive(req.data); - return true; - } - bool roiServiceCallback(navigator_msgs::SetROI::Request &req, - navigator_msgs::SetROI::Response &res) { - if (req.roi.x_offset < 0 || req.roi.y_offset < 0 || - req.roi.x_offset + req.roi.width > width || - req.roi.y_offset + req.roi.height > height) { - res.error = "OUTSIDE_OF_FRAME"; - res.success = false; - return true; - } - roi = - Rect(req.roi.x_offset, req.roi.y_offset, req.roi.width, req.roi.height); - res.success = true; - return true; - } - void run(const sensor_msgs::ImageConstPtr &msg) { - if (!active) return; - // Grab ros frame - cv_bridge::CvImagePtr cv_ptr; - try { - cv_ptr = cv_bridge::toCvCopy(msg, sensor_msgs::image_encodings::BGR8); - } catch (cv_bridge::Exception &e) { - ROS_ERROR("cv_bridge exception: %s", e.what()); - return; - } - cv::Mat frame = cv_ptr->image; - width = frame.cols; - height = frame.rows; - symbols.list.clear(); - - cv::Mat resized; - cv::resize(frame,resized,size); - cv::Mat resized_roied = resized(roi); - vision->GetShapes(resized_roied,symbols); - for 
(navigator_msgs::DockShape& shape : symbols.list) - { - shape.img_width = frame.cols; - geometry_msgs::Point center; - center.x = shape.CenterX; - center.y = shape.CenterY; - fixPoint(center); - shape.CenterX = center.x; - shape.CenterY = center.y; - for (geometry_msgs::Point& p: shape.points) - fixPoint(p); - shape.header = msg->header; - } - foundShapesPublisher.publish(symbols); - tracker.addShapes(symbols); - } -}; - -int main(int argc, char **argv) { - ros::init(argc, argv,"shape_identificaiton"); - ShooterVision sv = ShooterVision(); - ros::spin(); - return 0; -} diff --git a/perception/mil_vision/nodes/start_gate_manual.py b/perception/mil_vision/nodes/start_gate_manual.py deleted file mode 100755 index 7da4b9af..00000000 --- a/perception/mil_vision/nodes/start_gate_manual.py +++ /dev/null @@ -1,308 +0,0 @@ -#!/usr/bin/python -from __future__ import division - -import rospy -import cv2 -import numpy as np -import time -import tf - -from sensor_msgs.msg import Image -from geometry_msgs.msg import PointStamped, PoseStamped -from navigator_msgs.srv import StartGate, StartGateResponse -import navigator_tools -import image_geometry -from scipy.optimize import minimize - -class ImageGetter(object): - def __init__(self, topic_name): - self.sub = navigator_tools.Image_Subscriber(topic_name, self.get_image) - - print 'getting topic', topic_name - self.frame = None - self.done = False - - self.camera_info = self.sub.wait_for_camera_info() - - def get_image(self, msg): - self.frame = msg - self.done = True - - -class BuoySelector(object): - ''' - Allows user to manually click the buoys - ''' - def __init__(self, name, img, scale_factor=1, color=(10, 75, 250), draw_radius=10): - assert img is not None, "Image is none" - - cv2.namedWindow(name) - cv2.setMouseCallback(name, self.mouse_cb) - - self.name = name - self.point = None - self.draw_radius = draw_radius - self.color = color - - self.scale_factor = scale_factor - self.img = cv2.resize(img, None, fx=scale_factor, fy=scale_factor, interpolation=cv2.INTER_CUBIC) - self.draw_img = self.img - - self.mouse_position = (0, 0) - - def segment(self): - while self.point is None: - draw_img = np.copy(self.img) - cv2.circle(draw_img, self.mouse_position, self.draw_radius, self.color, 1) - cv2.imshow(self.name, draw_img) - - k = cv2.waitKey(10) & 0xFF - - if k == 27: # Esc - cv2.destroyAllWindows() - rospy.sleep(.2) - - return None, None - elif k == ord('q'): - self.draw_radius += 2 - elif k == ord('a'): - self.draw_radius -= 2 - - cv2.destroyAllWindows() - rospy.sleep(.2) - pt = self.point / self.scale_factor - self.point = None - return pt, self.draw_radius - - def mouse_cb(self, event, x, y, flags, param): - self.draw_img = np.copy(self.img) - self.mouse_position = (x, y) - - if event == cv2.EVENT_LBUTTONUP: - self.point = np.array([x, y]) - - -class Segmenter(object): - def __init__(self, color_space, ranges, invert): - self.color_space = color_space - self.lower = np.array(ranges['lower']) - self.upper = np.array(ranges['upper']) - self.invert = invert - - def segment(self, orig_image, debug_color=False): - ''' - Make sure input image is BGR - If you want only the debug image returned, pass a color in for `debug_color` (ex. 
(255, 0, 0) for blue)
-        '''
-        image = orig_image if self.color_space == 'bgr' else cv2.cvtColor(orig_image, cv2.COLOR_BGR2HSV)
-        filtered_image = self.filter_image(image)
-
-        mask = cv2.inRange(filtered_image, self.lower, self.upper)
-        filtered_mask = self.filter_mask(mask)
-
-        cnts, _ = cv2.findContours(filtered_mask, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
-        cnt = sorted(cnts, key=cv2.contourArea, reverse=True)[0]
-        M = cv2.moments(cnt)
-        center = np.array([M['m10'], M['m01']]) / M['m00']
-
-        if debug_color:
-            debug_img = np.copy(orig_image)
-            x, y, w, h = cv2.boundingRect(cnt)
-            cv2.rectangle(debug_img, (x, y), (x + w, y + h), debug_color, 2)
-            cv2.circle(debug_img, tuple(center.astype(np.int16)), 2, debug_color, -1)
-            return debug_img
-
-        return center, cv2.contourArea(cnt)
-
-    def filter_image(self, image):
-        filtered = cv2.GaussianBlur(image, (5, 5), 0)
-        return filtered
-
-    def filter_mask(self, mask):
-        kernel = np.ones((5, 5), np.uint8)
-        filtered = cv2.erode(mask, kernel, iterations=1)
-        filtered = cv2.dilate(filtered, kernel, iterations=1)
-        return filtered
-
-
-def intersect(A, a, B, b):
-    # Based on http://morroworks.com/Content/Docs/Rays%20closest%20point.pdf
-    # Finds the midpoint of the closest approach of two rays `a` and `b` whose origins are `A` and `B`
-    return (A + a * (-np.dot(a, b) * np.dot(b, B - A) + np.dot(a, B - A) * np.dot(b, b)) /
-            (np.dot(a, a) * np.dot(b, b) - np.dot(a, b) ** 2) +
-            B + b * (np.dot(a, b) * np.dot(a, B - A) - np.dot(b, B - A) * np.dot(a, a)) /
-            (np.dot(a, a) * np.dot(b, b) - np.dot(a, b) ** 2)) / 2
-
-
-def minimize_repro_error(left_cam, right_cam, pt_l, pt_r, estimation):
-
-    def f(x):
-        left_error = np.linalg.norm(left_cam.project3dToPixel(x) - pt_l)
-        right_error = np.linalg.norm(right_cam.project3dToPixel(x) - pt_r)
-        return left_error ** 2 + right_error ** 2
-
-    correction = minimize(
-        fun=f,
-        x0=estimation,
-        tol=1e-9,
-        method="TNC",
-    )
-    return correction.x
-
-
-def do_the_magic(pt_l, pt_r, cam_tf):
-    '''
-    pt_l is in the left frame and pt_r is in the right frame
-    '''
-    global left_cam, right_cam
-
-    ray_1 = np.array(left_cam.projectPixelTo3dRay(pt_l))
-    ray_2 = np.array(right_cam.projectPixelTo3dRay(pt_r))
-
-    # I'm doing all the math in the camera frame
-    origin_1 = np.array([0, 0, 0])
-    origin_2 = np.array(cam_tf[0])  # Not accounting for different rotations
-    initial_estimate = intersect(origin_1, ray_1, origin_2, ray_2)
-
-    # Now re-project points and find the minimum error
-    estimate = minimize_repro_error(left_cam, right_cam, pt_l, pt_r, initial_estimate)
-
-    return estimate
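-# A quick numeric check of intersect() above (illustrative values): two rays
-# that genuinely cross should return their common point, since the midpoint of
-# the closest approach of intersecting rays is the intersection itself.
-#   A = np.array([0.0, 0.0, 0.0]); B = np.array([0.2, 0.0, 0.0])
-#   target = np.array([0.1, 0.5, 5.0])
-#   a = (target - A) / np.linalg.norm(target - A)
-#   b = (target - B) / np.linalg.norm(target - B)
-#   intersect(A, a, B, b)  # -> approximately [0.1, 0.5, 5.0]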
-def load_from_parameter(color):
-    param_name = '/start_gate/{}'.format(color)
-    if not rospy.has_param(param_name):
-        rospy.logerr("No parameters have been set!")
-        rospy.signal_shutdown("Requires param to be set: {}".format(param_name))
-        # exit()
-
-    param = rospy.get_param(param_name)
-
-    s = Segmenter(param['color_space'], param['ranges'], param['invert'])
-    return s
-
-
-def do_buoys(srv, left, right, red_seg, green_seg, tf_listener):
-    '''
-    FYI:
-        `left`: the left camera ImageGetter object
-        `right`: the right camera ImageGetter object
-    '''
-    left_point = None
-    right_point = None
-
-    while not rospy.is_shutdown():
-        # Get all the required TF links
-        try:
-            # Working copy of the current frame obtained at the same time as the tf link
-            tf_listener.waitForTransform("enu", "stereo_left_cam", rospy.Time(), rospy.Duration(4.0))
-            left_image, right_image = left.frame, right.frame
-            cam_tf = tf_listener.lookupTransform("stereo_left_cam", "stereo_right_cam", left.sub.last_image_time)
-            cam_p, cam_q = tf_listener.lookupTransform("enu", "stereo_left_cam", left.sub.last_image_time)
-            cam_p = np.array([cam_p])
-            cam_r = tf.transformations.quaternion_matrix(cam_q)[:3, :3]
-            break
-
-        except (tf.LookupException, tf.ConnectivityException, tf.ExtrapolationException, TypeError) as e:
-            print e
-            rospy.logwarn("TF link not found.")
-            time.sleep(.5)
-            continue
-
-    red_left_pt, rl_area = red_seg.segment(left_image)
-    red_right_pt, rr_area = red_seg.segment(right_image)
-    green_left_pt, gl_area = green_seg.segment(left_image)
-    green_right_pt, gr_area = green_seg.segment(right_image)
-
-    area_tol = 50
-    if np.linalg.norm(rl_area - rr_area) > area_tol or np.linalg.norm(gl_area - gr_area) > area_tol:
-        rospy.logwarn("Unsafe segmentation")
-        return StartGateResponse(success=False)
-
-    red_point_np = do_the_magic(red_left_pt, red_right_pt, cam_tf)
-    red_point_np = np.array(cam_r.dot(red_point_np) + cam_p)[0]
-    green_point_np = do_the_magic(green_left_pt, green_right_pt, cam_tf)
-    green_point_np = np.array(cam_r.dot(green_point_np) + cam_p)[0]
-
-    # Just for visualization
-    for i in range(5):
-        # Publish it 5 times so we can see it in rviz
-        navigator_tools.draw_ray_3d(red_left_pt, left_cam, [1, 0, 0, 1], m_id=0, frame="stereo_left_cam")
-        navigator_tools.draw_ray_3d(red_right_pt, right_cam, [1, 0, 0, 1], m_id=1, frame="stereo_right_cam")
-        navigator_tools.draw_ray_3d(green_left_pt, left_cam, [0, 1, 0, 1], m_id=2, frame="stereo_left_cam")
-        navigator_tools.draw_ray_3d(green_right_pt, right_cam, [0, 1, 0, 1], m_id=3, frame="stereo_right_cam")
-
-        red_point = PointStamped()
-        red_point.header = navigator_tools.make_header(frame="enu")
-        red_point.point = navigator_tools.numpy_to_point(red_point_np)
-        red_pub.publish(red_point)
-
-        green_point = PointStamped()
-        green_point.header = navigator_tools.make_header(frame="enu")
-        green_point.point = navigator_tools.numpy_to_point(green_point_np)
-        green_pub.publish(green_point)
-
-        time.sleep(1)
-
-    # Now we have two points, find their midpoint and calculate a target angle
-    midpoint = (red_point_np + green_point_np) / 2
-    between_vector = green_point_np - red_point_np  # Red is on the left when we go out.
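-    # With both buoys expressed in ENU, arctan2(dy, dx) of this red->green
-    # baseline is the baseline's yaw; the +90 degree rotation below then points
-    # through the gate, assuming red is kept to port on the way out (per the
-    # comment above).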
- yaw_theta = np.arctan2(between_vector[1], between_vector[0]) - # Rotate that theta by 90 deg to get the angle through the buoys - yaw_theta += np.pi / 2.0 - - p = midpoint - q = tf.transformations.quaternion_from_euler(0, 0, yaw_theta) - - target_pose = PoseStamped() - target_pose.header = navigator_tools.make_header(frame="enu") - target_pose.pose = navigator_tools.numpy_quat_pair_to_pose(p, q) - - return StartGateResponse(target=target_pose, success=True) - - -def publish_debug(red_pub, green_pub, red_seg, green_seg, camera, *args): - r_debug = red_seg.segment(camera.frame, (0, 10, 250)) - g_debug = green_seg.segment(camera.frame, (0, 250, 10)) - - red_pub.publish(navigator_tools.make_image_msg(r_debug)) - green_pub.publish(navigator_tools.make_image_msg(g_debug)) - - -if __name__ == "__main__": - rospy.init_node("start_gate_perception") - tf_listener = tf.TransformListener() - - # For visualization - red_pub = rospy.Publisher("red_buoy", PointStamped, queue_size=5) - green_pub = rospy.Publisher("green_buoy", PointStamped, queue_size=5) - red_debug = rospy.Publisher("vision/start_gate/red", Image, queue_size=5) - green_debug = rospy.Publisher("vision/start_gate/green", Image, queue_size=5) - - left = ImageGetter('/stereo/left/image_rect_color') - left_cam = image_geometry.PinholeCameraModel() - left_cam.fromCameraInfo(left.camera_info) - - right = ImageGetter('/stereo/right/image_rect_color') - right_cam = image_geometry.PinholeCameraModel() - right_cam.fromCameraInfo(right.camera_info) - - while left.frame is None and right.frame is None: - print "Waiting for frame..." - rospy.sleep(.5) - - red_seg = load_from_parameter("red") - green_seg = load_from_parameter("green") - - # while not rospy.is_shutdown(): - # l_center, debug_img = red.segment(left.frame, (0, 70, 255)) - # red_debug.publish(navigator_tools.make_image_msg(debug_img)) - # rospy.sleep(1) - - s = rospy.Service("/vision/start_gate_buoys", StartGate, - lambda srv: do_buoys(srv, left, right, red_seg, green_seg, tf_listener)) - - # rospy.Timer(rospy.Duration(.5), lambda republish: publish_debug(red_debug, green_debug, - # red_seg, green_seg, left)) - - rospy.spin() diff --git a/perception/mil_vision/nodes/stereo_point_cloud_driver.cpp b/perception/mil_vision/nodes/stereo_point_cloud_driver.cpp deleted file mode 100644 index 3b06a12c..00000000 --- a/perception/mil_vision/nodes/stereo_point_cloud_driver.cpp +++ /dev/null @@ -1,500 +0,0 @@ -/* - * Author: David Soto - * Year: 2016 - */ - -// this node uses a stereo camera rig to generate a point cloud of the environment. -// It uses the exFAST detector to generate a sparse point cloud. 
-// See this for more info: http://www.ra.cs.uni-tuebingen.de/software/sparsestereo/welcome_e.html - -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include - -void left_image_callback( - const sensor_msgs::ImageConstPtr &image_msg_ptr, - const sensor_msgs::CameraInfoConstPtr &info_msg_ptr); -void right_image_callback( - const sensor_msgs::ImageConstPtr &image_msg_ptr, - const sensor_msgs::CameraInfoConstPtr &info_msg_ptr); -void cloud_callback (const sensor_msgs::PointCloud2ConstPtr& cloud_msg); -bool detection_activation_switch( - std_srvs::SetBool::Request &req, - std_srvs::SetBool::Response &resp); - -using namespace std; -using namespace cv; -using namespace sparsestereo; -using namespace boost; -using namespace boost::posix_time; -using ros::param::param; - -struct XYZRGB{ - - float x, y, z; - uint8_t r, g, b; - - bool operator==(const XYZRGB &other) const{ - if(this->x == other.x && this->y == other.y && this->z == other.z){ - return true; - } - else return false; - } - - bool operator<(const XYZRGB &other) const{ - if(this->x > other.x) return false; - else if(this->y > other.y) return false; - else if(this->z >= other.z) return false; - else return true; - } -}; - -// Image storage -sensor_msgs::ImageConstPtr left_most_recent, right_most_recent; -sensor_msgs::CameraInfoConstPtr left_most_recent_info, right_most_recent_info; - -// Stereo subscribing -image_transport::CameraSubscriber left_image_sub, right_image_sub; -image_geometry::PinholeCameraModel left_cam_model, right_cam_model; - -// Velodyne subscribing -ros::Subscriber pcd_sub; - -// Camera projection matrices -Matx34d left_P, right_P; - -// TF -// tf::TransformListener tf_listener; -string tf_frame_stereo_l {"/stereo_left_cam"}; -string tf_frame_stereo_r {"/stereo_right_cam"}; -string tf_frame_velodyne {"/velodyne"}; -tf::StampedTransform stereoL_tf_velodyne; - -// Point clouds -vector stereo_point_cloud; -// sensor_msgs::PointCloud2 pt_cloud_msg; -std_msgs::Header header; -int pt_cloud_seq {0}; - -// Point cloud publisher -ros::Publisher pcl_pub; - -// Thread safety -boost::mutex left_mtx, right_mtx; - -// Control -bool active = false; -ros::ServiceServer detection_switch; - - -int main(int argc, char** argv) { - namespace fs = boost::filesystem; - - try { - - stringstream log_msg; - log_msg << "\nInitializing Sparse Stereo Point Cloud Driver:\n"; - int tab_sz = 4; - - // globals - ros::init(argc, argv, "stereo_point_cloud_driver"); - ros::NodeHandle nh; - image_transport::ImageTransport img_trans {nh}; - image_geometry::PinholeCameraModel left_cam_model, right_cam_model; - cv_bridge::CvImagePtr input_bridge; - string activation_srv_name {"stereo/activation_srv"}; - - // Default parameters - string img_topic_left_default = "/stereo/left/image_raw/"; - string img_topic_right_default = "/stereo/right/image_raw/"; - string activation_default = "/stereo/activation_switch"; - string dbg_topic_default = "/stereo/dbg_imgs"; - - // Advertise activation switch - detection_switch = nh.advertiseService( - activation_srv_name, detection_activation_switch); - - // Advertise point cloud topic - pcl_pub = nh.advertise ("stereo_front_point_cloud", 1); - - // Subscribe to Cameras (image + camera_info) - cout << "subscribing to cameras" << endl; - string left = 
param("/stereo/input_left", img_topic_left_default); - string right = param("/stereo/input_right", img_topic_right_default); - cout << "Got topic name params!" << endl; - left_image_sub = img_trans.subscribeCamera(left, 10, left_image_callback); - right_image_sub = img_trans.subscribeCamera(right, 10, right_image_callback); - log_msg << setw(1 * tab_sz) << "" << "Camera Subscriptions:\x1b[37m\n" - << setw(2 * tab_sz) << "" << "left = " << left << endl - << setw(2 * tab_sz) << "" << "right = " << right << "\x1b[0m\n"; - cout << left << ' ' << right << endl; - ROS_INFO(log_msg.str().c_str()); - - // Subscribe to Velodyne scan - pcd_sub = nh.subscribe ("/velodyne_points", 1, cloud_callback); - - // Wait for transform between velodyne and stereo frames - tf::TransformListener tf_listener; - ROS_INFO("Waiting 10 seconds for tf between velodyne and stereo frames to become available..."); - // try{ - // ros::Time now = ros::Time(0); - // if (tf_listener.waitForTransform(tf_frame_velodyne, tf_frame_stereo_l, - // now, ros::Duration(10.0))){ - // cout << "TF is available." << endl; - // } - // else cout << "Timed out waiting for TF." << endl; - - // tf_listener.lookupTransform(tf_frame_velodyne, tf_frame_stereo_l, now, stereoL_tf_velodyne); - // // cout << "TF:" << stereoL_tf_velodyne << endl; - // } - // catch (const std::exception& e) { - // cerr << "Fatal exception while acquiring TF's: " << e.what(); - // return -1; - // } - - - - // Size adjustment ROI - /* - exFAST is currently heavily optimized for VGA size images, extracting a - properly sized region of interest is much more efficient than resizing the image - */ - - // Stereo matching parameters - double uniqueness = 0.8; - int maxDisp = 70; - int leftRightStep = 2; - - // Feature detection parameters - double adaptivity = 1.0; - int minThreshold = 10; - - // Load rectification data - fs::path calibration_file_path {ros::package::getPath("navigator_perception") + "/calibration.xml"}; - cout << calibration_file_path << endl; - if ( !boost::filesystem::exists( calibration_file_path ) ) // Check file exists - { - cout << "Can't find calibration file!" << std::endl; - throw "Can't find calibration file!"; - } - StereoRectification* rectification = NULL; - rectification = new StereoRectification(CalibrationResult(calibration_file_path.c_str())); - - // The stereo matcher. SSE Optimized implementation is only available for a 5x5 window - SparseStereo, short> stereo(maxDisp, 1, uniqueness, rectification, - false, false, leftRightStep); - - // Feature detectors for left and right image - FeatureDetector* leftFeatureDetector = new ExtendedFAST(true, minThreshold, adaptivity, true, 10); - FeatureDetector* rightFeatureDetector = new ExtendedFAST(false, minThreshold, adaptivity, true, 10); - - // Sparse Correspondences - vector correspondences; - - // Main stereo processing loop - ros::Rate loop_rate(10); // process images 10 times per second - cout << "\x1b[1;31mMain Loop!\x1b[0m" << endl; - while (ros::ok()) { - cout << "clock: " << microsec_clock::local_time() << endl; - cout << " The left Camera Subscriber is connected to " << left_image_sub.getNumPublishers() << " publishers." << endl; - cout << " The right Camera Subscriber is connected to " << right_image_sub.getNumPublishers() << " publishers." << endl; - cout << "active=" << (active? 
"true" : "false") << endl; - cout << "left_most_recent=" << left_most_recent << " right_most_recent=" << right_most_recent << endl; - // Idle if active is not set or valid image pointer pairs have not been retrieved - if (!active || (left_most_recent == nullptr) || (right_most_recent == nullptr)){ - ros::spinOnce(); // still handle ROS callbacks if skipping an iteration - loop_rate.sleep(); - cout << "Skipping this stereo pair for processing." << endl; - continue; - } - try { - // Containers - Mat raw_left, raw_right, draw_left, draw_right; - Mat_ leftImg, rightImg; - - // Thread Safety - cout << "Getting frame data" << endl; - left_mtx.lock(); - right_mtx.lock(); - { - // Get Frame Message data - namespace cvb = cv_bridge; - cout << "left_most_recent: " << left_most_recent << endl; - cout << "right_most_recent: " << right_most_recent << endl; - input_bridge = cvb::toCvCopy(left_most_recent, sensor_msgs::image_encodings::BGR8); - raw_left = input_bridge->image; - draw_left = raw_left.clone(); - // imshow("raw_left", raw_left); waitKey(0); - cout << "raw_left_size:" << raw_left.size() << endl; - input_bridge = cvb::toCvCopy(right_most_recent, sensor_msgs::image_encodings::BGR8); - raw_right = input_bridge->image; - draw_right = raw_right.clone(); - } - cout << "Got frame data" << endl; - left_mtx.unlock(); - right_mtx.unlock(); - - // Check images for data and adjust size - if(raw_left.data == NULL || raw_right.data == NULL) - throw sparsestereo::Exception("Unable to open input images!"); - cvtColor(raw_left, leftImg, CV_BGR2GRAY); - cvtColor(raw_right, rightImg, CV_BGR2GRAY); - - // Enforce approximate image synchronization - double left_stamp = left_most_recent_info->header.stamp.toSec(); - double right_stamp = right_most_recent_info->header.stamp.toSec(); - double sync_error = fabs(left_stamp - right_stamp); - stringstream sync_msg; - sync_msg << "Left and right images were not sufficiently synchronized" - << "\nsync error: " << sync_error << "s"; - if (sync_error > 0.3) { - ROS_WARN(sync_msg.str().c_str()); - cout << "skipping" << endl; - continue; - } - - // Objects for storing final and intermediate results - int rows = leftImg.rows; - int cols = leftImg.cols; - Mat_ charLeft(rows, cols); - Mat_ charRight(rows, cols); - Mat_ censusLeft(rows, cols); - Mat_ censusRight(rows, cols); - vector keypointsLeft, keypointsRight; - - ptime lastTime = microsec_clock::local_time(); // Time algorithm start time - - // Calculate census transforms for both images and detet features - // for both images in parallel - #pragma omp parallel sections default(shared) num_threads(2) - { - #pragma omp section - { - ImageConversion::unsignedToSigned(leftImg, &charLeft); - Census::transform5x5(charLeft, &censusLeft); - keypointsLeft.clear(); - leftFeatureDetector->detect(leftImg, keypointsLeft); - } - #pragma omp section - { - ImageConversion::unsignedToSigned(rightImg, &charRight); - Census::transform5x5(charRight, &censusRight); - keypointsRight.clear(); - rightFeatureDetector->detect(rightImg, keypointsRight); - } - } - - // Stereo matching and reconstruction - Point2f pt_L; - Point2f pt_R; - Matx31d pt_L_hom; - Matx31d pt_R_hom; - Mat X, X_hom; - Vec3b color; - - // Calculate correspondences in Blue channel - correspondences.clear(); - stereo.match(censusLeft, censusRight, keypointsLeft, keypointsRight, &correspondences); - - for(int i=0; i<(int)correspondences.size(); i++) { - - // Visualize feature points - Scalar rand_color = Scalar(rand() % 256, rand() % 256, rand() % 256); - rectangle(raw_left, pt_L - 
Point2f(2,2), pt_L + Point2f(2, 2), rand_color, CV_FILLED); - rectangle(raw_right, pt_R - Point2f(2,2), pt_R + Point2f(2, 2), rand_color, CV_FILLED); - - // Triangulate matches to reconstruct 3D point - pt_L = correspondences[i].imgLeft->pt; - pt_R = correspondences[i].imgRight->pt; - pt_L_hom = Matx31d(pt_L.x, pt_L.y, 1); - pt_R_hom = Matx31d(pt_R.x, pt_R.y, 1); - X_hom = nav::triangulate_Linear_LS(Mat(left_P), Mat(right_P), Mat(pt_L_hom), Mat(pt_R_hom)); - X_hom = X_hom / X_hom.at(3, 0); - - // Get Color and fill point_cloud element precursor - XYZRGB point; - point.x = X_hom.at(0, 0); - point.y = X_hom.at(1, 0); - point.z = X_hom.at(2, 0); - Vec3b pix_color = raw_left.at(pt_L); - cout << i << "ptL: " << pt_L << " ptR: " << pt_R << " Color: " << pix_color - << " " << X_hom.t() << endl; - point.b = pix_color[0]; - point.g = pix_color[1]; - point.r = pix_color[2]; - stereo_point_cloud.push_back(point); - } - - // Display image - Mat screen = Mat::zeros(Size(leftImg.cols*2, leftImg.rows), CV_8UC3); - raw_left.copyTo(screen(Rect(Point(0,0), leftImg.size()))); - raw_right.copyTo(screen(Rect(Point(leftImg.cols,0), leftImg.size()))); - namedWindow("Stereo"); - imshow("Stereo", screen); - waitKey(0); - - // Print statistics - time_duration elapsed = (microsec_clock::local_time() - lastTime); - cout << "Time for stereo matching: " << elapsed.total_microseconds()/1.0e6 << "s" << endl - << "Features detected in left image: " << keypointsLeft.size() << endl - << "Features detected in right image: " << keypointsRight.size() << endl - << "Percentage of matched features: " << (100.0 * correspondences.size() / keypointsLeft.size()) << "%" << endl; - - // Populate point cloud message header - sensor_msgs::PointCloud2 pt_cloud_msg; - header.seq = pt_cloud_seq++; - ros::Time frame_time_l = left_most_recent_info->header.stamp; - ros::Time frame_time_r = right_most_recent_info->header.stamp; - ros::Duration offset((frame_time_r - frame_time_l).toSec() / 2); - header.stamp = left_most_recent_info->header.stamp + offset; - header.frame_id = tf_frame_stereo_l; - pt_cloud_msg.header = header; - cout << "pcd header: " < iter_x(pt_cloud_msg, "x"); - sensor_msgs::PointCloud2Iterator iter_y(pt_cloud_msg, "y"); - sensor_msgs::PointCloud2Iterator iter_z(pt_cloud_msg, "z"); - sensor_msgs::PointCloud2Iterator iter_b(pt_cloud_msg, "b"); - sensor_msgs::PointCloud2Iterator iter_g(pt_cloud_msg, "g"); - sensor_msgs::PointCloud2Iterator iter_r(pt_cloud_msg, "r"); - - cout << "filling pcd2 msg " << endl; - for(size_t i = 0; i < stereo_point_cloud.size(); ++i, ++iter_x, ++iter_y, ++iter_z, ++iter_b, ++iter_g, ++iter_r) - { - *iter_x = stereo_point_cloud[i].x; - *iter_y = stereo_point_cloud[i].y; - *iter_z = stereo_point_cloud[i].z; - *iter_b = stereo_point_cloud[i].b; - *iter_g = stereo_point_cloud[i].g; - *iter_r = stereo_point_cloud[i].r; - cout << i << "XYZBGR: " << *iter_x << " " << *iter_y << " " << *iter_z << " " << int(*iter_b) << " " << int(*iter_g) << " " << - int(*iter_r) << endl; - } - - // Publish point cloud - pcl_pub.publish(pt_cloud_msg); - - // Empty point cloud container for upcoming iteration - cout << "PCD size: " << pt_cloud_msg.height * pt_cloud_msg.row_step - << ", " << stereo_point_cloud.size() << endl; - stereo_point_cloud.clear(); - - // Handle ROS callbacks - ros::spinOnce(); - } - catch (const std::exception& e) { - cerr << "Fatal exception during main loop: " << e.what(); - return -1; - } - loop_rate.sleep(); - } // end main while loop - return 0; - } // end large try block - catch (const 
std::exception& e) {
-    cerr << "Fatal exception during initialization: " << e.what();
-    return -1;
-  }
-}
-
-bool detection_activation_switch(
-    std_srvs::SetBool::Request &req,
-    std_srvs::SetBool::Response &resp) {
-  stringstream ros_log;
-  ros_log << "\x1b[1;31mSetting stereo point cloud generation to: \x1b[1;37m"
-          << (req.data ? "on" : "off") << "\x1b[0m";
-  ROS_INFO(ros_log.str().c_str());
-  active = req.data;
-  resp.success = true;
-  return true;
-}
-
-void left_image_callback(
-    const sensor_msgs::ImageConstPtr &image_msg_ptr,
-    const sensor_msgs::CameraInfoConstPtr &info_msg_ptr) {
-
-  // Get ptrs to most recent frame and info (thread safe)
-  left_mtx.lock();
-  cout << "\x1b[1;31mleft callback!\x1b[0m" << endl;
-  left_most_recent = image_msg_ptr;
-  left_most_recent_info = info_msg_ptr;
-  left_mtx.unlock();
-
-  // Initialize camera parameters
-  static bool first_call = true;
-  if(first_call){
-    left_cam_model.fromCameraInfo(left_most_recent_info);
-    left_P = left_cam_model.fullProjectionMatrix();
-    tf_frame_stereo_l = left_cam_model.tfFrame();
-    first_call = false;
-  }
-}
-
-void right_image_callback(
-    const sensor_msgs::ImageConstPtr &image_msg_ptr,
-    const sensor_msgs::CameraInfoConstPtr &info_msg_ptr) {
-
-  // Get ptrs to most recent frame and info (thread safe)
-  right_mtx.lock();
-  cout << "\x1b[1;31mright callback!\x1b[0m" << endl;
-  right_most_recent = image_msg_ptr;
-  right_most_recent_info = info_msg_ptr;
-  right_mtx.unlock();
-
-  // Initialize camera parameters
-  static bool first_call = true;
-  if(first_call){
-    right_cam_model.fromCameraInfo(right_most_recent_info);
-    right_P = right_cam_model.fullProjectionMatrix();
-    right_P(0, 3) = -140;
-    cout << "right projection_matrix: " << right_P << endl;
-    tf_frame_stereo_r = right_cam_model.tfFrame();
-    first_call = false;
-  }
-}
-
-void cloud_callback (const sensor_msgs::PointCloud2ConstPtr& cloud_msg){
-  ROS_INFO("velodyne_points callback");
-  sensor_msgs::PointCloud2 colored_velodyne_pcd;
-
-}
-
diff --git a/perception/mil_vision/nodes/underwater_shape_identification_vision.cpp b/perception/mil_vision/nodes/underwater_shape_identification_vision.cpp
deleted file mode 100644
index d64054fc..00000000
--- a/perception/mil_vision/nodes/underwater_shape_identification_vision.cpp
+++ /dev/null
@@ -1,28 +0,0 @@
-#include
-#include
-
-#include
-
-#include
-#include
-
-#include
-
-using namespace std;
-
-int main(int argc, char** argv)
-{
-  cout << "\033[1;31mUnderwater Shape Identification Vision\033[0m" << endl;
-
-  string challenge_name = "underwater_shape_identification";
-  ros::init(argc, argv, challenge_name + "_vision");
-  ros::NodeHandle nh;
-  string name_space{challenge_name + "/"};
-
-  int img_buffer_size = 0;
-  nh.param(name_space + "buffer_size", img_buffer_size, 5);
-  nav::UnderwaterShapeDetector underwater_shape_detector(nh, img_buffer_size, name_space);
-
-  return 0;
-}
-
diff --git a/perception/mil_vision/nodes/velodyne_pcd_colorizer.cpp b/perception/mil_vision/nodes/velodyne_pcd_colorizer.cpp
deleted file mode 100644
index 4d35e2f1..00000000
--- a/perception/mil_vision/nodes/velodyne_pcd_colorizer.cpp
+++ /dev/null
@@ -1,26 +0,0 @@
-#include
-#include
-#include
-#include
-#include
-#include
-
-
-using namespace std;
-
-int main(int argc, char** argv)
-{
-  // Init ROS
-  ros::init(argc, argv, "velodyne_pcd_colorizer");
-  ros::NodeHandle nh;
-
-  // Create PcdColorizer active object
-  nav::PcdColorizer colorizer{nh, "/velodyne_points"};
-
-  while(colorizer.ok())
-  {
-    ROS_INFO("velodyne_pcd_colorizer: Spinning ROS 
callbacks"); - ros::spin(); - } - -} \ No newline at end of file diff --git a/perception/mil_vision/object_classification/stc_train.py b/perception/mil_vision/object_classification/stc_train.py deleted file mode 100644 index 375c4da1..00000000 --- a/perception/mil_vision/object_classification/stc_train.py +++ /dev/null @@ -1,208 +0,0 @@ -from __future__ import division -from roi_generator_slow import ROI_Collection_Slow -import pickle -import numpy as np -import cv2 -import numpy.linalg as npl -import numpy.ma as ma -from navigator_tools import BagCrawler -from cv_bridge import CvBridge -import scipy.ndimage.measurements as mes -from SVM_classifier import SVMClassifier - - -def _get_lw(box): - p0 = box[0] - p1 = box[1] - vec1 = np.array(box[2] - p0) - vec1 = vec1 / np.linalg.norm(vec1) - vec2 = np.array(p1 - p0) - vec2 = vec2 / np.linalg.norm(vec2) - vec3 = np.array(box[3] - p0) - vec3 = vec3 / np.linalg.norm(vec3) - ang1 = np.arccos((vec1).dot(vec2)) - ang2 = np.arccos((vec3).dot(vec2)) - dif1 = 1.5708 - ang1 - dif2 = 1.5708 - ang2 - if dif1 < dif2: - p2 = box[2] - else: - p2 = box[3] - l, lp = np.linalg.norm(abs(p1 - p0)), p1 - w, wp = np.linalg.norm(abs(p2 - p0)), p2 - if l < w: - temp = w - templ = wp - w = l - wp = lp - l = temp - lp = templ - direc = (wp - p0) / np.linalg.norm(wp - p0) - dot = direc.dot(np.array([0, 1])) - vcost = abs(dot) - return l, w, vcost - - -def get_rectangle(roi): - """ - Get the rectangle that has changing colors in the roi. - - Returns boolean success value and the four rectangle points in the image - """ - gaussian = cv2.GaussianBlur(roi, (9, 9), 10.0) - roi = cv2.addWeighted(roi, 1.5, gaussian, -0.5, 0, roi) - - nh, nw, r = roi.shape - - # cluster - Z = roi.reshape((-1, 3)) - Z = np.float32(Z) - criteria = (cv2.TERM_CRITERIA_EPS + cv2.TERM_CRITERIA_MAX_ITER, 10, 1.0) - K = 7 - ret, label, centers = cv2.kmeans(Z, K, criteria, 10, 0) - centers = np.uint8(centers) - image_as_centers = centers[label.flatten()] - image_as_centers = image_as_centers.reshape((roi.shape)) - labels = label.reshape((roi.shape[:2])) - - possible_clusters = list(np.arange(K)) - whiteness = map(lambda x: npl.norm(x - np.array([255, 255, 255])), centers) - whitest = np.argmin(whiteness) - possible_clusters.remove(whitest) - energys = [] - correct_masks = [] - for num, p in enumerate(possible_clusters): - mask_clusters = ma.masked_equal(labels, p) - - draw_mask = mask_clusters.mask.astype(np.uint8) - draw_mask *= 255 - - labeled_array, num_features = mes.label(draw_mask) - count = np.bincount(labeled_array.flatten()) - count = count[1:] - val = np.argmax(count) - mask_obj = ma.masked_equal(labeled_array, val + 1) - - draw_mask = mask_obj.mask.astype(np.uint8) - draw_mask *= 255 - - # cv2.imshow(str(num), draw_mask) - # cv2.waitKey(0) - - top = np.count_nonzero(draw_mask) - valz = np.fliplr(np.transpose(draw_mask.nonzero())) - rect = cv2.minAreaRect(valz) - box = cv2.cv.BoxPoints(rect) - box = np.int0(box) - rect_mask = np.zeros((nh, nw)) - cv2.drawContours(rect_mask, [box], 0, 255, -1) - bottom = np.count_nonzero(rect_mask) - - l, w, vcost = _get_lw(box) - if w < .001: - print 'WIDTH TOO SMALL' - continue - - valz = np.fliplr(np.transpose(draw_mask.nonzero())) - area = cv2.contourArea(box) - area /= (nh * nw) - - if vcost > .5: - print "VCOST TOO HIGH" - continue - if area < .03: - print area - print "TOOOO SMALL" - continue - if top / bottom < .7: - print "TOO SPARSE", top / bottom - continue - - energy = area + 1.5 * top / bottom - abs(2.5 - l / w) - .2 * vcost - if energy < 0: - "LOW 
ENERGY!" - continue - print num, "area: ", area, "filled:", top, "total:", bottom, 'rat', top / bottom, "l/w", abs(2.5 - l / w), "vcost", - vcost, "energy", energy - energys.append(energy) - correct_masks.append(mask_obj) - - if len(energys) == 0: - print "EVERY ENERGY WRONG" - return False, None - - correct_masks = [x for y, x in sorted(zip(energys, correct_masks), reverse=True)] - energys = sorted(energys, reverse=True) - - if len(energys) > 1 and abs(energys[0] - energys[1]) < .2: - print "TOO CLOSE TO CALLS" - return False, None - - correct_mask = correct_masks[0] - colors = roi[correct_mask.mask] - draw_mask = correct_mask.mask.astype(np.uint8) - draw_mask *= 255 - - return True, colors - - -class Config(): - - def __init__(self): - self.mymap = {'r': 1, 'b': 2, 'y': 3, 'k': 4} - self.inv_map = {v: k for k, v in self.mymap.iteritems()} - - def get_class(self, val): - return self.inv_map[val] - - def get_val(self, clss): - return self.mymap[clss] - - -class Training(object): - - def __init__(self): - self.config = Config() - self.svm = SVMClassifier() - self.data = [] - self.colors = [] - - def train(self, img, colors, color): - mean = np.mean(np.mean(img, axis=0), axis=0) - m = np.repeat([mean], len(colors), axis=0) - t = np.hstack([colors, m]) - color = self.config.get_val(color) - c = np.repeat(color, len(colors)) - - self.data.extend(t) - self.colors.extend(c) - - def pickle(self, file): - self.svm.train(self.data, self.colors) - self.svm.pickle(file) - - -if __name__ == "__main__": - t = Training() - bridge = CvBridge() - pick = pickle.load(open("stc_train1.p", 'rb')) - for bag in pick.bag_to_rois: - colors = pick.bag_to_rois[bag] - b = BagCrawler(bag) - topic = b.image_topics[0] - crawl = b.crawl(topic=topic) - for color in colors: - if len(color) is 0: - continue - color, roi = color.iteritems().next() - img = crawl.next() - img = bridge.imgmsg_to_cv2(img, 'bgr8') - image_clone = img.copy() - print roi, color - xmin, ymin, w, h = roi[0], roi[1], roi[2], roi[3] - cv2.rectangle(image_clone, (xmin, ymin), (xmin + w, ymin + h), (0, 255, 0), 2) - roi = img[ymin:ymin + h, xmin:xmin + w] - succ, color_vec = get_rectangle(roi) - if succ: - t.train(img, color_vec, color) - t.pickle("/home/tess/mil_ws/src/NaviGator/mission_systems/navigator_scan_the_code/navigator_scan_the_code/scan_the_code_lib/svm_train.p") diff --git a/perception/mil_vision/setup.py b/perception/mil_vision/setup.py index c311a2fd..29a9ec75 100644 --- a/perception/mil_vision/setup.py +++ b/perception/mil_vision/setup.py @@ -5,7 +5,7 @@ # fetch values from package.xml setup_args = generate_distutils_setup( - packages=['object_classification'], + packages=[''], ) setup(**setup_args) diff --git a/perception/mil_vision/src/missions/underwater_shape_identification.cpp b/perception/mil_vision/src/missions/underwater_shape_identification.cpp deleted file mode 100644 index f7a1a0b9..00000000 --- a/perception/mil_vision/src/missions/underwater_shape_identification.cpp +++ /dev/null @@ -1,108 +0,0 @@ -#include - -namespace nav { - -using namespace std; -using namespace cv; - -namespace fs = boost::filesystem; - -vector Shape::loadShapes(string directory, float shape_area) -{ - fs::directory_iterator dir_end; // default ctor --> past-the-end - fs::path dir{directory}; - vector img_filenames; - if(fs::is_directory(dir)){ - - // iterate through all files in directory - for(fs:: directory_iterator dir_itr(dir); dir_itr != dir_end; ++dir_itr){ - // collect all files that match img extension - string path_str 
{dir_itr->path().filename().string()};
-      if(path_str.compare(path_str.size() - 3, 3, "png") == 0){
-        img_filenames.push_back(dir_itr->path().string());
-      }
-    }
-  }
-  else {
-    cerr << "Error: " << dir << " is not a valid directory." << endl;
-  }
-
-  vector<Shape> shapes;
-  for(auto path : img_filenames)
-  {
-    shapes.push_back(Shape());
-    shapes.back().load(path, shape_area);
-    if(!shapes.back().ok())
-      shapes.pop_back();
-  }
-
-  return shapes;
-}
-
-void Shape::load(string path, float shape_area)
-{
-  _ok = true;
-  fs::path template_path{path};
-  if(!fs::exists(template_path))
-  {
-    cout << __PRETTY_FUNCTION__ << ": the file " << path << " doesn't exist." << endl;
-    _ok = false;
-    return;
-  }
-
-  _name = template_path.stem().string();
-  _template_path = path;
-  _template = cv::imread(path, CV_LOAD_IMAGE_GRAYSCALE);
-  if(!_template.data)
-  {
-    cout << __PRETTY_FUNCTION__ << ": the image at " << path << " could not be loaded." << endl;
-    _ok = false;
-    return;
-  }
-
-  float template_area = _template.rows * _template.cols;
-  if(shape_area == 0.0f)
-  {
-    cout << __PRETTY_FUNCTION__ << ": shape_area can't be zero." << endl;
-    _ok = false;
-    return;
-  }
-  _pixels_per_meter = sqrt(template_area / shape_area);
-
-  // Find angle of radial symmetry
-  _radial_symmetry_angle = getRadialSymmetryAngle(_template, 0.01);
-}
-
-UnderwaterShapeDetector::UnderwaterShapeDetector(ros::NodeHandle &nh, int img_buf_size, string name_space)
-: _nh(nh), _camera(_nh, img_buf_size), _ns(name_space)
-{
-  string camera_topic;
-  _nh.param<string>(_ns + "template_directory", _template_dir, "templates/underwater_shape_identification/");
-  _nh.param(_ns + "shape_area", _shape_area, 1.0f);
-  _nh.param<string>(_ns + "camera_topic", camera_topic, "down/image_rect");
-  _nh.param(_ns + "search_depth", _search_depth, 5.0f);
-  _nh.param(_ns + "depth_uncertainty", _depth_uncertainty, 1.0f);
-  _nh.param(_ns + "min_boat_displacement", _min_boat_displacement, 0.1);
-  _nh.param(_ns + "min_detections", _min_detections, 10);
-  _nh.param<map<string, bool>>(_ns + "is_target", _is_target,
-                               {{"circle", true}, {"cross", true}, {"triangle", true}});
-  _nh.param<map<string, double>>(_ns + "detection_thresholds", _detection_thresholds,
-                                 {{"circle", 0.5}, {"cross", 0.5}, {"triangle", 0.5}});
-
-  string pkg_path = ros::package::getPath("navigator_vision");
-  auto shapes = Shape::loadShapes(pkg_path + "/" + _template_dir, _shape_area);
-
-  // Eliminate shapes without thresholds or that are not targets
-  for(auto it = shapes.end() - 1; it >= shapes.begin(); it--)
-  {
-    if(_detection_thresholds.count(it->name()) < 1 || !_is_target.at(it->name()))
-      shapes.erase(it);
-  }
-
-  // Add remaining shapes to map
-  for(auto& sh : shapes)
-    _shapes[sh.name()] = sh;
-}
-
-} // namespace nav
-
diff --git a/perception/mil_vision/templates/underwater_shape_identification/circle.png b/perception/mil_vision/templates/underwater_shape_identification/circle.png
deleted file mode 100644
index fa5bd300932a2b2db3c70a04f84e8711480c5a6c..0000000000000000000000000000000000000000
GIT binary patch
(binary patch data for circle.png omitted; literal 2590)
zKc?G>>6yk%0}&aLEf&jfbo_Yky?Z1smur6WxTU3V&q6j$**Y{L0z5S}1%BWk`H|S7 zliAs6CX?mx{{8#%@^X+s`XMhbFEW{&imAB2`xQOTT-yzajg@e(m4s&=QB+iH325Z< zcuW?HfQ@f6sDll}D2NE~J&9h-&XPYd5N?D(AP!lSh(V#yb21;SUo0gmHJoX7aCR2l z+uJ+W5x}0AA$|FRyPcIqxhkERVF6r67!gf%>>%Ea%S}IoJezeA0@ZNc8wdMV` z_5fXDIZM)goPm;933e`2fbf*qljyK_=u2w`FZ zTU}kPd-xFJ;zCu6lL!Pe4Tgt@Z2+HCDz)6N!QR8C@78&LH57`o)Ss@Yt)1@I0P7X2 zv=;0QqA@eX!)BWTe|r*D@IHEwMj#MW3=N?g?!}C3MN0@Au6_k?m>#2$p=gO)Vx;j+ ztY4oV0W;ckT-7DBm953@jHpFMc@ZC-?UFcs`m}*u*7PVC_=@CKWLnYI<~==)Dugup zW!};7s4OpEqT!P>ZNtOWMn^|mE{-d)=girJs5fm#_P=aMp49j$Cc;!PHHAB7E2Q*nMeMQ1!`()c1e5AHfO;@F|YqjJT4?G+~20Neib_r z*`ayt7<5$^p{HhHA!l!IPo_{1id31jpxK$m-}__2kDee4kBp5Sn5Ifct!sgp1{1u` zub(AbN=QhM2UWEQ=l!MNa8(qFSlGPtQ<{iLgK7JzSXvfw#WT4+)a+~r9PZ1X$=CY< zh@D(ru|Yvf>{3-PghLfnAU!SN9gL3@0sx*%r`!KymwP{lk%PnI@e{QTAmBgg>WYqx zjIcr>H@sbHBnk=&d_@|69s^n!k4H~VPLiLisAFrqe-*J7o$3pA_H1|s_)we4hDb`l zV8qVOP82%@PXjpS>{$g>RaNp`4_C5#ZF*31u+h4fO+;)Z`}rw> zp_qvXV^1wQc2q-M4UI1Tt)>RZP?PQw>I261^!6KVwaOfB@O6q?r3eDc-)K>JQS!Bf z?ZeAzUs(LM;D^bw!uAmUZM4UycVs~z;{KN}Me3JPb`bu5;h&Y2zTeeQQD^Z!hbAQz z`tIa$zU~{IJn8mn$ia1ZdRpqig9lD9ec6;gWgj0Omis(XJsgiOrmp_DSy)gY3s6J< z22S!=*?mX`^H9%%yUwf&aeOU(;flBw0-zy2KECY)bT!85*3>#{X-TE^OYVILZHTYv zFA{}9K~4RD@gG7Wk=%_9(Pvkh$JgvUJj4eF2k+g6l&%>X7^EH?q=XuhQ)F;)ahwu)`)KCehGtI$$q*)#co>2#Bp3ThH4-QR8 zFyl_meEOu5l#~Q88s*sz%kgJWZxRvU6~@}`kwz(7h=I8|1?WfSg~!k=`?JFg2B@&8 z$k58l-9pMRp>zl^?Qq%1Wu`5Q_%FN_I(bhjdhuD!hjzmA<=C!?$;n$|WA4kX*n*ap zH&GFhk++x}@M!z1SFeaSzHauf#zJYRIg!a9`(N2}b8^?CDQ`C%e0(Hz;F+2`SV381HM-5R9;yo`+rdwWUkPo7W%JlJOt_-}t7iHnOfDEezVsn^$~ zptzXzSJ8$1W27*$bh@;Bqx1BO`(OXubI}MG6M9ZJxEO@lIRmT+EX++nHOxK5*eN2cYZD z(Zn|U{|_r4 zL`w`r9*5JOms5gIE+%4pwP1k8*DDkTxl~tIf4?4b6ggpRi9|j{wEuVBSy4W{nVY5y T3)Tk)K9IA$JGKdP?$*BnjCQY# diff --git a/perception/mil_vision/templates/underwater_shape_identification/cross.png b/perception/mil_vision/templates/underwater_shape_identification/cross.png deleted file mode 100644 index 9b400145f7986163d222d8090a4041a18f160506..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 761 zcmeAS@N?(olHy`uVBq!ia0vp^DIm!dJxA*GJo3?%A+rx0PN;Qtz%wA6>cj~~67)p|3h?Q2zSobj;(o7U{l`_{33=kn>(#YMVZQ;cSQp2f)U`Cx{L z(bJ-vf2!j88{}S=+$!09xBL9R_SaH#olj_RNU|y}R!9+P>1mn}C}70pg3Z;1J;HK^0nK(-+D90K!z{z_BQ^>tj%^FD?^-i-*q#Z`DM@b8oTz#7FQfgPAK6r z3aj}{yIW?TfBx~;udWP}SDEvj7Ct!rwS=L=ZE=Q))QS+TdEX-Src3a!704W)zOzm| zVnU2wd)nsC95d-F(SJ5a=(JVseOEdEWJ=S^5-AS>*-dbFW3>!Z@bgl~d0*>x*KxgZ z=lfeH|M;WC!hjch%1-|+%ibzwHJ4AQv*r2BoZD`zuO7-U`LyNy%aTL4Z_j>S`A=no z!tuu+AElkye(P;nLG1PRPKN8H8}H_Aj9OcBQ=h}C9U~+$Ee8kG8E3Z2XWj?De6PFz z{(FH8d;8&sd&*wNuh$df-m`x{yV3Ia7tLR)Y`;|Pt?vJrD)(u^*E2Xwg2Y~Uyxy@S z!zrgzU6dwPpZR>hejAf#^rRRsVQKij<@5DKP(Hb+6tK8 S51s@}%M6~belF{r5}E)I=}4CV diff --git a/perception/mil_vision/templates/underwater_shape_identification/triangle.png b/perception/mil_vision/templates/underwater_shape_identification/triangle.png deleted file mode 100644 index 80da22ae87cf2d9ebeeddbd532b55e36624a62b9..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1961 zcmaJ?c~FyC5)Xn9AV`eNaz)BOKorCP84ZUhpFs#B*KiGTgfXCiKtPUw$YnSs8PH)+ zu5gr~D1-nBmxvq^FdPPCVkUA%j1t+%f(YW`e%AiCf9$LGUe~Lx`gOnVUw7xbyE>|= z=%_#-5H*4m-V>Ch9|x)k-sGhcFHk640|;JFaHT>?c@T&ak$|`NqExI_NPYfx*5!V` zBd-j{%V+Nqg<2s4QU?^ZUEg+O6LeM)w~(Ep2%O4Pk;3F zOVIn3YM5}$e%d%P1(^#XWqazou zpLuz*3Z(^`a{b`i>7Qp@aM#7isbP8{S+B`Qbv$f(=2HH3OQjYTD>JPxU1+kjh8q@} z12ypFvFP6;vqxGB*C>Tb>UtTHWH995Ufr}Xz234ey)T|5_? 
z+1N<=y78fjYOS^o;5~-;{QZ9k1hQB3X6eg>%I$ppbpVmCuuolGmU_tB`#Gpyk2G{q z1q~k$dh%>#Wu;QIl&rV2voppl!5jlqv$(ZI2`RnjXPQ%bJlNt?r{ysc4r%tSP>*(Q zWI0YX>RK_zgp{1D%?#@G-zv+g569Wsk~pQ9BUCZ&qfBPNm^}XEEsB7spFVPX^erF? z)NoPp@cyZe4_zcv7TY2L=+pB3TmHWFFa;4 zMJewPz##(S#j=GCEhj2ATK*Rxt_%6~-*!GeK39*I8~|FLKBa@~q}WDBJ}!S&2xm;@ zru5PS%p|Y7v*%@wO+II9+=*dLRBcpOz`nsnc(KzE>*ioA5<~=1G#p~l?-DR5{B1PI zH*h=h#8f!weZ2KSQroZGrCpd2bx0iR_oqYXy%I4_J@WRwd!fz8vghZRs^ShD1fFxmDTPAA(ok8#dH*3*{r< zJX8j={b#5-JLNFXVsmrzL3CrC5#HOoqM`!BRs}1-#pvcJ35X&$7!6@y?fy*{W767G zJv3tD;((}HCx?-#DMZ!j^ofs(?>!$oVTK%lW$?O_wrgFOy)5MWnV5?SL*-cmX(gc1C@CO0@9+a z-h%-R;yz~_cqL7^EVkBu>}NXAYT6ijyS#inX^;{(*6-rQK_Z5Hb&oXfM}DI_Zh1Z&`bOM6 z1@;pg8=DcyI(_IGKCeE!#gtc)?s=p@-}yeGd$!r&5@Qumus_okaE|StFizY(Muk6U zEJ*C3(P(NSvCs^yLmR+eAX#X?i%IydS|#y#==E|nF>2l;T)H2jASV)uu5^gT9w)z1 z1fV{?zF>;0!^@gjSAS}4Z>NGV260&+&)S4|mS6j4&g==W=b`S55|ffjor|OS zZ~*LEKQL#jqsoiU9Lfl5%a35M6yz^3L0X&gfExFfsM%T0Oe**>W^muqV6J*%cJ+Wz zs7v)R8C9qgBm-wJgy7(cZw822{{dcwsBi!P From 3be80f91943ed3c84f9ccaa3bd6cb3925bc58210 Mon Sep 17 00:00:00 2001 From: David Soto Date: Fri, 17 Mar 2017 20:12:34 -0400 Subject: [PATCH 239/267] GREAT MERGE: create purgatory for files that aren't yet generic --- perception/mil_vision/great_merge_purgatory/README.txt | 5 +++++ .../nodes/camera_to_lidar/CameraLidarTransformer.cpp | 0 .../nodes/camera_to_lidar/CameraLidarTransformer.hpp | 0 .../nodes/camera_to_lidar/main.cpp | 0 .../{ => great_merge_purgatory}/nodes/database_color.py | 0 .../{ => great_merge_purgatory}/nodes/object_classifier.py | 0 .../{ => great_merge_purgatory}/nodes/pinger_finder.py | 0 .../{ => great_merge_purgatory}/nodes/shooter_finder.py | 0 8 files changed, 5 insertions(+) create mode 100644 perception/mil_vision/great_merge_purgatory/README.txt rename perception/mil_vision/{ => great_merge_purgatory}/nodes/camera_to_lidar/CameraLidarTransformer.cpp (100%) rename perception/mil_vision/{ => great_merge_purgatory}/nodes/camera_to_lidar/CameraLidarTransformer.hpp (100%) rename perception/mil_vision/{ => great_merge_purgatory}/nodes/camera_to_lidar/main.cpp (100%) rename perception/mil_vision/{ => great_merge_purgatory}/nodes/database_color.py (100%) rename perception/mil_vision/{ => great_merge_purgatory}/nodes/object_classifier.py (100%) rename perception/mil_vision/{ => great_merge_purgatory}/nodes/pinger_finder.py (100%) rename perception/mil_vision/{ => great_merge_purgatory}/nodes/shooter_finder.py (100%) diff --git a/perception/mil_vision/great_merge_purgatory/README.txt b/perception/mil_vision/great_merge_purgatory/README.txt new file mode 100644 index 00000000..48fe2616 --- /dev/null +++ b/perception/mil_vision/great_merge_purgatory/README.txt @@ -0,0 +1,5 @@ +Some of these files can be made generic and therefore be able to stay as a part of mil_common. Until that is done, they will remain in this folder. +Anything that remains in the purgatory after the end of the month will be removed from this repo. 
+
+- David Soto
+  3/17/17
diff --git a/perception/mil_vision/nodes/camera_to_lidar/CameraLidarTransformer.cpp b/perception/mil_vision/great_merge_purgatory/nodes/camera_to_lidar/CameraLidarTransformer.cpp
similarity index 100%
rename from perception/mil_vision/nodes/camera_to_lidar/CameraLidarTransformer.cpp
rename to perception/mil_vision/great_merge_purgatory/nodes/camera_to_lidar/CameraLidarTransformer.cpp
diff --git a/perception/mil_vision/nodes/camera_to_lidar/CameraLidarTransformer.hpp b/perception/mil_vision/great_merge_purgatory/nodes/camera_to_lidar/CameraLidarTransformer.hpp
similarity index 100%
rename from perception/mil_vision/nodes/camera_to_lidar/CameraLidarTransformer.hpp
rename to perception/mil_vision/great_merge_purgatory/nodes/camera_to_lidar/CameraLidarTransformer.hpp
diff --git a/perception/mil_vision/nodes/camera_to_lidar/main.cpp b/perception/mil_vision/great_merge_purgatory/nodes/camera_to_lidar/main.cpp
similarity index 100%
rename from perception/mil_vision/nodes/camera_to_lidar/main.cpp
rename to perception/mil_vision/great_merge_purgatory/nodes/camera_to_lidar/main.cpp
diff --git a/perception/mil_vision/nodes/database_color.py b/perception/mil_vision/great_merge_purgatory/nodes/database_color.py
similarity index 100%
rename from perception/mil_vision/nodes/database_color.py
rename to perception/mil_vision/great_merge_purgatory/nodes/database_color.py
diff --git a/perception/mil_vision/nodes/object_classifier.py b/perception/mil_vision/great_merge_purgatory/nodes/object_classifier.py
similarity index 100%
rename from perception/mil_vision/nodes/object_classifier.py
rename to perception/mil_vision/great_merge_purgatory/nodes/object_classifier.py
diff --git a/perception/mil_vision/nodes/pinger_finder.py b/perception/mil_vision/great_merge_purgatory/nodes/pinger_finder.py
similarity index 100%
rename from perception/mil_vision/nodes/pinger_finder.py
rename to perception/mil_vision/great_merge_purgatory/nodes/pinger_finder.py
diff --git a/perception/mil_vision/nodes/shooter_finder.py b/perception/mil_vision/great_merge_purgatory/nodes/shooter_finder.py
similarity index 100%
rename from perception/mil_vision/nodes/shooter_finder.py
rename to perception/mil_vision/great_merge_purgatory/nodes/shooter_finder.py

From 4edc6760a4178066c13842a64a014829c609f2a6 Mon Sep 17 00:00:00 2001
From: David Soto
Date: Fri, 17 Mar 2017 21:28:22 -0400
Subject: [PATCH 240/267] GREAT MERGE: change navigator_vision refs to mil_vision

---
 perception/mil_vision/CMakeLists.txt          | 147 +++---------------
 .../CameraLidarTransformer.hpp                |   4 +-
 .../active_contours.hpp                       |   0
 .../colorizer/camera_observer.hpp             |   4 +-
 .../colorizer/color_observation.hpp           |   4 +-
 .../colorizer/common.hpp                      |   0
 .../colorizer/pcd_colorizer.hpp               |  10 +-
 .../colorizer/single_cloud_processor.hpp      |   8 +-
 .../cv_tools.hpp                              |   0
 .../image_acquisition/camera_frame.hpp        |   2 +-
 .../camera_frame_sequence.hpp                 |   2 +-
 .../image_acquisition/camera_model.hpp        |   0
 .../image_acquisition/ros_camera_stream.hpp   |   4 +-
 .../image_filtering.hpp                       |   2 +-
 .../pcd_sub_pub_algorithm.hpp                 |   0
 .../point_cloud_algorithms.hpp                |   0
 .../visualization.hpp                         |   0
 perception/mil_vision/package.xml             |   9 +-
 .../active_contours.cpp                       |   0
 .../colorizer/camera_observer.cpp             |   0
 .../colorizer/color_observation.cpp           |   0
 .../colorizer/pcd_colorizer.cpp               |   0
 .../colorizer/single_cloud_processor.cpp      |   0
 .../cv_utils.cc                               |   0
 .../image_filtering.cpp                       |   0
 .../point_cloud_algorithms.cc                 |   0
 .../visualization.cc                          |   0
 27 files changed, 49 insertions(+), 147 deletions(-)
 rename
perception/mil_vision/include/{navigator_vision_lib => mil_vision_lib}/active_contours.hpp (100%) rename perception/mil_vision/include/{navigator_vision_lib => mil_vision_lib}/colorizer/camera_observer.hpp (86%) rename perception/mil_vision/include/{navigator_vision_lib => mil_vision_lib}/colorizer/color_observation.hpp (87%) rename perception/mil_vision/include/{navigator_vision_lib => mil_vision_lib}/colorizer/common.hpp (100%) rename perception/mil_vision/include/{navigator_vision_lib => mil_vision_lib}/colorizer/pcd_colorizer.hpp (87%) rename perception/mil_vision/include/{navigator_vision_lib => mil_vision_lib}/colorizer/single_cloud_processor.hpp (80%) rename perception/mil_vision/include/{navigator_vision_lib => mil_vision_lib}/cv_tools.hpp (100%) rename perception/mil_vision/include/{navigator_vision_lib => mil_vision_lib}/image_acquisition/camera_frame.hpp (98%) rename perception/mil_vision/include/{navigator_vision_lib => mil_vision_lib}/image_acquisition/camera_frame_sequence.hpp (97%) rename perception/mil_vision/include/{navigator_vision_lib => mil_vision_lib}/image_acquisition/camera_model.hpp (100%) rename perception/mil_vision/include/{navigator_vision_lib => mil_vision_lib}/image_acquisition/ros_camera_stream.hpp (99%) rename perception/mil_vision/include/{navigator_vision_lib => mil_vision_lib}/image_filtering.hpp (97%) rename perception/mil_vision/include/{navigator_vision_lib => mil_vision_lib}/pcd_sub_pub_algorithm.hpp (100%) rename perception/mil_vision/include/{navigator_vision_lib => mil_vision_lib}/point_cloud_algorithms.hpp (100%) rename perception/mil_vision/include/{navigator_vision_lib => mil_vision_lib}/visualization.hpp (100%) rename perception/mil_vision/src/{navigator_vision_lib => mil_vision_lib}/active_contours.cpp (100%) rename perception/mil_vision/src/{navigator_vision_lib => mil_vision_lib}/colorizer/camera_observer.cpp (100%) rename perception/mil_vision/src/{navigator_vision_lib => mil_vision_lib}/colorizer/color_observation.cpp (100%) rename perception/mil_vision/src/{navigator_vision_lib => mil_vision_lib}/colorizer/pcd_colorizer.cpp (100%) rename perception/mil_vision/src/{navigator_vision_lib => mil_vision_lib}/colorizer/single_cloud_processor.cpp (100%) rename perception/mil_vision/src/{navigator_vision_lib => mil_vision_lib}/cv_utils.cc (100%) rename perception/mil_vision/src/{navigator_vision_lib => mil_vision_lib}/image_filtering.cpp (100%) rename perception/mil_vision/src/{navigator_vision_lib => mil_vision_lib}/point_cloud_algorithms.cc (100%) rename perception/mil_vision/src/{navigator_vision_lib => mil_vision_lib}/visualization.cc (100%) diff --git a/perception/mil_vision/CMakeLists.txt b/perception/mil_vision/CMakeLists.txt index b2d8d2e9..dda345ad 100644 --- a/perception/mil_vision/CMakeLists.txt +++ b/perception/mil_vision/CMakeLists.txt @@ -1,7 +1,6 @@ cmake_minimum_required(VERSION 2.8.3) -project(navigator_vision) +project(mil_vision) -# SET(CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/cmake;${CMAKE_MODULE_PATH}") # set c++11 as default, overide with set_target_properties() # if needed for specific nodes (cough..cough... 
PCL) SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fopenmp -g -pedantic -Wall -std=c++11 ") @@ -16,7 +15,7 @@ find_package(catkin image_geometry cv_bridge message_generation - navigator_msgs + mil_msgs std_msgs std_srvs geometry_msgs @@ -36,8 +35,8 @@ catkin_package( INCLUDE_DIRS include LIBRARIES - navigator_vision_lib - navigator_sparsestereo + mil_vision_lib + mil_sparsestereo CATKIN_DEPENDS roscpp rospy @@ -45,15 +44,15 @@ catkin_package( std_msgs geometry_msgs sensor_msgs - navigator_msgs - navigator_tools + mil_msgs + mil_tools pcl_ros DEPENDS system_lib image_transport image_geometry cv_bridge - navigator_msgs + mil_msgs ) include_directories( @@ -67,128 +66,32 @@ link_directories(${PCL_LIBRARY_DIRS}) add_definitions(${PCL_DEFINITIONS}) FILE(GLOB EXFAST_SOURCES exFAST_SparseStereo/src/sparsestereo/*) -add_library( navigator_sparsestereo SHARED ${EXFAST_SOURCES}) -target_include_directories(navigator_sparsestereo PUBLIC exFAST_SparseStereo/src) -target_link_libraries(navigator_sparsestereo ${catkin_LIBRARIES} ${Boost_LIBRARIES} ${OpenCV_INCLUDE_DIRS}) -set_target_properties(navigator_sparsestereo PROPERTIES COMPILE_FLAGS "-O3 -DNDEBUG -fopenmp -g -Wall -march=native -msse -msse2 -msse3 -mssse3 -msse4 -ffast-math -mfpmath=sse") +add_library( mil_sparsestereo SHARED ${EXFAST_SOURCES}) +target_include_directories(mil_sparsestereo PUBLIC exFAST_SparseStereo/src) +target_link_libraries(mil_sparsestereo ${catkin_LIBRARIES} ${Boost_LIBRARIES} ${OpenCV_INCLUDE_DIRS}) +set_target_properties(mil_sparsestereo PROPERTIES COMPILE_FLAGS "-O3 -DNDEBUG -fopenmp -g -Wall -march=native -msse -msse2 -msse3 -mssse3 -msse4 -ffast-math -mfpmath=sse") add_library( - navigator_vision_lib - src/navigator_vision_lib/visualization.cc - src/navigator_vision_lib/cv_utils.cc - src/navigator_vision_lib/image_filtering.cpp - src/navigator_vision_lib/active_contours.cpp - src/navigator_vision_lib/colorizer/pcd_colorizer.cpp - src/navigator_vision_lib/colorizer/single_cloud_processor.cpp - src/navigator_vision_lib/colorizer/camera_observer.cpp - src/navigator_vision_lib/colorizer/color_observation.cpp -) -target_include_directories(navigator_vision_lib PUBLIC include/navigator_vision_lib) -target_link_libraries(navigator_vision_lib navigator_tools_lib ${catkin_LIBRARIES} ${Boost_LIBRARIES} ${PCL_COMMON_LIBRARIES} ${PCL_COMMON_LIBRARIES}) - -add_executable(stereo_point_cloud_driver nodes/stereo_point_cloud_driver.cpp) -add_dependencies(stereo_point_cloud_driver navigator_vision_lib ${catkin_EXPORTED_TARGETS}) -target_link_libraries( - stereo_point_cloud_driver - navigator_sparsestereo - navigator_vision_lib - ${PCL_COMMON_LIBRARIES} - ${PCL_IO_LIBRARIES} - ${PCL_LIBRARIES} - ${catkin_LIBRARIES} - ${Boost_LIBRARIES} - ${OpenCV_INCLUDE_DIRS} -) -set_target_properties(stereo_point_cloud_driver PROPERTIES COMPILE_FLAGS "-O3 -DNDEBUG -fopenmp -g -Wall -march=native -msse -msse2 -msse3 -mssse3 -msse4 -ffast-math -mfpmath=sse") - + mil_vision_lib + src/mil_vision_lib/visualization.cc + src/mil_vision_lib/cv_utils.cc + src/mil_vision_lib/image_filtering.cpp + src/mil_vision_lib/active_contours.cpp + src/mil_vision_lib/colorizer/pcd_colorizer.cpp + src/mil_vision_lib/colorizer/single_cloud_processor.cpp + src/mil_vision_lib/colorizer/camera_observer.cpp + src/mil_vision_lib/colorizer/color_observation.cpp +) +target_include_directories(mil_vision_lib PUBLIC include/mil_vision_lib) +target_link_libraries(mil_vision_lib mil_tools_lib ${catkin_LIBRARIES} ${Boost_LIBRARIES} ${PCL_COMMON_LIBRARIES} ${PCL_COMMON_LIBRARIES}) 
add_executable(camera_stream_demo nodes/camera_stream_demo.cpp) -add_dependencies(camera_stream_demo navigator_vision_lib ${catkin_EXPORTED_TARGETS}) +add_dependencies(camera_stream_demo mil_vision_lib ${catkin_EXPORTED_TARGETS}) target_link_libraries( camera_stream_demo - navigator_vision_lib - ${catkin_LIBRARIES} - ${Boost_LIBRARIES} - ${OpenCV_INCLUDE_DIRS} -) - -add_executable(underwater_shape_identification_vision - nodes/underwater_shape_identification_vision.cpp - src/missions/underwater_shape_identification.cpp -) -add_dependencies(underwater_shape_identification_vision navigator_vision_lib ${catkin_EXPORTED_TARGETS}) -target_link_libraries( - underwater_shape_identification_vision - navigator_vision_lib - navigator_tools_lib - ${catkin_LIBRARIES} - ${Boost_LIBRARIES} - ${OpenCV_INCLUDE_DIRS} -) - -add_executable(A_C_test_vision - nodes/A_C_test_vision.cpp -) -add_dependencies(A_C_test_vision navigator_vision_lib ${catkin_EXPORTED_TARGETS}) -target_link_libraries( - A_C_test_vision - navigator_vision_lib - navigator_tools_lib + mil_vision_lib ${catkin_LIBRARIES} ${Boost_LIBRARIES} ${OpenCV_INCLUDE_DIRS} ) -add_executable(shape_identification - nodes/shape_identification/main.cpp - nodes/shape_identification/DockShapeVision.cpp - nodes/shape_identification/GrayscaleContour/GrayscaleContour.cpp - nodes/shape_identification/TrackedShape.cpp - nodes/shape_identification/ShapeTracker.cpp -) -add_dependencies(shape_identification - ${catkin_EXPORTED_TARGETS} -) -target_link_libraries(shape_identification - ${catkin_LIBRARIES} - ${OpenCV_LIBS} -) -add_executable(camera_to_lidar - nodes/camera_to_lidar/main.cpp - nodes/camera_to_lidar/CameraLidarTransformer.cpp -) -add_dependencies(camera_to_lidar - ${catkin_EXPORTED_TARGETS} -) -target_link_libraries(camera_to_lidar - ${catkin_LIBRARIES} - ${OpenCV_LIBS} -) - -add_executable(velodyne_pcd_colorizer nodes/velodyne_pcd_colorizer.cpp) -add_dependencies(velodyne_pcd_colorizer navigator_vision_lib ${catkin_EXPORTED_TARGETS}) -target_link_libraries( - velodyne_pcd_colorizer - navigator_vision_lib - navigator_tools_lib - ${PCL_COMMON_LIBRARIES} - ${PCL_IO_LIBRARIES} - ${PCL_LIBRARIES} - ${catkin_LIBRARIES} - ${Boost_LIBRARIES} - ${OpenCV_INCLUDE_DIRS} -) - -install(TARGETS navigator_vision_lib navigator_sparsestereo - ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} - LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} - RUNTIME DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION} -) -install(DIRECTORY include/${PROJECT_NAME}/ - DESTINATION ${CATKIN_PACKAGE_INCLUDE_DESTINATION} - FILES_MATCHING PATTERN "*.hpp" - PATTERN ".svn" EXCLUDE -) -install( - TARGETS -) diff --git a/perception/mil_vision/great_merge_purgatory/nodes/camera_to_lidar/CameraLidarTransformer.hpp b/perception/mil_vision/great_merge_purgatory/nodes/camera_to_lidar/CameraLidarTransformer.hpp index 2f8e2da9..1273129d 100644 --- a/perception/mil_vision/great_merge_purgatory/nodes/camera_to_lidar/CameraLidarTransformer.hpp +++ b/perception/mil_vision/great_merge_purgatory/nodes/camera_to_lidar/CameraLidarTransformer.hpp @@ -5,7 +5,7 @@ #include #include #include -#include +#include #include #include #include @@ -53,7 +53,7 @@ class CameraLidarTransformer bool inCameraFrame(cv::Point2d& p); void cameraInfoCallback(const sensor_msgs::CameraInfo info); void drawPoint(cv::Mat& mat, cv::Point2d& p, cv::Scalar color=cv::Scalar(0, 0, 255)); - bool transformServiceCallback(navigator_msgs::CameraToLidarTransform::Request &req,navigator_msgs::CameraToLidarTransform::Response &res); + bool 
transformServiceCallback(mil_msgs::CameraToLidarTransform::Request &req,mil_msgs::CameraToLidarTransform::Response &res); static ros::Duration MAX_TIME_ERR; #ifdef DO_ROS_DEBUG ros::Publisher pubMarkers; diff --git a/perception/mil_vision/include/navigator_vision_lib/active_contours.hpp b/perception/mil_vision/include/mil_vision_lib/active_contours.hpp similarity index 100% rename from perception/mil_vision/include/navigator_vision_lib/active_contours.hpp rename to perception/mil_vision/include/mil_vision_lib/active_contours.hpp diff --git a/perception/mil_vision/include/navigator_vision_lib/colorizer/camera_observer.hpp b/perception/mil_vision/include/mil_vision_lib/colorizer/camera_observer.hpp similarity index 86% rename from perception/mil_vision/include/navigator_vision_lib/colorizer/camera_observer.hpp rename to perception/mil_vision/include/mil_vision_lib/colorizer/camera_observer.hpp index bf33008f..73950829 100644 --- a/perception/mil_vision/include/navigator_vision_lib/colorizer/camera_observer.hpp +++ b/perception/mil_vision/include/mil_vision_lib/colorizer/camera_observer.hpp @@ -1,7 +1,7 @@ #pragma once -#include // Common includes are here -#include +#include // Common includes are here +#include #include #include #include diff --git a/perception/mil_vision/include/navigator_vision_lib/colorizer/color_observation.hpp b/perception/mil_vision/include/mil_vision_lib/colorizer/color_observation.hpp similarity index 87% rename from perception/mil_vision/include/navigator_vision_lib/colorizer/color_observation.hpp rename to perception/mil_vision/include/mil_vision_lib/colorizer/color_observation.hpp index b35c7807..890f8dff 100644 --- a/perception/mil_vision/include/navigator_vision_lib/colorizer/color_observation.hpp +++ b/perception/mil_vision/include/mil_vision_lib/colorizer/color_observation.hpp @@ -1,7 +1,7 @@ #pragma once -#include -#include +#include +#include #include #include diff --git a/perception/mil_vision/include/navigator_vision_lib/colorizer/common.hpp b/perception/mil_vision/include/mil_vision_lib/colorizer/common.hpp similarity index 100% rename from perception/mil_vision/include/navigator_vision_lib/colorizer/common.hpp rename to perception/mil_vision/include/mil_vision_lib/colorizer/common.hpp diff --git a/perception/mil_vision/include/navigator_vision_lib/colorizer/pcd_colorizer.hpp b/perception/mil_vision/include/mil_vision_lib/colorizer/pcd_colorizer.hpp similarity index 87% rename from perception/mil_vision/include/navigator_vision_lib/colorizer/pcd_colorizer.hpp rename to perception/mil_vision/include/mil_vision_lib/colorizer/pcd_colorizer.hpp index 04bb9760..1584e238 100644 --- a/perception/mil_vision/include/navigator_vision_lib/colorizer/pcd_colorizer.hpp +++ b/perception/mil_vision/include/mil_vision_lib/colorizer/pcd_colorizer.hpp @@ -1,10 +1,10 @@ #pragma once -#include -#include -#include -#include -#include +#include +#include +#include +#include +#include #include #include diff --git a/perception/mil_vision/include/navigator_vision_lib/colorizer/single_cloud_processor.hpp b/perception/mil_vision/include/mil_vision_lib/colorizer/single_cloud_processor.hpp similarity index 80% rename from perception/mil_vision/include/navigator_vision_lib/colorizer/single_cloud_processor.hpp rename to perception/mil_vision/include/mil_vision_lib/colorizer/single_cloud_processor.hpp index 5b8743fb..79859c9d 100644 --- a/perception/mil_vision/include/navigator_vision_lib/colorizer/single_cloud_processor.hpp +++ 
b/perception/mil_vision/include/mil_vision_lib/colorizer/single_cloud_processor.hpp @@ -1,9 +1,9 @@ #pragma once -#include -#include -#include -#include +#include +#include +#include +#include #include #include diff --git a/perception/mil_vision/include/navigator_vision_lib/cv_tools.hpp b/perception/mil_vision/include/mil_vision_lib/cv_tools.hpp similarity index 100% rename from perception/mil_vision/include/navigator_vision_lib/cv_tools.hpp rename to perception/mil_vision/include/mil_vision_lib/cv_tools.hpp diff --git a/perception/mil_vision/include/navigator_vision_lib/image_acquisition/camera_frame.hpp b/perception/mil_vision/include/mil_vision_lib/image_acquisition/camera_frame.hpp similarity index 98% rename from perception/mil_vision/include/navigator_vision_lib/image_acquisition/camera_frame.hpp rename to perception/mil_vision/include/mil_vision_lib/image_acquisition/camera_frame.hpp index 7a0cf66c..dee3c788 100644 --- a/perception/mil_vision/include/navigator_vision_lib/image_acquisition/camera_frame.hpp +++ b/perception/mil_vision/include/mil_vision_lib/image_acquisition/camera_frame.hpp @@ -1,6 +1,6 @@ #pragma once -// #include +// #include #include #include #include diff --git a/perception/mil_vision/include/navigator_vision_lib/image_acquisition/camera_frame_sequence.hpp b/perception/mil_vision/include/mil_vision_lib/image_acquisition/camera_frame_sequence.hpp similarity index 97% rename from perception/mil_vision/include/navigator_vision_lib/image_acquisition/camera_frame_sequence.hpp rename to perception/mil_vision/include/mil_vision_lib/image_acquisition/camera_frame_sequence.hpp index 147d8357..7b34d7ee 100644 --- a/perception/mil_vision/include/navigator_vision_lib/image_acquisition/camera_frame_sequence.hpp +++ b/perception/mil_vision/include/mil_vision_lib/image_acquisition/camera_frame_sequence.hpp @@ -1,6 +1,6 @@ #pragma once -#include +#include #include #include #include diff --git a/perception/mil_vision/include/navigator_vision_lib/image_acquisition/camera_model.hpp b/perception/mil_vision/include/mil_vision_lib/image_acquisition/camera_model.hpp similarity index 100% rename from perception/mil_vision/include/navigator_vision_lib/image_acquisition/camera_model.hpp rename to perception/mil_vision/include/mil_vision_lib/image_acquisition/camera_model.hpp diff --git a/perception/mil_vision/include/navigator_vision_lib/image_acquisition/ros_camera_stream.hpp b/perception/mil_vision/include/mil_vision_lib/image_acquisition/ros_camera_stream.hpp similarity index 99% rename from perception/mil_vision/include/navigator_vision_lib/image_acquisition/ros_camera_stream.hpp rename to perception/mil_vision/include/mil_vision_lib/image_acquisition/ros_camera_stream.hpp index 3127f404..820dd74a 100644 --- a/perception/mil_vision/include/navigator_vision_lib/image_acquisition/ros_camera_stream.hpp +++ b/perception/mil_vision/include/mil_vision_lib/image_acquisition/ros_camera_stream.hpp @@ -1,7 +1,7 @@ #pragma once -#include -#include +#include +#include #include #include diff --git a/perception/mil_vision/include/navigator_vision_lib/image_filtering.hpp b/perception/mil_vision/include/mil_vision_lib/image_filtering.hpp similarity index 97% rename from perception/mil_vision/include/navigator_vision_lib/image_filtering.hpp rename to perception/mil_vision/include/mil_vision_lib/image_filtering.hpp index 44ea8e7d..0d5becd9 100644 --- a/perception/mil_vision/include/navigator_vision_lib/image_filtering.hpp +++ b/perception/mil_vision/include/mil_vision_lib/image_filtering.hpp @@ -5,7 
+5,7 @@ #include #include -#include +#include namespace nav { diff --git a/perception/mil_vision/include/navigator_vision_lib/pcd_sub_pub_algorithm.hpp b/perception/mil_vision/include/mil_vision_lib/pcd_sub_pub_algorithm.hpp similarity index 100% rename from perception/mil_vision/include/navigator_vision_lib/pcd_sub_pub_algorithm.hpp rename to perception/mil_vision/include/mil_vision_lib/pcd_sub_pub_algorithm.hpp diff --git a/perception/mil_vision/include/navigator_vision_lib/point_cloud_algorithms.hpp b/perception/mil_vision/include/mil_vision_lib/point_cloud_algorithms.hpp similarity index 100% rename from perception/mil_vision/include/navigator_vision_lib/point_cloud_algorithms.hpp rename to perception/mil_vision/include/mil_vision_lib/point_cloud_algorithms.hpp diff --git a/perception/mil_vision/include/navigator_vision_lib/visualization.hpp b/perception/mil_vision/include/mil_vision_lib/visualization.hpp similarity index 100% rename from perception/mil_vision/include/navigator_vision_lib/visualization.hpp rename to perception/mil_vision/include/mil_vision_lib/visualization.hpp diff --git a/perception/mil_vision/package.xml b/perception/mil_vision/package.xml index cd84ab19..b57848f0 100644 --- a/perception/mil_vision/package.xml +++ b/perception/mil_vision/package.xml @@ -1,10 +1,9 @@ - navigator_vision - 1.0.0 + mil_vision + 2.0.0 Nodes and libraries used for computer vision perception on NaviGator - tess David Soto MIT catkin @@ -18,8 +17,8 @@ image_geometry cv_bridge cv_bridge - navigator_msgs - navigator_msgs + mil_msgs + mil_msgs message_generation message_runtime std_msgs diff --git a/perception/mil_vision/src/navigator_vision_lib/active_contours.cpp b/perception/mil_vision/src/mil_vision_lib/active_contours.cpp similarity index 100% rename from perception/mil_vision/src/navigator_vision_lib/active_contours.cpp rename to perception/mil_vision/src/mil_vision_lib/active_contours.cpp diff --git a/perception/mil_vision/src/navigator_vision_lib/colorizer/camera_observer.cpp b/perception/mil_vision/src/mil_vision_lib/colorizer/camera_observer.cpp similarity index 100% rename from perception/mil_vision/src/navigator_vision_lib/colorizer/camera_observer.cpp rename to perception/mil_vision/src/mil_vision_lib/colorizer/camera_observer.cpp diff --git a/perception/mil_vision/src/navigator_vision_lib/colorizer/color_observation.cpp b/perception/mil_vision/src/mil_vision_lib/colorizer/color_observation.cpp similarity index 100% rename from perception/mil_vision/src/navigator_vision_lib/colorizer/color_observation.cpp rename to perception/mil_vision/src/mil_vision_lib/colorizer/color_observation.cpp diff --git a/perception/mil_vision/src/navigator_vision_lib/colorizer/pcd_colorizer.cpp b/perception/mil_vision/src/mil_vision_lib/colorizer/pcd_colorizer.cpp similarity index 100% rename from perception/mil_vision/src/navigator_vision_lib/colorizer/pcd_colorizer.cpp rename to perception/mil_vision/src/mil_vision_lib/colorizer/pcd_colorizer.cpp diff --git a/perception/mil_vision/src/navigator_vision_lib/colorizer/single_cloud_processor.cpp b/perception/mil_vision/src/mil_vision_lib/colorizer/single_cloud_processor.cpp similarity index 100% rename from perception/mil_vision/src/navigator_vision_lib/colorizer/single_cloud_processor.cpp rename to perception/mil_vision/src/mil_vision_lib/colorizer/single_cloud_processor.cpp diff --git a/perception/mil_vision/src/navigator_vision_lib/cv_utils.cc b/perception/mil_vision/src/mil_vision_lib/cv_utils.cc similarity index 100% rename from 
perception/mil_vision/src/navigator_vision_lib/cv_utils.cc rename to perception/mil_vision/src/mil_vision_lib/cv_utils.cc diff --git a/perception/mil_vision/src/navigator_vision_lib/image_filtering.cpp b/perception/mil_vision/src/mil_vision_lib/image_filtering.cpp similarity index 100% rename from perception/mil_vision/src/navigator_vision_lib/image_filtering.cpp rename to perception/mil_vision/src/mil_vision_lib/image_filtering.cpp diff --git a/perception/mil_vision/src/navigator_vision_lib/point_cloud_algorithms.cc b/perception/mil_vision/src/mil_vision_lib/point_cloud_algorithms.cc similarity index 100% rename from perception/mil_vision/src/navigator_vision_lib/point_cloud_algorithms.cc rename to perception/mil_vision/src/mil_vision_lib/point_cloud_algorithms.cc diff --git a/perception/mil_vision/src/navigator_vision_lib/visualization.cc b/perception/mil_vision/src/mil_vision_lib/visualization.cc similarity index 100% rename from perception/mil_vision/src/navigator_vision_lib/visualization.cc rename to perception/mil_vision/src/mil_vision_lib/visualization.cc From 79f667578e5bf8d2c79fb77b480ae7f0952c01e2 Mon Sep 17 00:00:00 2001 From: David Soto Date: Sat, 18 Mar 2017 13:41:44 -0400 Subject: [PATCH 241/267] GREAT MERGE: transform uf_common to mil_msgs --- .../mil_msgs}/CMakeLists.txt | 30 ++++++++++++------- utils/mil_msgs/README.txt | 6 ++++ .../mil_msgs}/action/MoveTo.action | 2 +- .../mil_msgs/include/mil_msgs}/msg_helpers.h | 4 +-- .../include/mil_msgs}/param_helpers.h | 4 +-- .../mil_msgs/mil_msgs}/__init__.py | 0 .../mil_msgs}/msg/Acceleration.msg | 0 .../mil_msgs}/msg/Float64Stamped.msg | 0 .../mil_msgs}/msg/PoseTwist.msg | 0 .../mil_msgs}/msg/PoseTwistStamped.msg | 0 .../mil_msgs}/msg/VelocityMeasurement.msg | 0 .../mil_msgs}/msg/VelocityMeasurements.msg | 0 .../uf_common => utils/mil_msgs}/package.xml | 7 +++-- .../mil_msgs}/scripts/param_saver | 0 .../uf_common => utils/mil_msgs}/setup.py | 2 +- 15 files changed, 33 insertions(+), 22 deletions(-) rename {.great_merge/utils/uf_common => utils/mil_msgs}/CMakeLists.txt (52%) create mode 100644 utils/mil_msgs/README.txt rename {.great_merge/utils/uf_common => utils/mil_msgs}/action/MoveTo.action (90%) rename {.great_merge/utils/uf_common/include/uf_common => utils/mil_msgs/include/mil_msgs}/msg_helpers.h (95%) rename {.great_merge/utils/uf_common/include/uf_common => utils/mil_msgs/include/mil_msgs}/param_helpers.h (98%) rename {.great_merge/utils/uf_common/uf_common => utils/mil_msgs/mil_msgs}/__init__.py (100%) rename {.great_merge/utils/uf_common => utils/mil_msgs}/msg/Acceleration.msg (100%) rename {.great_merge/utils/uf_common => utils/mil_msgs}/msg/Float64Stamped.msg (100%) rename {.great_merge/utils/uf_common => utils/mil_msgs}/msg/PoseTwist.msg (100%) rename {.great_merge/utils/uf_common => utils/mil_msgs}/msg/PoseTwistStamped.msg (100%) rename {.great_merge/utils/uf_common => utils/mil_msgs}/msg/VelocityMeasurement.msg (100%) rename {.great_merge/utils/uf_common => utils/mil_msgs}/msg/VelocityMeasurements.msg (100%) rename {.great_merge/utils/uf_common => utils/mil_msgs}/package.xml (82%) rename {.great_merge/utils/uf_common => utils/mil_msgs}/scripts/param_saver (100%) rename {.great_merge/utils/uf_common => utils/mil_msgs}/setup.py (88%) diff --git a/.great_merge/utils/uf_common/CMakeLists.txt b/utils/mil_msgs/CMakeLists.txt similarity index 52% rename from .great_merge/utils/uf_common/CMakeLists.txt rename to utils/mil_msgs/CMakeLists.txt index 62efc331..a65ab09c 100644 --- a/.great_merge/utils/uf_common/CMakeLists.txt 
+++ b/utils/mil_msgs/CMakeLists.txt
@@ -1,16 +1,24 @@
 cmake_minimum_required(VERSION 2.8.3)
-project(uf_common)
-find_package(catkin REQUIRED COMPONENTS message_generation message_runtime geometry_msgs tf actionlib interactive_markers std_msgs actionlib_msgs cmake_modules)
+project(mil_msgs)
+find_package(catkin
+  REQUIRED COMPONENTS
+    message_generation
+    message_runtime
+    geometry_msgs
+    tf
+    actionlib
+    interactive_markers
+    std_msgs
+    actionlib_msgs
+    cmake_modules
+)
 
 catkin_python_setup()
 
-add_action_files(
-  FILES
-  MoveTo.action
+add_action_files(FILES
+  MoveTo.action
 )
-
-add_message_files(
-  FILES
+add_message_files(FILES
   PoseTwistStamped.msg
   PoseTwist.msg
   VelocityMeasurements.msg
@@ -19,10 +27,10 @@ add_message_files(
   Acceleration.msg
 )
 
-find_package(Eigen REQUIRED)
+find_package(Eigen 3 REQUIRED)
 include_directories(${Eigen_INCLUDE_DIRS})
 generate_messages(
-    DEPENDENCIES std_msgs actionlib_msgs geometry_msgs uf_common
+    DEPENDENCIES std_msgs actionlib_msgs geometry_msgs mil_msgs
 )
 catkin_package(
     DEPENDS Eigen
@@ -32,4 +40,4 @@ catkin_package(
 )
 include_directories(${EIGEN_INCLUDE_DIRS} include ${Boost_INCLUDE_DIR} ${catkin_INCLUDE_DIRS})
 
-install(PROGRAMS scripts/interactive_marker_moveto scripts/velocitymeasurements_to_vector3 scripts/param_saver DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION})
+#install(PROGRAMS scripts/interactive_marker_moveto scripts/velocitymeasurements_to_vector3 scripts/param_saver DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION})
diff --git a/utils/mil_msgs/README.txt b/utils/mil_msgs/README.txt
new file mode 100644
index 00000000..3aa7b83d
--- /dev/null
+++ b/utils/mil_msgs/README.txt
@@ -0,0 +1,6 @@
+This is the beginning of the mil_msgs ROS package. It is still in the process of being adapted from the legacy uf_common package. Be sure to update old references to uf_common if you find them around our repos.
+
+This package should contain any .msg, .srv, or .action files that are platform agnostic and have the potential to be used by multiple robots.
+
+Note:
+The 'msg_helpers' and 'param_helpers' headers are a better fit for the mil_dev_tools package, but I (David Soto) will not be transitioning them at the moment. (Feel free to do so!)
diff --git a/.great_merge/utils/uf_common/action/MoveTo.action b/utils/mil_msgs/action/MoveTo.action
similarity index 90%
rename from .great_merge/utils/uf_common/action/MoveTo.action
rename to utils/mil_msgs/action/MoveTo.action
index c2ec5960..824ca3fe 100644
--- a/.great_merge/utils/uf_common/action/MoveTo.action
+++ b/utils/mil_msgs/action/MoveTo.action
@@ -1,6 +1,6 @@
 # goal. copies PoseTwistStamped.
Header header -uf_common/PoseTwist posetwist +mil_msgs/PoseTwist posetwist float64 speed bool uncoordinated # false goes in a straight line, true achieves some components before others float64 linear_tolerance # distance from goal for result to be sent diff --git a/.great_merge/utils/uf_common/include/uf_common/msg_helpers.h b/utils/mil_msgs/include/mil_msgs/msg_helpers.h similarity index 95% rename from .great_merge/utils/uf_common/include/uf_common/msg_helpers.h rename to utils/mil_msgs/include/mil_msgs/msg_helpers.h index 548a98f8..578467b8 100644 --- a/.great_merge/utils/uf_common/include/uf_common/msg_helpers.h +++ b/utils/mil_msgs/include/mil_msgs/msg_helpers.h @@ -1,5 +1,4 @@ -#ifndef UF_COMMON__MSG_HELPERS_H -#define UF_COMMON__MSG_HELPERS_H +#pragma once #include #include @@ -76,4 +75,3 @@ inline T quat2xyzw(Eigen::Quaterniond q) { } } -#endif diff --git a/.great_merge/utils/uf_common/include/uf_common/param_helpers.h b/utils/mil_msgs/include/mil_msgs/param_helpers.h similarity index 98% rename from .great_merge/utils/uf_common/include/uf_common/param_helpers.h rename to utils/mil_msgs/include/mil_msgs/param_helpers.h index fee648cf..09c353b5 100644 --- a/.great_merge/utils/uf_common/include/uf_common/param_helpers.h +++ b/utils/mil_msgs/include/mil_msgs/param_helpers.h @@ -1,5 +1,4 @@ -#ifndef UF_COMMON__PARAM_HELPERS_H -#define UF_COMMON__PARAM_HELPERS_H +#pragma once #include #include @@ -124,4 +123,3 @@ T getParam(ros::NodeHandle &nh, std::string name, T default_value) { } } -#endif diff --git a/.great_merge/utils/uf_common/uf_common/__init__.py b/utils/mil_msgs/mil_msgs/__init__.py similarity index 100% rename from .great_merge/utils/uf_common/uf_common/__init__.py rename to utils/mil_msgs/mil_msgs/__init__.py diff --git a/.great_merge/utils/uf_common/msg/Acceleration.msg b/utils/mil_msgs/msg/Acceleration.msg similarity index 100% rename from .great_merge/utils/uf_common/msg/Acceleration.msg rename to utils/mil_msgs/msg/Acceleration.msg diff --git a/.great_merge/utils/uf_common/msg/Float64Stamped.msg b/utils/mil_msgs/msg/Float64Stamped.msg similarity index 100% rename from .great_merge/utils/uf_common/msg/Float64Stamped.msg rename to utils/mil_msgs/msg/Float64Stamped.msg diff --git a/.great_merge/utils/uf_common/msg/PoseTwist.msg b/utils/mil_msgs/msg/PoseTwist.msg similarity index 100% rename from .great_merge/utils/uf_common/msg/PoseTwist.msg rename to utils/mil_msgs/msg/PoseTwist.msg diff --git a/.great_merge/utils/uf_common/msg/PoseTwistStamped.msg b/utils/mil_msgs/msg/PoseTwistStamped.msg similarity index 100% rename from .great_merge/utils/uf_common/msg/PoseTwistStamped.msg rename to utils/mil_msgs/msg/PoseTwistStamped.msg diff --git a/.great_merge/utils/uf_common/msg/VelocityMeasurement.msg b/utils/mil_msgs/msg/VelocityMeasurement.msg similarity index 100% rename from .great_merge/utils/uf_common/msg/VelocityMeasurement.msg rename to utils/mil_msgs/msg/VelocityMeasurement.msg diff --git a/.great_merge/utils/uf_common/msg/VelocityMeasurements.msg b/utils/mil_msgs/msg/VelocityMeasurements.msg similarity index 100% rename from .great_merge/utils/uf_common/msg/VelocityMeasurements.msg rename to utils/mil_msgs/msg/VelocityMeasurements.msg diff --git a/.great_merge/utils/uf_common/package.xml b/utils/mil_msgs/package.xml similarity index 82% rename from .great_merge/utils/uf_common/package.xml rename to utils/mil_msgs/package.xml index 5414fab8..f0cb7cb1 100644 --- a/.great_merge/utils/uf_common/package.xml +++ b/utils/mil_msgs/package.xml @@ -1,12 +1,13 @@ - uf_common + 
mil_msgs 1.0.0 - uf_common + Common ROS msgs,srvs and actions for MIL repos Forrest Voight + David Soto BSD - http://ros.org/wiki/uf_common + http://github.com/uf-mil/mil_common Forrest Voight catkin diff --git a/.great_merge/utils/uf_common/scripts/param_saver b/utils/mil_msgs/scripts/param_saver similarity index 100% rename from .great_merge/utils/uf_common/scripts/param_saver rename to utils/mil_msgs/scripts/param_saver diff --git a/.great_merge/utils/uf_common/setup.py b/utils/mil_msgs/setup.py similarity index 88% rename from .great_merge/utils/uf_common/setup.py rename to utils/mil_msgs/setup.py index 9ab73768..aa9637cd 100644 --- a/.great_merge/utils/uf_common/setup.py +++ b/utils/mil_msgs/setup.py @@ -4,7 +4,7 @@ from catkin_pkg.python_setup import generate_distutils_setup setup_args = generate_distutils_setup( - packages=['uf_common'], + packages=['mil_msgs'], ) setup(**setup_args) From 284a0c4d95ccbc80f63d22015b839d0a64424f78 Mon Sep 17 00:00:00 2001 From: David Soto Date: Sun, 19 Mar 2017 04:51:44 -0400 Subject: [PATCH 242/267] GREAT MERGE: convert sub8_sonar to mil_passive_sonar Added the passive to the name because we now have an imaging sonar. --- .../sub8_sonar => drivers/mil_passive_sonar}/CMakeLists.txt | 0 .../drivers/sub8_sonar => drivers/mil_passive_sonar}/README.md | 0 .../sub8_sonar => drivers/mil_passive_sonar}/launch/test.launch | 0 .../mil_passive_sonar/mil_passive_sonar}/__init__.py | 0 .../mil_passive_sonar/mil_passive_sonar}/sonar_driver.py | 0 .../mil_passive_sonar}/multilateration/__init__.py | 0 .../mil_passive_sonar}/multilateration/multilateration.py | 0 .../sub8_sonar => drivers/mil_passive_sonar}/nodes/plotter.py | 0 .../sub8_sonar => drivers/mil_passive_sonar}/nodes/sonar_test.py | 0 .../drivers/sub8_sonar => drivers/mil_passive_sonar}/package.xml | 0 .../drivers/sub8_sonar => drivers/mil_passive_sonar}/setup.py | 0 11 files changed, 0 insertions(+), 0 deletions(-) rename {.great_merge/drivers/sub8_sonar => drivers/mil_passive_sonar}/CMakeLists.txt (100%) rename {.great_merge/drivers/sub8_sonar => drivers/mil_passive_sonar}/README.md (100%) rename {.great_merge/drivers/sub8_sonar => drivers/mil_passive_sonar}/launch/test.launch (100%) rename {.great_merge/drivers/sub8_sonar/sub8_sonar => drivers/mil_passive_sonar/mil_passive_sonar}/__init__.py (100%) rename {.great_merge/drivers/sub8_sonar/sub8_sonar => drivers/mil_passive_sonar/mil_passive_sonar}/sonar_driver.py (100%) rename {.great_merge/drivers/sub8_sonar => drivers/mil_passive_sonar}/multilateration/__init__.py (100%) rename {.great_merge/drivers/sub8_sonar => drivers/mil_passive_sonar}/multilateration/multilateration.py (100%) rename {.great_merge/drivers/sub8_sonar => drivers/mil_passive_sonar}/nodes/plotter.py (100%) rename {.great_merge/drivers/sub8_sonar => drivers/mil_passive_sonar}/nodes/sonar_test.py (100%) rename {.great_merge/drivers/sub8_sonar => drivers/mil_passive_sonar}/package.xml (100%) rename {.great_merge/drivers/sub8_sonar => drivers/mil_passive_sonar}/setup.py (100%) diff --git a/.great_merge/drivers/sub8_sonar/CMakeLists.txt b/drivers/mil_passive_sonar/CMakeLists.txt similarity index 100% rename from .great_merge/drivers/sub8_sonar/CMakeLists.txt rename to drivers/mil_passive_sonar/CMakeLists.txt diff --git a/.great_merge/drivers/sub8_sonar/README.md b/drivers/mil_passive_sonar/README.md similarity index 100% rename from .great_merge/drivers/sub8_sonar/README.md rename to drivers/mil_passive_sonar/README.md diff --git a/.great_merge/drivers/sub8_sonar/launch/test.launch 
b/drivers/mil_passive_sonar/launch/test.launch
similarity index 100%
rename from .great_merge/drivers/sub8_sonar/launch/test.launch
rename to drivers/mil_passive_sonar/launch/test.launch
diff --git a/.great_merge/drivers/sub8_sonar/sub8_sonar/__init__.py b/drivers/mil_passive_sonar/mil_passive_sonar/__init__.py
similarity index 100%
rename from .great_merge/drivers/sub8_sonar/sub8_sonar/__init__.py
rename to drivers/mil_passive_sonar/mil_passive_sonar/__init__.py
diff --git a/.great_merge/drivers/sub8_sonar/sub8_sonar/sonar_driver.py b/drivers/mil_passive_sonar/mil_passive_sonar/sonar_driver.py
similarity index 100%
rename from .great_merge/drivers/sub8_sonar/sub8_sonar/sonar_driver.py
rename to drivers/mil_passive_sonar/mil_passive_sonar/sonar_driver.py
diff --git a/.great_merge/drivers/sub8_sonar/multilateration/__init__.py b/drivers/mil_passive_sonar/multilateration/__init__.py
similarity index 100%
rename from .great_merge/drivers/sub8_sonar/multilateration/__init__.py
rename to drivers/mil_passive_sonar/multilateration/__init__.py
diff --git a/.great_merge/drivers/sub8_sonar/multilateration/multilateration.py b/drivers/mil_passive_sonar/multilateration/multilateration.py
similarity index 100%
rename from .great_merge/drivers/sub8_sonar/multilateration/multilateration.py
rename to drivers/mil_passive_sonar/multilateration/multilateration.py
diff --git a/.great_merge/drivers/sub8_sonar/nodes/plotter.py b/drivers/mil_passive_sonar/nodes/plotter.py
similarity index 100%
rename from .great_merge/drivers/sub8_sonar/nodes/plotter.py
rename to drivers/mil_passive_sonar/nodes/plotter.py
diff --git a/.great_merge/drivers/sub8_sonar/nodes/sonar_test.py b/drivers/mil_passive_sonar/nodes/sonar_test.py
similarity index 100%
rename from .great_merge/drivers/sub8_sonar/nodes/sonar_test.py
rename to drivers/mil_passive_sonar/nodes/sonar_test.py
diff --git a/.great_merge/drivers/sub8_sonar/package.xml b/drivers/mil_passive_sonar/package.xml
similarity index 100%
rename from .great_merge/drivers/sub8_sonar/package.xml
rename to drivers/mil_passive_sonar/package.xml
diff --git a/.great_merge/drivers/sub8_sonar/setup.py b/drivers/mil_passive_sonar/setup.py
similarity index 100%
rename from .great_merge/drivers/sub8_sonar/setup.py
rename to drivers/mil_passive_sonar/setup.py

From d1e5a15d780fe751f41004b19d3929014245352a Mon Sep 17 00:00:00 2001
From: David Soto
Date: Sun, 19 Mar 2017 04:59:22 -0400
Subject: [PATCH 243/267] SONAR: add script to generate dtoas to test hardware

---
 .../scripts/generate_dtoa.py | 63 +++++++++++++++++++
 1 file changed, 63 insertions(+)
 create mode 100755 drivers/mil_passive_sonar/scripts/generate_dtoa.py

diff --git a/drivers/mil_passive_sonar/scripts/generate_dtoa.py b/drivers/mil_passive_sonar/scripts/generate_dtoa.py
new file mode 100755
index 00000000..b23c3b68
--- /dev/null
+++ b/drivers/mil_passive_sonar/scripts/generate_dtoa.py
@@ -0,0 +1,63 @@
+#!/usr/bin/env python
+from multilateration import *
+import numpy as np
+import argparse
+
+h0 = {'x' : 0, 'y': 0, 'z' : 0}  # reference hydrophone at the origin; excluded from the list below
+h1 = {'x' : 25.4, 'y': 0, 'z' : 0}
+h2 = {'x' : -25.4, 'y': 0, 'z' : 0}
+h3 = {'x' : 0, 'y': 25.4, 'z' : 0}
+hydrophone_locations = [h1, h2, h3]
+
+def dict_to_xyz_vec(pos_dict):
+    '''
+    Converts a dictionary with 'x', 'y', and 'z' keys to a numpy array
+    '''
+    p = pos_dict
+    return np.array([p['x'], p['y'], p['z']])
+
+def get_dtoa(c, sensor_position, source):
+    '''
+    Calculates the dtoa of a signal between a sensor at sensor_position and the reference sensor at the origin
+    c should be in millimeters per microsecond
+    position params should have units of mm
+    '''
+    assert(isinstance(sensor_position, np.ndarray))
+    assert(isinstance(source, np.ndarray))
+    assert(len(sensor_position) == 3)
+    assert(len(source) == 3)
+    t_ref = np.linalg.norm(source) / c
+    t = np.linalg.norm(source - sensor_position) / c
+    delta_t = t_ref - t
+    #print __name__, sensor_position, source, c, t_ref, t, delta_t
+    return delta_t
+
+def get_batch_dtoa(c, sensor_positions, source):
+    '''
+    Calculates the dtoas of a signal for a list of sensor positions. The reference
+    sensor is assumed to be at the origin.
+    c should be in millimeters per microsecond
+    position params should have units of mm
+    '''
+    dtoa = []
+    for x in sensor_positions:
+        dtoa.append(get_dtoa(c, x, source))
+    return dtoa
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser(
+        description='Generates dtoa measurements given sensor and source locations',
+        usage='Pass the path to a file with source locations. Each row should have 3 columns. ' +
+              'The columns should have the x, y, and z coordinates of each source location, ' \
+              'separated by spaces or tabs. Coordinates should be in units of millimeters.')
+    parser.add_argument('path_to_input')
+    args = parser.parse_args()
+    sonar_test = np.loadtxt(args.path_to_input)
+    sensors = [dict_to_xyz_vec(x) for x in hydrophone_locations]
+    res = []
+    for case in sonar_test:
+        res.append(get_batch_dtoa(1.484, sensors, case))  # 1.484 mm/us is roughly the speed of sound in water
+    res = np.array(res)
+    # output format is {number of input rows} rows w/ 3 columns (dtoa measurements in microseconds)
+    print res
+
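A quick numerical sanity check of get_dtoa above, with hypothetical values: the 25.4 mm
hydrophone offset and the 1.484 mm/us speed of sound are the same assumptions the script
already makes, and the snippet expects get_dtoa to be in scope (e.g. pasted into the same
interpreter session).

    import numpy as np
    sensor = np.array([25.4, 0.0, 0.0])    # h1, 25.4 mm along +x from the reference
    source = np.array([1000.0, 0.0, 0.0])  # pinger 1 m away along +x
    # The signal travels 25.4 mm less to reach the offset sensor, so the expected
    # dtoa is (1000.0 - 974.6) / 1.484, roughly 17.12 microseconds.
    print(get_dtoa(1.484, sensor, source))

For a source well off the sensor axis (say [0, 0, -5000.0]), the path lengths to the
reference and to h1 are nearly equal, so the dtoa shrinks toward zero; that asymmetry
is what the multilateration solver exploits.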
From 324bcdb7c5402d9c66ef539427117e1db85da04e Mon Sep 17 00:00:00 2001
From: David Soto
Date: Sun, 19 Mar 2017 06:10:52 -0400
Subject: [PATCH 244/267] GREAT MERGE: adapt mil_vision to compile w/ new names

---
 .../colorizer/pcd_colorizer.hpp               |  2 +-
 .../colorizer/single_cloud_processor.hpp      |  2 +-
 .../include/mil_vision_lib/cv_tools.hpp       | 44 +++++++------------
 .../image_acquisition/ros_camera_stream.hpp   |  8 ++--
 .../mil_vision_lib/image_filtering.hpp        |  2 +-
 .../mil_vision/nodes/camera_stream_demo.cpp   |  2 +-
 .../src/mil_vision_lib/active_contours.cpp    |  2 +-
 .../colorizer/camera_observer.cpp             |  2 +-
 .../colorizer/pcd_colorizer.cpp               |  6 +--
 .../colorizer/single_cloud_processor.cpp      |  4 +-
 .../mil_vision/src/mil_vision_lib/cv_utils.cc |  2 +-
 .../src/mil_vision_lib/image_filtering.cpp    |  8 ++--
 .../mil_vision_lib/point_cloud_algorithms.cc  |  2 +-
 .../src/mil_vision_lib/visualization.cc       |  2 +-
 14 files changed, 40 insertions(+), 48 deletions(-)

diff --git a/perception/mil_vision/include/mil_vision_lib/colorizer/pcd_colorizer.hpp b/perception/mil_vision/include/mil_vision_lib/colorizer/pcd_colorizer.hpp
index 1584e238..a81b1747 100644
--- a/perception/mil_vision/include/mil_vision_lib/colorizer/pcd_colorizer.hpp
+++ b/perception/mil_vision/include/mil_vision_lib/colorizer/pcd_colorizer.hpp
@@ -4,7 +4,7 @@
 #include
 #include
 #include
-#include
+#include
 
 #include
 #include
diff --git a/perception/mil_vision/include/mil_vision_lib/colorizer/single_cloud_processor.hpp b/perception/mil_vision/include/mil_vision_lib/colorizer/single_cloud_processor.hpp
index 79859c9d..675c0a5c 100644
--- a/perception/mil_vision/include/mil_vision_lib/colorizer/single_cloud_processor.hpp
+++ b/perception/mil_vision/include/mil_vision_lib/colorizer/single_cloud_processor.hpp
@@ -3,7 +3,7 @@
 #include
 #include
 #include
-#include
+#include
 
 #include
 #include
diff --git a/perception/mil_vision/include/mil_vision_lib/cv_tools.hpp b/perception/mil_vision/include/mil_vision_lib/cv_tools.hpp
index f6669481..3a9fba30 100644
--- a/perception/mil_vision/include/mil_vision_lib/cv_tools.hpp
+++ b/perception/mil_vision/include/mil_vision_lib/cv_tools.hpp
@@ -27,11 +27,25 @@
 namespace nav {
 
 /// UTILS
 
+/// Templated pseudoinverse function implementation
 template <typename _Matrix_Type_>
-bool pseudoInverse(
-    const _Matrix_Type_ &a, _Matrix_Type_ &result,
-    double epsilon =
-        std::numeric_limits<double>::epsilon());
+bool pseudoInverse(const _Matrix_Type_ &a, _Matrix_Type_ &result,
+                   double epsilon = std::numeric_limits<double>::epsilon())
+{
+  if (a.rows() < a.cols())
+    return false;
+
+  // Thin U and V must be requested here, or matrixU()/matrixV() below are unavailable
+  Eigen::JacobiSVD<_Matrix_Type_> svd = a.jacobiSvd(Eigen::ComputeThinU | Eigen::ComputeThinV);
+
+  typename _Matrix_Type_::Scalar tolerance = epsilon * std::max(a.cols(), a.rows()) *
+                                             svd.singularValues().array().abs().maxCoeff();
+
+  result =
+      svd.matrixV() * _Matrix_Type_(_Matrix_Type_((svd.singularValues().array().abs() > tolerance)
+          .select(svd.singularValues().array().inverse(), 0)).diagonal()) * svd.matrixU().adjoint();
+  return true;
+}
 
 typedef std::vector<cv::Point> Contour;
@@ -152,26 +166,4 @@ void range_from_param(std::string &param_root, Range &range);
 
 void inParamRange(cv::Mat &src, Range &range, cv::Mat &dest);
 
-/// Templated pseudoinverse function implementation
-template <typename _Matrix_Type_>
-bool pseudoInverse(
-    const _Matrix_Type_ &a, _Matrix_Type_ &result,
-    double epsilon =
-        std::numeric_limits<double>::epsilon()) {
-  if (a.rows() < a.cols())
-    return false;
-
-  Eigen::JacobiSVD<_Matrix_Type_> svd = a.jacobiSvd();
-
-  typename _Matrix_Type_::Scalar tolerance =
-      epsilon * std::max(a.cols(), a.rows()) *
-      svd.singularValues().array().abs().maxCoeff();
-
-  result = svd.matrixV() *
-           _Matrix_Type_(
-               _Matrix_Type_((svd.singularValues().array().abs() > tolerance)
-                                 .select(svd.singularValues().array().inverse(),
-                                         0)).diagonal()) *
-           svd.matrixU().adjoint();
-}
 } // namespace nav
diff --git a/perception/mil_vision/include/mil_vision_lib/image_acquisition/ros_camera_stream.hpp b/perception/mil_vision/include/mil_vision_lib/image_acquisition/ros_camera_stream.hpp
index 820dd74a..d68f23e3 100644
--- a/perception/mil_vision/include/mil_vision_lib/image_acquisition/ros_camera_stream.hpp
+++ b/perception/mil_vision/include/mil_vision_lib/image_acquisition/ros_camera_stream.hpp
@@ -1,7 +1,7 @@
 #pragma once
 
-#include
-#include
+#include
diff --git a/perception/mil_vision/include/mil_vision_lib/image_acquisition/ros_camera_stream.hpp b/perception/mil_vision/include/mil_vision_lib/image_acquisition/ros_camera_stream.hpp
index 820dd74a..d68f23e3 100644
--- a/perception/mil_vision/include/mil_vision_lib/image_acquisition/ros_camera_stream.hpp
+++ b/perception/mil_vision/include/mil_vision_lib/image_acquisition/ros_camera_stream.hpp
@@ -1,7 +1,7 @@
 #pragma once

 #include
-#include
+#include

 #include
 #include
@@ -168,7 +168,7 @@ class ROSCameraStream : public CameraFrameSequence
 bool ROSCameraStream::init(std::string &camera_topic)
 {
-  using nav::tools::operator "" _s;  // convert raw string literal to std::string
+  using mil::tools::operator "" _s;  // convert raw string literal to std::string
   _img_topic = camera_topic;
   bool success = false;
   image_transport::CameraSubscriber cam_sub;
@@ -237,7 +237,7 @@ template
 typename ROSCameraStream::CamFrameConstPtr
 ROSCameraStream::getFrameFromTime(ros::Time desired_time)
 {
-  using nav::tools::operator "" _s;
+  using mil::tools::operator "" _s;

   // Check bounds on time
   if(desired_time < this->_start_time || desired_time > this->_end_time)
@@ -269,7 +269,7 @@ template
 typename ROSCameraStream::CamFrameConstPtr
 ROSCameraStream::operator[](int i)
 {
-  using nav::tools::operator "" _s;
+  using mil::tools::operator "" _s;

   // Prevent adding new frames while frames are being accessed
   _mtx.lock();
diff --git a/perception/mil_vision/include/mil_vision_lib/image_filtering.hpp b/perception/mil_vision/include/mil_vision_lib/image_filtering.hpp
index 0d5becd9..7200f102 100644
--- a/perception/mil_vision/include/mil_vision_lib/image_filtering.hpp
+++ b/perception/mil_vision/include/mil_vision_lib/image_filtering.hpp
@@ -5,7 +5,7 @@
 #include
 #include

-#include
+#include

 namespace nav {
diff --git a/perception/mil_vision/nodes/camera_stream_demo.cpp b/perception/mil_vision/nodes/camera_stream_demo.cpp
index 01eb785c..b6d6fae1 100755
--- a/perception/mil_vision/nodes/camera_stream_demo.cpp
+++ b/perception/mil_vision/nodes/camera_stream_demo.cpp
@@ -1,7 +1,7 @@
 #include
 #include
 #include
-#include  // dont forget this include for camera stream functionality
+#include  // don't forget this include for camera stream functionality

 using namespace std;
diff --git a/perception/mil_vision/src/mil_vision_lib/active_contours.cpp b/perception/mil_vision/src/mil_vision_lib/active_contours.cpp
index fe1bafd4..e12ad7ed 100644
--- a/perception/mil_vision/src/mil_vision_lib/active_contours.cpp
+++ b/perception/mil_vision/src/mil_vision_lib/active_contours.cpp
@@ -1,4 +1,4 @@
-#include
+#include

 using namespace std;
 using namespace cv;
diff --git a/perception/mil_vision/src/mil_vision_lib/colorizer/camera_observer.cpp b/perception/mil_vision/src/mil_vision_lib/colorizer/camera_observer.cpp
index df4dccca..eac494fe 100644
--- a/perception/mil_vision/src/mil_vision_lib/colorizer/camera_observer.cpp
+++ b/perception/mil_vision/src/mil_vision_lib/colorizer/camera_observer.cpp
@@ -2,7 +2,7 @@

 namespace nav{

-using nav::tools::operator "" _s;  // converts to std::string
+using mil::tools::operator "" _s;  // converts to std::string

 CameraObserver::CameraObserver(ros::NodeHandle &nh, std::string &pcd_in_topic, std::string &cam_topic, size_t hist_size)
 : _nh{nh}, _cam_stream{nh, hist_size}
diff --git a/perception/mil_vision/src/mil_vision_lib/colorizer/pcd_colorizer.cpp b/perception/mil_vision/src/mil_vision_lib/colorizer/pcd_colorizer.cpp
index 0b8c9833..8e3f67a4 100644
--- a/perception/mil_vision/src/mil_vision_lib/colorizer/pcd_colorizer.cpp
+++ b/perception/mil_vision/src/mil_vision_lib/colorizer/pcd_colorizer.cpp
@@ -1,4 +1,4 @@
-#include
+#include

 using namespace std;
 using namespace cv;
@@ -9,7 +9,7 @@ PcdColorizer::PcdColorizer(ros::NodeHandle nh, string input_pcd_topic)
 : _nh(nh), _img_hist_size{10}, _cloud_processor{nh, input_pcd_topic, _img_hist_size},
   _input_pcd_topic{input_pcd_topic}, _output_pcd_topic{input_pcd_topic + "_colored"}
 {
-  using nav::tools::operator "" _s;  // converts to std::string
+  using mil::tools::operator "" _s;  // converts to std::string

   try
   {
@@ -52,7 +52,7 @@ void PcdColorizer::_cloud_cb(const PCD<>::ConstPtr &cloud_in)
 // {
 //   cout << "img hist size: " << _img_hist_size << endl;
 //   // Subscribe to all rectified color img topics
-//   auto rect_color_topics = nav::tools::getRectifiedImageTopics(true);
+//   auto rect_color_topics = mil::tools::getRectifiedImageTopics(true);
 //   if(rect_color_topics.size() == 0)
 //   {
 //     _err_msg += "COLORIZER: There are no rectified color camera topics currently publishing on this ROS master (";
diff --git a/perception/mil_vision/src/mil_vision_lib/colorizer/single_cloud_processor.cpp b/perception/mil_vision/src/mil_vision_lib/colorizer/single_cloud_processor.cpp
index f22f0d2b..cd24af4e 100644
--- a/perception/mil_vision/src/mil_vision_lib/colorizer/single_cloud_processor.cpp
+++ b/perception/mil_vision/src/mil_vision_lib/colorizer/single_cloud_processor.cpp
@@ -4,13 +4,13 @@
 namespace nav {

-using nav::tools::operator "" _s;  // converts to std::string
+using mil::tools::operator "" _s;  // converts to std::string

 SingleCloudProcessor::SingleCloudProcessor(ros::NodeHandle nh, std::string& in_pcd_topic, size_t hist_size)
 : _nh{nh}, _hist_size{hist_size}
 {
   ROS_INFO(("SingleCloudProcessor: Initializing with " + in_pcd_topic).c_str());
-  auto rect_color_topics = nav::tools::getRectifiedImageTopics(true);
+  auto rect_color_topics = mil::tools::getRectifiedImageTopics(true);
   if(rect_color_topics.size() == 0)
   {
     _err_msg += "COLORIZER: There are no rectified color camera topics currently publishing on this ROS master (";
diff --git a/perception/mil_vision/src/mil_vision_lib/cv_utils.cc b/perception/mil_vision/src/mil_vision_lib/cv_utils.cc
index 1787b747..a7f6de7d 100644
--- a/perception/mil_vision/src/mil_vision_lib/cv_utils.cc
+++ b/perception/mil_vision/src/mil_vision_lib/cv_utils.cc
@@ -1,4 +1,4 @@
-#include
+#include

 namespace nav {
diff --git a/perception/mil_vision/src/mil_vision_lib/image_filtering.cpp b/perception/mil_vision/src/mil_vision_lib/image_filtering.cpp
index ddae5398..13c52ef4 100644
--- a/perception/mil_vision/src/mil_vision_lib/image_filtering.cpp
+++ b/perception/mil_vision/src/mil_vision_lib/image_filtering.cpp
@@ -1,11 +1,11 @@
-#include
+#include

 namespace nav
 {

 cv::Mat rotateKernel(const cv::Mat &kernel, float theta, bool deg, bool no_expand)
 {
-  theta = deg? theta : theta * nav::PI / 180.0f;
+  theta = deg? theta : theta * mil::PI / 180.0f;
   cv::Point2f c_org{kernel.cols * 0.5f, kernel.rows * 0.5f};  // center of original

   if(no_expand)  // rotates without expanding the canvas
@@ -73,12 +73,12 @@ float getRadialSymmetryAngle(const cv::Mat &kernel, float ang_res, bool deg)
   cv::Mat elem_wise_mult{original.size(), CV_32S};
   cv::multiply(original, original, elem_wise_mult);
   auto standard = cv::sum(elem_wise_mult)[0];
-  float max = deg? 360.0f : 2 * nav::PI;
+  float max = deg? 360.0f : 2 * mil::PI;
   float result = max;
   float best_score = 0;
   bool left_starting_region = false;

-  for(float theta = 0.0f; theta < max; theta += (deg? ang_res * 180.0f / nav::PI : ang_res))
+  for(float theta = 0.0f; theta < max; theta += (deg? ang_res * 180.0f / mil::PI : ang_res))
   {
     cv::multiply(original, rotateKernel(original, theta, deg, true), elem_wise_mult);
     double score = standard / cv::sum(elem_wise_mult)[0];
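The scan in getRadialSymmetryAngle is easier to picture in a short numpy/OpenCV sketch (illustrative only — cv2.warpAffine stands in for rotateKernel, and the score here is a normalized correlation rather than the ratio the C++ uses):

    import cv2
    import numpy as np

    def radial_symmetry_scores(kernel, ang_res_deg=5.0):
        # Correlate the kernel with rotated copies of itself; a score near 1
        # at an angle well below 360 degrees indicates rotational symmetry.
        k = kernel.astype(np.float32)
        center = (k.shape[1] * 0.5, k.shape[0] * 0.5)
        baseline = float(np.sum(k * k))
        scores = {}
        for theta in np.arange(0.0, 360.0, ang_res_deg):
            rot = cv2.getRotationMatrix2D(center, theta, 1.0)
            rotated = cv2.warpAffine(k, rot, (k.shape[1], k.shape[0]))
            scores[theta] = float(np.sum(k * rotated)) / baseline
        return scores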
diff --git a/perception/mil_vision/src/mil_vision_lib/point_cloud_algorithms.cc b/perception/mil_vision/src/mil_vision_lib/point_cloud_algorithms.cc
index 6f3263de..88d7b854 100644
--- a/perception/mil_vision/src/mil_vision_lib/point_cloud_algorithms.cc
+++ b/perception/mil_vision/src/mil_vision_lib/point_cloud_algorithms.cc
@@ -1,4 +1,4 @@
-#include
+#include

 using namespace std;
 using namespace cv;
diff --git a/perception/mil_vision/src/mil_vision_lib/visualization.cc b/perception/mil_vision/src/mil_vision_lib/visualization.cc
index f0903073..f3f8293e 100644
--- a/perception/mil_vision/src/mil_vision_lib/visualization.cc
+++ b/perception/mil_vision/src/mil_vision_lib/visualization.cc
@@ -1,4 +1,4 @@
-#include
+#include
 #include

 namespace nav {

From 7891563ab7cf385187bc57a07ea1f263f35764f3 Mon Sep 17 00:00:00 2001
From: David Soto
Date: Sun, 19 Mar 2017 06:16:54 -0400
Subject: [PATCH 245/267] GREAT MERGE: add mil_tools cpp lib

---
 mil_tools/CMakeLists.txt                        | 34 +++++++++--
 mil_tools/package.xml                           |  4 ++
 .../mil_tools/include/mil_tools/mil_tools.hpp   | 33 +++++++++++
 utils/mil_tools/src/mil_tools/mil_tools.cpp     | 57 +++++++++++++++++++
 4 files changed, 123 insertions(+), 5 deletions(-)
 create mode 100644 utils/mil_tools/include/mil_tools/mil_tools.hpp
 create mode 100644 utils/mil_tools/src/mil_tools/mil_tools.cpp

diff --git a/mil_tools/CMakeLists.txt b/mil_tools/CMakeLists.txt
index d02f9113..4c1e7b9f 100644
--- a/mil_tools/CMakeLists.txt
+++ b/mil_tools/CMakeLists.txt
@@ -1,13 +1,24 @@
 cmake_minimum_required(VERSION 2.8.3)
 project(mil_tools)

+SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -g -pedantic -Wall -std=c++11")
+
 find_package(catkin REQUIRED COMPONENTS
-  rostest
-  std_msgs
-  rospy
-  roscpp
+    rostest
+    std_msgs
+    rospy
+    roscpp
+    cv_bridge
 )

-catkin_package()
+catkin_package(
+  INCLUDE_DIRS
+    include
+  LIBRARIES
+    mil_tools_lib
+  CATKIN_DEPENDS
+    roscpp
+    cv_bridge
+)

 # Add folders to be run by python nosetests
 if(CATKIN_ENABLE_TESTING)
@@ -16,3 +27,16 @@ endif()

 catkin_python_setup()

+include_directories(
+  include
+  SYSTEM
+    ${catkin_INCLUDE_DIRS}
+    ${Boost_INCLUDE_DIRS}
+)
+
+add_library(mil_tools_lib
+  src/mil_tools/mil_tools.cpp
+)
+target_include_directories(mil_tools_lib PUBLIC include)
+target_link_libraries(mil_tools_lib ${catkin_LIBRARIES} ${Boost_LIBRARIES})
+
diff --git a/mil_tools/package.xml b/mil_tools/package.xml
index c80f0061..260b7ddc 100644
--- a/mil_tools/package.xml
+++ b/mil_tools/package.xml
@@ -9,7 +9,11 @@
   MIT

   catkin
   rostest
+  roscpp
+  roscpp
   rospy
+  cv_bridge
+  cv_bridge

diff --git a/utils/mil_tools/include/mil_tools/mil_tools.hpp b/utils/mil_tools/include/mil_tools/mil_tools.hpp
new file mode 100644
index 00000000..90b174e1
--- /dev/null
+++ b/utils/mil_tools/include/mil_tools/mil_tools.hpp
@@ -0,0 +1,33 @@
+#pragma once
+
+#include
+#include
+#include
+#include
+#include
+#include
+
+namespace mil{
+
+static const double PI = 3.1415926535897932;
+
+namespace tools
+{
+
+// Returns a vector of topic names (std::string) that end with "image_rect_color"
+// if color is false then it looks for those that end in "image_rect"
+std::vector<std::string> getRectifiedImageTopics(bool color = true);
+
+// converts raw string literals to std::string's
+inline std::string operator "" _s(const char* str, size_t /*length*/)
+{
+  return std::string(str);
+}
+
+
+// Sorts contours by curve length
+void sortContours(std::vector<std::vector<cv::Point>>& contour_vec, bool ascending=true);
+
+} // namespace mil::tools
+} // namespace mil
+
diff --git a/utils/mil_tools/src/mil_tools/mil_tools.cpp b/utils/mil_tools/src/mil_tools/mil_tools.cpp
new file mode 100644
index 00000000..00fd1c9f
--- /dev/null
+++ b/utils/mil_tools/src/mil_tools/mil_tools.cpp
@@ -0,0 +1,57 @@
+#include
+
+namespace mil{
+namespace tools{
+
+using namespace std;
+
+vector<string> getRectifiedImageTopics(bool color)
+{
+  // get all currently published topics from master
+  ros::master::V_TopicInfo master_topics;
+  ros::master::getTopics(master_topics);
+
+  // Define lambda to determine if a topic is a rectified img topic
+  string target_pattern;
+  target_pattern = color? "image_rect_color" : "image_rect";
+  auto isRectImgTopic = [&target_pattern](ros::master::TopicInfo topic_info)
+  {
+    // Find last slash
+    size_t final_slash = topic_info.name.rfind('/');
+
+    // Match end of topic name to image_rect pattern
+    if(final_slash == string::npos)
+      return false;
+    else
+    {
+      string topic_name_end = topic_info.name.substr(final_slash + 1);
+      return (topic_name_end.find(target_pattern) != string::npos)? true : false;
+    }
+  };  // end lambda isRectImgTopic
+
+  // return list of rectified image topics
+  vector<string> image_rect_topics;
+  for(ros::master::TopicInfo topic : master_topics)
+  {
+    if(isRectImgTopic(topic))
+      image_rect_topics.push_back(topic.name);
+  }
+  return image_rect_topics;
+}
+
+
+void sortContours(vector<vector<cv::Point>>& contour_vec, bool ascending)
+{
+  auto comp_length = ascending?
+    [](vector<cv::Point> const &contour1, vector<cv::Point> const &contour2)
+    { return fabs(arcLength(contour1, true)) < fabs(arcLength(contour2, true)); } :
+    [](vector<cv::Point> const &contour1, vector<cv::Point> const &contour2)
+    { return fabs(arcLength(contour1, true)) > fabs(arcLength(contour2, true)); };
+
+  sort(contour_vec.begin(), contour_vec.end(), comp_length);
+}
+
+
+} // namespace mil::tools
+} // namespace mil
+

From 46ba90e97750d08884c8d2b5cb569f1f6fdc2582 Mon Sep 17 00:00:00 2001
From: David Soto
Date: Sun, 19 Mar 2017 04:51:44 -0400
Subject: [PATCH 246/267] GREAT MERGE: convert sub8_sonar to mil_passive_sonar

Added 'passive' to the name because we now have an imaging sonar.
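For anyone depending on the old name, the migration is a one-line import change; roughly (hypothetical caller code — only the package names here come from this patch):

    # before
    import sub8_sonar

    # after
    import mil_passive_sonar
    import multilateration  # unchanged; still installed by the same setup.py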
---
 drivers/mil_passive_sonar/CMakeLists.txt | 2 +-
 drivers/mil_passive_sonar/package.xml    | 6 ++----
 drivers/mil_passive_sonar/setup.py       | 2 +-
 3 files changed, 4 insertions(+), 6 deletions(-)

diff --git a/drivers/mil_passive_sonar/CMakeLists.txt b/drivers/mil_passive_sonar/CMakeLists.txt
index 15603797..94c4818b 100644
--- a/drivers/mil_passive_sonar/CMakeLists.txt
+++ b/drivers/mil_passive_sonar/CMakeLists.txt
@@ -1,5 +1,5 @@
 cmake_minimum_required(VERSION 2.8.3)
-project(sub8_sonar)
+project(mil_passive_sonar)

 find_package(catkin REQUIRED COMPONENTS
   rospy
diff --git a/drivers/mil_passive_sonar/package.xml b/drivers/mil_passive_sonar/package.xml
index aaa335ae..f3f79469 100644
--- a/drivers/mil_passive_sonar/package.xml
+++ b/drivers/mil_passive_sonar/package.xml
@@ -1,8 +1,8 @@

-  sub8_sonar
+  mil_passive_sonar
   1.0.0
-  The sub8_sonar package
+  Package for multilateration and passive sonar

   Matthew Langford
   David Soto
@@ -13,6 +13,4 @@
   rospy
   rospy

-
-
diff --git a/drivers/mil_passive_sonar/setup.py b/drivers/mil_passive_sonar/setup.py
index 2486c904..ccbc94ee 100644
--- a/drivers/mil_passive_sonar/setup.py
+++ b/drivers/mil_passive_sonar/setup.py
@@ -5,7 +5,7 @@

 # fetch values from package.xml
 setup_args = generate_distutils_setup(
-    packages=['sub8_sonar', 'multilateration'],
+    packages=['mil_passive_sonar', 'multilateration'],
 )

 setup(**setup_args)

From 0ff7acbdf39ed1e735ec96cb6c08783409bbe86c Mon Sep 17 00:00:00 2001
From: David Soto
Date: Sun, 19 Mar 2017 07:31:09 -0400
Subject: [PATCH 247/267] GREAT MERGE: add sub8_perception tools to mil_vision

---
 .../mil_vision}/ros_tools/easy_thresh.py  | 0
 .../mil_vision}/ros_tools/image_dumper.py | 0
 2 files changed, 0 insertions(+), 0 deletions(-)
 rename {.great_merge/perception/sub8_perception => perception/mil_vision}/ros_tools/easy_thresh.py (100%)
 rename {.great_merge/perception/sub8_perception => perception/mil_vision}/ros_tools/image_dumper.py (100%)

diff --git a/.great_merge/perception/sub8_perception/ros_tools/easy_thresh.py b/perception/mil_vision/ros_tools/easy_thresh.py
similarity index 100%
rename from .great_merge/perception/sub8_perception/ros_tools/easy_thresh.py
rename to perception/mil_vision/ros_tools/easy_thresh.py
diff --git a/.great_merge/perception/sub8_perception/ros_tools/image_dumper.py b/perception/mil_vision/ros_tools/image_dumper.py
similarity index 100%
rename from .great_merge/perception/sub8_perception/ros_tools/image_dumper.py
rename to perception/mil_vision/ros_tools/image_dumper.py

From bfadfc1a09f9cedf69a1fd647fee7a85d7b895e3 Mon Sep 17 00:00:00 2001
From: Kevin Allen
Date: Mon, 20 Mar 2017 20:58:43 -0400
Subject: [PATCH 248/267] REPO: move mil_tools to utils/

---
 {mil_tools => utils/mil_tools}/CMakeLists.txt                     | 0
 {mil_tools => utils/mil_tools}/mil_misc_tools/__init__.py         | 0
 {mil_tools => utils/mil_tools}/mil_misc_tools/download.py         | 0
 {mil_tools => utils/mil_tools}/mil_misc_tools/text_effects.py     | 0
 {mil_tools => utils/mil_tools}/mil_ros_tools/__init__.py          | 0
 {mil_tools => utils/mil_tools}/mil_ros_tools/bag_crawler.py       | 0
 {mil_tools => utils/mil_tools}/mil_ros_tools/cv_debug.py          | 0
 {mil_tools => utils/mil_tools}/mil_ros_tools/func_helpers.py      | 0
 {mil_tools => utils/mil_tools}/mil_ros_tools/geometry_helpers.py  | 0
 {mil_tools => utils/mil_tools}/mil_ros_tools/image_helpers.py     | 0
 {mil_tools => utils/mil_tools}/mil_ros_tools/init_helpers.py      | 0
 {mil_tools => utils/mil_tools}/mil_ros_tools/msg_helpers.py       | 0
 {mil_tools => utils/mil_tools}/mil_ros_tools/rviz_helpers.py      | 0
 {mil_tools => utils/mil_tools}/mil_ros_tools/threading_helpers.py | 0
 {mil_tools => utils/mil_tools}/mil_tools/__init__.py              | 0
 {mil_tools => utils/mil_tools}/nodes/clicked_point_recorder.py    | 0
 {mil_tools => utils/mil_tools}/nodes/ogrid_draw.py                | 0
 {mil_tools => utils/mil_tools}/nodes/rviz_waypoint.py             | 0
 {mil_tools => utils/mil_tools}/nodes/tf_fudger.py                 | 0
 {mil_tools => utils/mil_tools}/nodes/tf_to_gazebo.py              | 0
 {mil_tools => utils/mil_tools}/nodes/video_player                 | 0
 {mil_tools => utils/mil_tools}/package.xml                        | 0
 {mil_tools => utils/mil_tools}/readme.md                          | 0
 {mil_tools => utils/mil_tools}/setup.py                           | 0
 {mil_tools => utils/mil_tools}/test/math_helpers_test.py          | 0
 {mil_tools => utils/mil_tools}/test/test_ros_tools.py             | 0
 26 files changed, 0 insertions(+), 0 deletions(-)
 rename {mil_tools => utils/mil_tools}/CMakeLists.txt (100%)
 rename {mil_tools => utils/mil_tools}/mil_misc_tools/__init__.py (100%)
 rename {mil_tools => utils/mil_tools}/mil_misc_tools/download.py (100%)
 rename {mil_tools => utils/mil_tools}/mil_misc_tools/text_effects.py (100%)
 rename {mil_tools => utils/mil_tools}/mil_ros_tools/__init__.py (100%)
 rename {mil_tools => utils/mil_tools}/mil_ros_tools/bag_crawler.py (100%)
 rename {mil_tools => utils/mil_tools}/mil_ros_tools/cv_debug.py (100%)
 rename {mil_tools => utils/mil_tools}/mil_ros_tools/func_helpers.py (100%)
 rename {mil_tools => utils/mil_tools}/mil_ros_tools/geometry_helpers.py (100%)
 rename {mil_tools => utils/mil_tools}/mil_ros_tools/image_helpers.py (100%)
 rename {mil_tools => utils/mil_tools}/mil_ros_tools/init_helpers.py (100%)
 rename {mil_tools => utils/mil_tools}/mil_ros_tools/msg_helpers.py (100%)
 rename {mil_tools => utils/mil_tools}/mil_ros_tools/rviz_helpers.py (100%)
 rename {mil_tools => utils/mil_tools}/mil_ros_tools/threading_helpers.py (100%)
 rename {mil_tools => utils/mil_tools}/mil_tools/__init__.py (100%)
 rename {mil_tools => utils/mil_tools}/nodes/clicked_point_recorder.py (100%)
 rename {mil_tools => utils/mil_tools}/nodes/ogrid_draw.py (100%)
 rename {mil_tools => utils/mil_tools}/nodes/rviz_waypoint.py (100%)
 rename {mil_tools => utils/mil_tools}/nodes/tf_fudger.py (100%)
 rename {mil_tools => utils/mil_tools}/nodes/tf_to_gazebo.py (100%)
 rename {mil_tools => utils/mil_tools}/nodes/video_player (100%)
 rename {mil_tools => utils/mil_tools}/package.xml (100%)
 rename {mil_tools => utils/mil_tools}/readme.md (100%)
 rename {mil_tools => utils/mil_tools}/setup.py (100%)
 rename {mil_tools => utils/mil_tools}/test/math_helpers_test.py (100%)
 rename {mil_tools => utils/mil_tools}/test/test_ros_tools.py (100%)

diff --git a/mil_tools/CMakeLists.txt b/utils/mil_tools/CMakeLists.txt
similarity index 100%
rename from mil_tools/CMakeLists.txt
rename to utils/mil_tools/CMakeLists.txt
diff --git a/mil_tools/mil_misc_tools/__init__.py b/utils/mil_tools/mil_misc_tools/__init__.py
similarity index 100%
rename from mil_tools/mil_misc_tools/__init__.py
rename to utils/mil_tools/mil_misc_tools/__init__.py
diff --git a/mil_tools/mil_misc_tools/download.py b/utils/mil_tools/mil_misc_tools/download.py
similarity index 100%
rename from mil_tools/mil_misc_tools/download.py
rename to utils/mil_tools/mil_misc_tools/download.py
diff --git a/mil_tools/mil_misc_tools/text_effects.py b/utils/mil_tools/mil_misc_tools/text_effects.py
similarity index 100%
rename from mil_tools/mil_misc_tools/text_effects.py
rename to utils/mil_tools/mil_misc_tools/text_effects.py
diff --git a/mil_tools/mil_ros_tools/__init__.py b/utils/mil_tools/mil_ros_tools/__init__.py
similarity index 100%
rename from mil_tools/mil_ros_tools/__init__.py
rename to utils/mil_tools/mil_ros_tools/__init__.py
diff --git a/mil_tools/mil_ros_tools/bag_crawler.py b/utils/mil_tools/mil_ros_tools/bag_crawler.py
similarity index 100%
rename from mil_tools/mil_ros_tools/bag_crawler.py
rename to utils/mil_tools/mil_ros_tools/bag_crawler.py
diff --git a/mil_tools/mil_ros_tools/cv_debug.py b/utils/mil_tools/mil_ros_tools/cv_debug.py
similarity index 100%
rename from mil_tools/mil_ros_tools/cv_debug.py
rename to utils/mil_tools/mil_ros_tools/cv_debug.py
diff --git a/mil_tools/mil_ros_tools/func_helpers.py b/utils/mil_tools/mil_ros_tools/func_helpers.py
similarity index 100%
rename from mil_tools/mil_ros_tools/func_helpers.py
rename to utils/mil_tools/mil_ros_tools/func_helpers.py
diff --git a/mil_tools/mil_ros_tools/geometry_helpers.py b/utils/mil_tools/mil_ros_tools/geometry_helpers.py
similarity index 100%
rename from mil_tools/mil_ros_tools/geometry_helpers.py
rename to utils/mil_tools/mil_ros_tools/geometry_helpers.py
diff --git a/mil_tools/mil_ros_tools/image_helpers.py b/utils/mil_tools/mil_ros_tools/image_helpers.py
similarity index 100%
rename from mil_tools/mil_ros_tools/image_helpers.py
rename to utils/mil_tools/mil_ros_tools/image_helpers.py
diff --git a/mil_tools/mil_ros_tools/init_helpers.py b/utils/mil_tools/mil_ros_tools/init_helpers.py
similarity index 100%
rename from mil_tools/mil_ros_tools/init_helpers.py
rename to utils/mil_tools/mil_ros_tools/init_helpers.py
diff --git a/mil_tools/mil_ros_tools/msg_helpers.py b/utils/mil_tools/mil_ros_tools/msg_helpers.py
similarity index 100%
rename from mil_tools/mil_ros_tools/msg_helpers.py
rename to utils/mil_tools/mil_ros_tools/msg_helpers.py
diff --git a/mil_tools/mil_ros_tools/rviz_helpers.py b/utils/mil_tools/mil_ros_tools/rviz_helpers.py
similarity index 100%
rename from mil_tools/mil_ros_tools/rviz_helpers.py
rename to utils/mil_tools/mil_ros_tools/rviz_helpers.py
diff --git a/mil_tools/mil_ros_tools/threading_helpers.py b/utils/mil_tools/mil_ros_tools/threading_helpers.py
similarity index 100%
rename from mil_tools/mil_ros_tools/threading_helpers.py
rename to utils/mil_tools/mil_ros_tools/threading_helpers.py
diff --git a/mil_tools/mil_tools/__init__.py b/utils/mil_tools/mil_tools/__init__.py
similarity index 100%
rename from mil_tools/mil_tools/__init__.py
rename to utils/mil_tools/mil_tools/__init__.py
diff --git a/mil_tools/nodes/clicked_point_recorder.py b/utils/mil_tools/nodes/clicked_point_recorder.py
similarity index 100%
rename from mil_tools/nodes/clicked_point_recorder.py
rename to utils/mil_tools/nodes/clicked_point_recorder.py
diff --git a/mil_tools/nodes/ogrid_draw.py b/utils/mil_tools/nodes/ogrid_draw.py
similarity index 100%
rename from mil_tools/nodes/ogrid_draw.py
rename to utils/mil_tools/nodes/ogrid_draw.py
diff --git a/mil_tools/nodes/rviz_waypoint.py b/utils/mil_tools/nodes/rviz_waypoint.py
similarity index 100%
rename from mil_tools/nodes/rviz_waypoint.py
rename to utils/mil_tools/nodes/rviz_waypoint.py
diff --git a/mil_tools/nodes/tf_fudger.py b/utils/mil_tools/nodes/tf_fudger.py
similarity index 100%
rename from mil_tools/nodes/tf_fudger.py
rename to utils/mil_tools/nodes/tf_fudger.py
diff --git a/mil_tools/nodes/tf_to_gazebo.py b/utils/mil_tools/nodes/tf_to_gazebo.py
similarity index 100%
rename from mil_tools/nodes/tf_to_gazebo.py
rename to utils/mil_tools/nodes/tf_to_gazebo.py
diff --git a/mil_tools/nodes/video_player b/utils/mil_tools/nodes/video_player
similarity index 100%
rename from mil_tools/nodes/video_player
rename to utils/mil_tools/nodes/video_player
diff --git a/mil_tools/package.xml b/utils/mil_tools/package.xml
similarity index 100%
rename from mil_tools/package.xml
rename to utils/mil_tools/package.xml
diff --git a/mil_tools/readme.md b/utils/mil_tools/readme.md
similarity index 100%
rename from mil_tools/readme.md
rename to utils/mil_tools/readme.md
diff --git a/mil_tools/setup.py b/utils/mil_tools/setup.py
similarity index 100%
rename from mil_tools/setup.py
rename to utils/mil_tools/setup.py
diff --git a/mil_tools/test/math_helpers_test.py b/utils/mil_tools/test/math_helpers_test.py
similarity index 100%
rename from mil_tools/test/math_helpers_test.py
rename to utils/mil_tools/test/math_helpers_test.py
diff --git a/mil_tools/test/test_ros_tools.py b/utils/mil_tools/test/test_ros_tools.py
similarity index 100%
rename from mil_tools/test/test_ros_tools.py
rename to utils/mil_tools/test/test_ros_tools.py

From 15dd9b347e0b15fc814558216eaad2d1d79a2936 Mon Sep 17 00:00:00 2001
From: Kevin Allen
Date: Mon, 20 Mar 2017 20:59:06 -0400
Subject: [PATCH 249/267] VISION: add mil_tools as dependency to vision

---
 perception/mil_vision/package.xml | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/perception/mil_vision/package.xml b/perception/mil_vision/package.xml
index b57848f0..01ac5999 100644
--- a/perception/mil_vision/package.xml
+++ b/perception/mil_vision/package.xml
@@ -19,6 +19,8 @@
   cv_bridge
   mil_msgs
   mil_msgs
+  mil_tools
+  mil_tools
   message_generation
   message_runtime
   std_msgs

From d6719e4769b7c8c358ef9d3a370e057e558b4c24 Mon Sep 17 00:00:00 2001
From: kingkevlar
Date: Fri, 24 Mar 2017 16:46:14 -0400
Subject: [PATCH 250/267] ONLINE_BAGGER: Add individual bag times to topic config

---
 .../config/online_bagger_example.yaml     |  18 ---
 .../online_bagger/CMakeLists.txt          |   0
 .../config/online_bagger_example.yaml     |  15 +++
 .../launch/online_bagger_example.launch   |   2 +-
 .../online_bagger/nodes/online_bagger.py  | 123 +++++++-----------
 .../mil_ros_tools/online_bagger/package.xml |   0
 .../online_bagger/srv/BaggerCommands.srv  |   1 -
 7 files changed, 60 insertions(+), 99 deletions(-)
 delete mode 100644 utils/mil_ros_tools/online_bagger/config/online_bagger_example.yaml
 rename utils/{ => mil_tools}/mil_ros_tools/online_bagger/CMakeLists.txt (100%)
 create mode 100644 utils/mil_tools/mil_ros_tools/online_bagger/config/online_bagger_example.yaml
 rename utils/{ => mil_tools}/mil_ros_tools/online_bagger/launch/online_bagger_example.launch (95%)
 rename utils/{ => mil_tools}/mil_ros_tools/online_bagger/nodes/online_bagger.py (69%)
 rename utils/{ => mil_tools}/mil_ros_tools/online_bagger/package.xml (100%)
 rename utils/{ => mil_tools}/mil_ros_tools/online_bagger/srv/BaggerCommands.srv (66%)

diff --git a/utils/mil_ros_tools/online_bagger/config/online_bagger_example.yaml b/utils/mil_ros_tools/online_bagger/config/online_bagger_example.yaml
deleted file mode 100644
index 5c62e291..00000000
--- a/utils/mil_ros_tools/online_bagger/config/online_bagger_example.yaml
+++ /dev/null
@@ -1,18 +0,0 @@
-# list an arbitrary set of topics to subscribe to and stream:
-
-# the more conservative between the two, stream_time and ram_limit are always in place
-# the more conservative will be enforced
-stream_time: 10 # seconds
-# ram_limit: 0.25 # gb soft limit not hard enforced
-topics: ["/odom",
-        "/absodom",
-        "/stereo/left/camera_info",
-        "/stereo/left/image_raw",
-        "/stereo/right/camera_info",
-        "/stereo/right/image_raw",
-        "/velodyne_points",
-        "/ooka"]
-
-# comment out if default home/user/online_bagger is desired
-# bag_package_path: ''
-
diff --git a/utils/mil_ros_tools/online_bagger/CMakeLists.txt b/utils/mil_tools/mil_ros_tools/online_bagger/CMakeLists.txt
similarity index 100%
rename from utils/mil_ros_tools/online_bagger/CMakeLists.txt
rename to utils/mil_tools/mil_ros_tools/online_bagger/CMakeLists.txt
diff --git a/utils/mil_tools/mil_ros_tools/online_bagger/config/online_bagger_example.yaml b/utils/mil_tools/mil_ros_tools/online_bagger/config/online_bagger_example.yaml
new file mode 100644
index 00000000..f88f6d57
--- /dev/null
+++ b/utils/mil_tools/mil_ros_tools/online_bagger/config/online_bagger_example.yaml
@@ -0,0 +1,15 @@
+# list an arbitrary set of topics to subscribe to and stream:
+
+stream_time: 10 # seconds
+
+topics: [["/odom", 300],
+         ["/absodom", 300],
+         ["/stereo/left/camera_info"],
+         ["/stereo/left/image_raw", 20],
+         ["/stereo/right/camera_info", 20],
+         ["/stereo/right/image_raw", 20],
+         ["/velodyne_points", 300],
+         ["/ooka", 100]]
+
+# comment out if default home/user/online_bagger is desired
+# bag_package_path: ''
\ No newline at end of file
diff --git a/utils/mil_ros_tools/online_bagger/launch/online_bagger_example.launch b/utils/mil_tools/mil_ros_tools/online_bagger/launch/online_bagger_example.launch
similarity index 95%
rename from utils/mil_ros_tools/online_bagger/launch/online_bagger_example.launch
rename to utils/mil_tools/mil_ros_tools/online_bagger/launch/online_bagger_example.launch
index e693f616..f25eba59 100644
--- a/utils/mil_ros_tools/online_bagger/launch/online_bagger_example.launch
+++ b/utils/mil_tools/mil_ros_tools/online_bagger/launch/online_bagger_example.launch
@@ -1,4 +1,4 @@
 
-
+
 
\ No newline at end of file
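The new config format makes the per-topic time optional; loaded through rosparam it comes back as a list of lists, so a consumer normalizes it roughly like this (a sketch mirroring the node code below):

    import rospy

    subscriber_list = rospy.get_param('/online_bagger/topics')
    # e.g. [['/odom', 300], ['/stereo/left/camera_info']]
    for entry in subscriber_list:
        if len(entry) == 1:  # no per-topic time given, fall back to the global one
            entry.append(rospy.get_param('/online_bagger/stream_time', 30))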
diff --git a/utils/mil_ros_tools/online_bagger/nodes/online_bagger.py b/utils/mil_tools/mil_ros_tools/online_bagger/nodes/online_bagger.py
similarity index 69%
rename from utils/mil_ros_tools/online_bagger/nodes/online_bagger.py
rename to utils/mil_tools/mil_ros_tools/online_bagger/nodes/online_bagger.py
index ac7f9ecd..11ce54b2 100755
--- a/utils/mil_ros_tools/online_bagger/nodes/online_bagger.py
+++ b/utils/mil_tools/mil_ros_tools/online_bagger/nodes/online_bagger.py
@@ -19,9 +19,10 @@
 For example:

     bagging_server = rospy.ServiceProxy('/online_bagger/dump', BaggerCommands)
-    bag_status = bagging_server(bag_name, bag_time)
-    bag_name = name of bag (leave blank to use default name: current date and time)
-    bag_time = float32 (leave blank to dump entire bag)
+    bag_status = bagging_server(bag_name)
+    bag_name = name of bag (leave blank to use default name: current date and time) : ''
+    * Blank is an empty string
+
 """

 class OnlineBagger(object):
@@ -45,7 +46,7 @@ def __init__(self):
                                            self.start_bagging)

         self.subscribe_loop()
-        rospy.loginfo('subscriber success: %s', self.subscriber_list)
+        rospy.loginfo('subscriber list: %s', self.subscriber_list)

     def get_params(self):

@@ -53,10 +54,11 @@
         Retrieve parameters from param server.
         """

-        self.topics = rospy.get_param('/online_bagger/topics')
+        self.subscriber_list = rospy.get_param('/online_bagger/topics')
         self.dir = rospy.get_param('/online_bagger/bag_package_path', default=os.environ['HOME'])
         self.stream_time = rospy.get_param('/online_bagger/stream_time', default=30)  # seconds

+        rospy.loginfo('subscriber list: %s', self.subscriber_list)
        rospy.loginfo('stream_time: %s seconds', self.stream_time)

     def make_dicts(self):

@@ -64,12 +66,14 @@
         """
         Make dictionary with sliceable deques() that will be filled with messages and time stamps.

-        Subscriber list contains all of the topics and their subscription status:
+        Subscriber list contains all of the topics, their stream time and their subscription status:
             A True status for a given topic corresponds to a successful subscription
-            A False status indicates a failed subscription
+            A False status indicates a failed subscription.
+
+        Stream time for an individual topic is specified in seconds.

         For Example:
-            self.subscriber_list[0] = [['/odom', False], ['/absodom', True]]
+            self.subscriber_list[0:1] = [['/odom', 300, False], ['/absodom', 300, True]]

             Indicates that '/odom' has not been subscribed to, but '/absodom' has been subscribed to

@@ -83,26 +87,25 @@
             '/odom' is a potential topic name
             self.topic_message['/odom'] is a deque
             self.topic_message['/odom'][0] is the oldest message available in the deque
-            and its time stamp if available. It is a tuple with each element: (msg, time_stamp)
+            and its time stamp if available. It is a tuple with each element: (time_stamp, msg)
+            self.topic_message['/odom'][0][0] is the time stamp for the oldest message
+            self.topic_message['/odom'][0][1] is the message associated with the oldest topic
         """

-        # Courtesy Kindall
-        # http://stackoverflow.com/questions/10003143/how-to-slice-a-deque
-        class SliceableDeque(deque):
-            def __getitem__(self, index):
-                if isinstance(index, slice):
-                    return type(self)(itertools.islice(self, index.start,
-                                                       index.stop, index.step))
-                return deque.__getitem__(self, index)
-
         self.topic_messages = {}
-        self.subscriber_list = []
+        self.topic_list = []

-        for topic in self.topics:
-            self.topic_messages[topic] = SliceableDeque(deque())
-            self.subscriber_list.append([topic, False])
+        rospy.loginfo('items: %s', len(self.subscriber_list))

-        rospy.loginfo('failed topics: %s', self.subscriber_list)
+        for topic in self.subscriber_list:
+            if len(topic) == 1:
+                topic.append(self.stream_time)
+                rospy.loginfo('no stream time provided, default used for: %s', topic)
+            self.topic_messages[topic[0]] = deque()
+            topic.append(False)
+            self.topic_list.append(topic[0])
+
+        rospy.loginfo('topics status: %s', self.subscriber_list)

     def subscribe_loop(self):

@@ -113,6 +116,7 @@
         """

         i = 0
+        # if self.successful_subscription_count == 0 and not rospy.is_shutdown():
         while self.successful_subscription_count == 0 and not rospy.is_shutdown():
             self.subscribe()
             rospy.sleep(0.1)
@@ -137,14 +141,14 @@
         """

         for topic in self.subscriber_list:
-            if not topic[1]:
+            if not topic[2]:
                 msg_class = rostopic.get_topic_class(topic[0])
                 if msg_class[1] is not None:
                     self.successful_subscription_count = self.successful_subscription_count + 1
                     rospy.Subscriber(topic[0], msg_class[0],
                                      lambda msg, _topic=topic[0]: self.bagger_callback(msg, _topic))

-                    topic[1] = True  # successful subscription
+                    topic[2] = True  # successful subscription

     def get_topic_duration(self, topic):

@@ -191,8 +195,14 @@
         rospy.logdebug('topic type: %s', type(msg))
         rospy.logdebug('number of topic messages: %s', self.get_topic_message_count(topic))

-        if time_diff > rospy.Duration(self.stream_time):
+        index = self.topic_list.index(topic)
+
+        while time_diff > rospy.Duration(self.subscriber_list[index][1]) and not rospy.is_shutdown():
             self.topic_messages[topic].popleft()
+            time_diff = self.get_topic_duration(topic)
+
+        # re-subscribe to failed topics if they become available later
+        self.subscribe()

     def get_topic_message_count(self, topic):

@@ -218,11 +228,10 @@

         """
         Create ros bag save directory
-
         If no bag name is provided, the current date/time is used as default.
         """

-        types = ('', None)
+        types = ('', None, False)
         if bag_name in types:
             bag_name = str(datetime.date.today()) + '-' + str(datetime.datetime.now().time())[0:8]
@@ -237,43 +246,14 @@

         self.bag = rosbag.Bag(os.path.join(directory, bag_name + '.bag'), 'w')

-    def get_time_index(self, topic, requested_seconds):
-
-        """
-        Return the index for the time index for a topic at 'n' seconds from the end of the dequeue
-
-        For example, to bag the last 10 seconds of data, the index for 10 seconds back from the most
-        recent message can be obtained with this function.
-
-        The number of requested seconds should be the number of seoncds desired from
-        the end of deque. (ie. requested_seconds = 10 )
-
-        If the desired time length of the bag is greater than the available messages it will output a
-        message and return how ever many seconds of data are avaiable at the moment.
-
-        Seconds is of a number type (not a rospy.Time type) (ie. int, float)
-        """
-
-        topic_duration = self.get_topic_duration(topic).to_sec()
-
-        ratio = requested_seconds / topic_duration
-        index = int(self.get_topic_message_count(topic) * (1 - min(ratio, 1)))
-
-        self.bag_report = 'The requested %s seconds were bagged' % requested_seconds
-
-        if index == 0:
-            self.bag_report = 'Only %s seconds of the request %s seconds were available, all \
-            messages were bagged' % (topic_duration, requested_seconds)
-        return index
-
     def set_bagger_status(self):

         """
-        Print status of online bagger
+        Set status of online bagger
         """

-        self.bagger_status = ('Subscriber List: ' + str(self.subscriber_list) + ' Message Count: '
-                              + str(self.get_total_message_count()))
+        self.bagger_status = 'Subscriber List: ' + str(self.subscriber_list) + ' Message Count: ' \
+                             + str(self.get_total_message_count())

     def start_bagging(self, req):

@@ -282,48 +262,33 @@
         during the bagging process, resumes streaming when over.
         """

-        rospy.loginfo('bagging %s s', req.bag_time)
-
         self.streaming = False
         self.set_bag_directory(req.bag_name)
-
-        requested_seconds = req.bag_time
-
         self.set_bagger_status()

         for topic in self.subscriber_list:
-            if topic[1] == False:
+            if topic[2] == False:
                 continue

             topic = topic[0]
             rospy.loginfo('topic: %s', topic)

-            # if no number is provided or a zero is given, bag all messages
-            types = (0, 0.0, None)
-            if req.bag_time in types:
-                bag_index = 0
-                self.bag_report = 'All messages were bagged'
-
-            # get time index the normal way
-            else:
-                bag_index = self.get_time_index(topic, requested_seconds)
-
             messages = 0  # message number in a given topic
-            for msgs in self.topic_messages[topic][bag_index:]:
+            for msgs in self.topic_messages[topic]:
                 messages = messages + 1
                 self.bag.write(topic, msgs[1], t=msgs[0])
                 if messages % 100 == 0:  # print every 100th topic, type and time
                     rospy.loginfo('topic: %s, message type: %s, time: %s',
                                   topic, type(msgs[1]), type(msgs[0]))

-            # empty deque when done writing to bag
-            self.topic_messages[topic].clear()
+        # empty deque when done writing to bag
+        self.topic_messages[topic].clear()

         rospy.loginfo('Bag Report: %s', self.bagger_status)
         self.bag.close()
         rospy.loginfo('bagging finished!')
         self.streaming = True

-        return self.bagger_status

 if __name__ == "__main__":
diff --git a/utils/mil_ros_tools/online_bagger/package.xml b/utils/mil_tools/mil_ros_tools/online_bagger/package.xml
similarity index 100%
rename from utils/mil_ros_tools/online_bagger/package.xml
rename to utils/mil_tools/mil_ros_tools/online_bagger/package.xml
diff --git a/utils/mil_ros_tools/online_bagger/srv/BaggerCommands.srv b/utils/mil_tools/mil_ros_tools/online_bagger/srv/BaggerCommands.srv
similarity index 66%
rename from utils/mil_ros_tools/online_bagger/srv/BaggerCommands.srv
rename to utils/mil_tools/mil_ros_tools/online_bagger/srv/BaggerCommands.srv
index cc52d84e..92dcaa6c 100644
--- a/utils/mil_ros_tools/online_bagger/srv/BaggerCommands.srv
+++ b/utils/mil_tools/mil_ros_tools/online_bagger/srv/BaggerCommands.srv
@@ -1,4 +1,3 @@
 string bag_name
-float32 bag_time
 ---
 string status

From bc89570c0baaed8a4d236ef7d1970d3d4e50aec2 Mon Sep 17 00:00:00 2001
From: kingkevlar
Date: Fri, 24 Mar 2017 22:15:49 -0400
Subject: [PATCH 251/267] ONLINE_BAGGER: Add bag time to service call

---
 .../online_bagger/nodes/online_bagger.py | 63 ++++++++++++++++---
 .../online_bagger/srv/BaggerCommands.srv |  1 +
 2 files changed, 54 insertions(+), 10 deletions(-)

diff --git a/utils/mil_tools/mil_ros_tools/online_bagger/nodes/online_bagger.py b/utils/mil_tools/mil_ros_tools/online_bagger/nodes/online_bagger.py
index 11ce54b2..c64ee881 100755
--- a/utils/mil_tools/mil_ros_tools/online_bagger/nodes/online_bagger.py
+++ b/utils/mil_tools/mil_ros_tools/online_bagger/nodes/online_bagger.py
@@ -19,10 +19,11 @@
 For example:

     bagging_server = rospy.ServiceProxy('/online_bagger/dump', BaggerCommands)
-    bag_status = bagging_server(bag_name)
-    bag_name = name of bag (leave blank to use default name: current date and time) : ''
-    * Blank is an empty string
-
+    bag_status = bagging_server(bag_name, bag_time)
+    bag_name = name of bag (leave blank to use default name: current date and time)
+        Provide an empty string '' to use the default name
+    bag_time = float32 (leave blank to dump entire bag):
+        Provide 0.0 or 0 to bag everything
 """

 class OnlineBagger(object):
@@ -32,7 +33,7 @@ def __init__(self):

         """
         Make dictionary of dequeues.
         Subscribe to set of topics defined by the yaml file in directory
-        Stream topics up to a given ram limit, dump oldest messages when limit is reached
+        Stream topics up to a given stream time, dump oldest messages when limit is reached
         Set up service to bag n seconds of data default to all of available data
         """

@@ -95,13 +96,18 @@ def make_dicts(self):
         self.topic_messages = {}
         self.topic_list = []

-        rospy.loginfo('items: %s', len(self.subscriber_list))
+        class SliceableDeque(deque):
+            def __getitem__(self, index):
+                if isinstance(index, slice):
+                    return type(self)(itertools.islice(self, index.start,
+                                                       index.stop, index.step))
+                return deque.__getitem__(self, index)

         for topic in self.subscriber_list:
             if len(topic) == 1:
                 topic.append(self.stream_time)
                 rospy.loginfo('no stream time provided, default used for: %s', topic)
-            self.topic_messages[topic[0]] = deque()
+            self.topic_messages[topic[0]] = SliceableDeque(deque())
             topic.append(False)
             self.topic_list.append(topic[0])

@@ -169,11 +175,36 @@ def get_header_time(self, msg):
         else:
             return rospy.get_rostime()

+    def get_time_index(self, topic, requested_seconds):
+
+        """
+        Return the deque index for a topic at 'n' seconds from the end of the deque
+        For example, to bag the last 10 seconds of data, the index for 10 seconds back from the most
+        recent message can be obtained with this function.
+        The number of requested seconds should be the number of seconds desired from
+        the end of the deque. (i.e. requested_seconds = 10)
+        If the desired time length of the bag is greater than the available messages it will output a
+        message and return however many seconds of data are available at the moment.
+        Seconds is of a number type (not a rospy.Time type) (i.e. int, float)
+        """
+
+        topic_duration = self.get_topic_duration(topic).to_sec()
+
+        ratio = requested_seconds / topic_duration
+        index = int(self.get_topic_message_count(topic) * (1 - min(ratio, 1)))
+
+        self.bag_report = 'The requested %s seconds were bagged' % requested_seconds
+
+        if index == 0:
+            self.bag_report = 'Only %s seconds of the request %s seconds were available, all \
+            messages were bagged' % (topic_duration, requested_seconds)
+        return index
+
     def bagger_callback(self, msg, topic):

         """
         Streaming callback function, stops streaming during bagging process
-        also pops off msgs from dequeue if stream size is greater than specified ram limit
+        also pops off msgs from dequeue if stream size is greater than specified stream_time

         Stream, callback function does nothing if streaming is not active
         """
@@ -266,6 +297,8 @@ def start_bagging(self, req):
         self.set_bag_directory(req.bag_name)
         self.set_bagger_status()

+        requested_seconds = req.bag_time
+
         for topic in self.subscriber_list:
             if topic[2] == False:
                 continue
@@ -273,13 +306,23 @@
             topic = topic[0]
             rospy.loginfo('topic: %s', topic)

+            # if no number is provided or a zero is given, bag all messages
+            types = (0, 0.0, None)
+            if req.bag_time in types:
+                bag_index = 0
+                self.bag_report = 'All messages were bagged'
+
+            # get time index the normal way
+            else:
+                bag_index = self.get_time_index(topic, requested_seconds)
+
             messages = 0  # message number in a given topic
-            for msgs in self.topic_messages[topic]:
+            for msgs in self.topic_messages[topic][bag_index:]:
                 messages = messages + 1
                 self.bag.write(topic, msgs[1], t=msgs[0])
                 if messages % 100 == 0:  # print every 100th topic, type and time
                     rospy.loginfo('topic: %s, message type: %s, time: %s',
-                                  topic, type(msgs[1]), type(msgs[0]))
+                                  topic, type(msgs[1]), type(msgs[0]))

             # empty deque when done writing to bag
             self.topic_messages[topic].clear()
diff --git a/utils/mil_tools/mil_ros_tools/online_bagger/srv/BaggerCommands.srv b/utils/mil_tools/mil_ros_tools/online_bagger/srv/BaggerCommands.srv
index 92dcaa6c..cc52d84e 100644
--- a/utils/mil_tools/mil_ros_tools/online_bagger/srv/BaggerCommands.srv
+++ b/utils/mil_tools/mil_ros_tools/online_bagger/srv/BaggerCommands.srv
@@ -1,3 +1,4 @@
 string bag_name
+float32 bag_time
 ---
 string status
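Taken together, the two bagger patches above leave the dump service callable like so (an illustrative client sketch; the service path and argument semantics come from the docstring above, while the srv import path is an assumption — adjust to wherever BaggerCommands is generated):

    import rospy
    from online_bagger.srv import BaggerCommands  # assumed import path

    rospy.wait_for_service('/online_bagger/dump')
    dump = rospy.ServiceProxy('/online_bagger/dump', BaggerCommands)

    status = dump('pool_test', 30.0)  # bag the last 30 seconds as pool_test.bag
    status = dump('', 0.0)            # default date/time name, dump everything buffered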
From b3cec5a77dd69f71aff70fc7e6e5709d350e8acd Mon Sep 17 00:00:00 2001
From: David Soto
Date: Mon, 27 Mar 2017 22:23:48 -0400
Subject: [PATCH 252/267] PASSIVE SONAR: remove legacy hydrophones pkg

The ROS package mil_passive_sonar should now be used for locating
pingers. The old hydrophone package was only able to work under one
hydrophone configuration. It should be removed from the SubjuGator repo.

---
 .../drivers/hydrophones/CMakeLists.txt         |  71 -----
 .great_merge/drivers/hydrophones/README.md     |  21 --
 .../drivers/hydrophones/msg/Debug.msg          |   6 -
 .great_merge/drivers/hydrophones/msg/Ping.msg  |   5 -
 .../drivers/hydrophones/msg/ProcessedPing.msg  |   5 -
 .great_merge/drivers/hydrophones/package.xml   |  38 ---
 .../drivers/hydrophones/scripts/hydrophones    |  59 ----
 .../drivers/hydrophones/scripts/ping_logger    |  25 --
 .../drivers/hydrophones/scripts/ping_plotter   |  44 ---
 .../drivers/hydrophones/scripts/ping_printer   |  20 --
 .great_merge/drivers/hydrophones/setup.py      |  13 -
 .../drivers/hydrophones/src/__init__.py        |   0
 .../hydrophones/src/hydrophones/__init__.py    |   1 -
 .../hydrophones/src/hydrophones/algorithms.py  | 256 ------------------
 .../hydrophones/src/hydrophones/util.py        |  73 -----
 15 files changed, 637 deletions(-)
 delete mode 100644 .great_merge/drivers/hydrophones/CMakeLists.txt
 delete mode 100644 .great_merge/drivers/hydrophones/README.md
 delete mode 100644 .great_merge/drivers/hydrophones/msg/Debug.msg
 delete mode 100644 .great_merge/drivers/hydrophones/msg/Ping.msg
 delete mode 100644 .great_merge/drivers/hydrophones/msg/ProcessedPing.msg
 delete mode 100644 .great_merge/drivers/hydrophones/package.xml
 delete mode 100755 .great_merge/drivers/hydrophones/scripts/hydrophones
 delete mode 100755 .great_merge/drivers/hydrophones/scripts/ping_logger
 delete mode 100755 .great_merge/drivers/hydrophones/scripts/ping_plotter
 delete mode 100755 .great_merge/drivers/hydrophones/scripts/ping_printer
 delete mode 100644 .great_merge/drivers/hydrophones/setup.py
 delete mode 100644 .great_merge/drivers/hydrophones/src/__init__.py
 delete mode 100644 .great_merge/drivers/hydrophones/src/hydrophones/__init__.py
 delete mode 100644 .great_merge/drivers/hydrophones/src/hydrophones/algorithms.py
 delete mode 100644 .great_merge/drivers/hydrophones/src/hydrophones/util.py

diff --git a/.great_merge/drivers/hydrophones/CMakeLists.txt b/.great_merge/drivers/hydrophones/CMakeLists.txt
deleted file mode 100644
index a444b1ca..00000000
--- a/.great_merge/drivers/hydrophones/CMakeLists.txt
+++ /dev/null
@@ -1,71 +0,0 @@
-# Catkin User Guide: http://www.ros.org/doc/groovy/api/catkin/html/user_guide/user_guide.html
-# Catkin CMake Standard: http://www.ros.org/doc/groovy/api/catkin/html/user_guide/standards.html
-cmake_minimum_required(VERSION 2.8.3)
-project(hydrophones)
-# Load catkin and all dependencies required for this package
-# TODO: remove all from COMPONENTS that are not catkin packages.
-find_package(catkin REQUIRED COMPONENTS tf std_msgs message_runtime message_generation rospy geometry_msgs)
-catkin_python_setup()
-
-# CATKIN_MIGRATION: removed during catkin migration
-# cmake_minimum_required(VERSION 2.4.6)
-
-# CATKIN_MIGRATION: removed during catkin migration
-# include($ENV{ROS_ROOT}/core/rosbuild/rosbuild.cmake)
-
-# Set the build type. Options are:
-# Coverage : w/ debug symbols, w/o optimization, w/ code-coverage
-# Debug : w/ debug symbols, w/o optimization
-# Release : w/o debug symbols, w/ optimization
-# RelWithDebInfo : w/ debug symbols, w/ optimization
-# MinSizeRel : w/o debug symbols, w/ optimization, stripped binaries
-#set(ROS_BUILD_TYPE RelWithDebInfo)
-
-
-# CATKIN_MIGRATION: removed during catkin migration
-# rosbuild_init()
-
-#set the default path for built executables to the "bin" directory
-
-# CATKIN_MIGRATION: removed during catkin migration
-# set(EXECUTABLE_OUTPUT_PATH ${PROJECT_SOURCE_DIR}/bin)
-#set the default path for built libraries to the "lib" directory
-
-# CATKIN_MIGRATION: removed during catkin migration
-# set(LIBRARY_OUTPUT_PATH ${PROJECT_SOURCE_DIR}/lib)
-
-#uncomment if you have defined messages
-add_message_files(
-  FILES
-  ProcessedPing.msg
-  Debug.msg
-  Ping.msg
-)
-## Generate added messages and services with any dependencies listed here
-generate_messages(
-  DEPENDENCIES std_msgs geometry_msgs
-)
-
-# catkin_package parameters: http://ros.org/doc/groovy/api/catkin/html/dev_guide/generated_cmake_api.html#catkin-package
-# TODO: fill in what other packages will need to use this package
-catkin_package(
-  DEPENDS # TODO
-  CATKIN_DEPENDS tf std_msgs message_runtime message_generation rospy
-  INCLUDE_DIRS # TODO include
-  LIBRARIES # TODO
-)
-
-include_directories( ${Boost_INCLUDE_DIR} ${catkin_INCLUDE_DIRS})
-
-#uncomment if you have defined services
-#rosbuild_gensrv()
-
-#common commands for building c++ executables and libraries
-#rosbuild_add_library(${PROJECT_NAME} src/example.cpp)
-#target_link_libraries(${PROJECT_NAME} another_library)
-#rosbuild_add_boost_directories()
-#rosbuild_link_boost(${PROJECT_NAME} thread)
-#rosbuild_add_executable(example examples/example.cpp)
-#target_link_libraries(example ${PROJECT_NAME})
-
-install(PROGRAMS scripts/ping_printer scripts/ping_logger scripts/ping_plotter scripts/hydrophones DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION})
diff --git a/.great_merge/drivers/hydrophones/README.md b/.great_merge/drivers/hydrophones/README.md
deleted file mode 100644
index c8f50c34..00000000
--- a/.great_merge/drivers/hydrophones/README.md
+++ /dev/null
@@ -1,21 +0,0 @@
-Conventions used:
-
-Incoming data is assumed to be from four hydrophones, indexed from 0 to 3,
-laid out as follows:
-
-      1
-      |        x ^
-   3--0          |
-      |     y <--0 z (out of screen)
-      2
-
-The resulting published position point is in the coordinate frame shown above.
-
-Other trivia:
-
-`dist_h` is the distance (in meters, of course) from hydrophone 0 to 1 (and
-equivalently, 0 to 2). `dist_h4` is the distance from 0 to 3.
-
-The deltas (seen internally and published on the debug topic) are _delays_, so
-one being more positive means that the ping was heard by that hydrophone
-later.
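For the record, converting a position estimate in this frame to the heading and declination the old driver published is just two atan2 calls (a minimal numpy sketch of the math in the script removed below):

    import math
    import numpy

    def heading_declination(pos):
        # pos = (x, y, z) in the hydrophone frame drawn above
        heading = math.atan2(pos[1], pos[0])
        declination = math.atan2(-pos[2], numpy.linalg.norm(pos[0:2]))
        return heading, declination  # radians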
diff --git a/.great_merge/drivers/hydrophones/msg/Debug.msg b/.great_merge/drivers/hydrophones/msg/Debug.msg deleted file mode 100644 index cc0d479b..00000000 --- a/.great_merge/drivers/hydrophones/msg/Debug.msg +++ /dev/null @@ -1,6 +0,0 @@ -Header header -float64[] deltas -float64[] delta_errors -float64 fft_sharpness -float64 heading -float64 declination diff --git a/.great_merge/drivers/hydrophones/msg/Ping.msg b/.great_merge/drivers/hydrophones/msg/Ping.msg deleted file mode 100644 index 9389665a..00000000 --- a/.great_merge/drivers/hydrophones/msg/Ping.msg +++ /dev/null @@ -1,5 +0,0 @@ -Header header -int32 channels -int32 samples -int32 sample_rate -uint16[] data # C1 C2 C3 C4 C1 C2 C3 C4 ... diff --git a/.great_merge/drivers/hydrophones/msg/ProcessedPing.msg b/.great_merge/drivers/hydrophones/msg/ProcessedPing.msg deleted file mode 100644 index 4951f733..00000000 --- a/.great_merge/drivers/hydrophones/msg/ProcessedPing.msg +++ /dev/null @@ -1,5 +0,0 @@ -Header header -geometry_msgs/Point position -float64 freq -float64 amplitude -bool valid diff --git a/.great_merge/drivers/hydrophones/package.xml b/.great_merge/drivers/hydrophones/package.xml deleted file mode 100644 index 5ddadb7b..00000000 --- a/.great_merge/drivers/hydrophones/package.xml +++ /dev/null @@ -1,38 +0,0 @@ - - hydrophones - 1.0.0 - hydrophones - Matthew Thompson - - BSD - - http://ros.org/wiki/hydrophones - - - Matthew Thompson - - - catkin - - - tf - rospy - message_runtime - message_generation - std_msgs - geometry_msgs - - - tf - rospy - message_runtime - message_generation - std_msgs - geometry_msgs - - - - - - - diff --git a/.great_merge/drivers/hydrophones/scripts/hydrophones b/.great_merge/drivers/hydrophones/scripts/hydrophones deleted file mode 100755 index 13896643..00000000 --- a/.great_merge/drivers/hydrophones/scripts/hydrophones +++ /dev/null @@ -1,59 +0,0 @@ -#!/usr/bin/env python - -import os -import numpy -import threading -import math - -import rospy - -from hydrophones import algorithms, util -from hydrophones.msg import Ping, ProcessedPing, Debug -from std_msgs.msg import Header -from geometry_msgs.msg import Point, PoseStamped, Pose - -def process_ping(ping): - samples = util.ping_to_samples(ping) - sample_rate = ping.sample_rate - - r = algorithms.run(samples, sample_rate, v_sound, dist_h, dist_h4) - - if len(r['pos']) > 0: - heading = math.atan2(r['pos'][1], r['pos'][0]) - declination = math.atan2(-r['pos'][2], numpy.linalg.norm(r['pos'][0:2])) - else: - heading = 0 - declination = 0 - - if len(r['errors']) > 0: - rospy.logwarn('Errors processing ping: ' + ", ".join(r['errors'])) - - valid = len(r['pos']) > 0 and len(r['errors']) == 0 - if valid: - pub.publish(header=Header(stamp=ping.header.stamp, - frame_id=ping.header.frame_id), - position=Point(*r['pos'].tolist()), - freq=r['freq'], - amplitude=r['amplitude'], - valid=valid) - pub_pose.publish(header=Header(stamp=ping.header.stamp, - frame_id=ping.header.frame_id), - pose=Pose(position=Point(*r['pos'].tolist()))) - pub_debug.publish(header=Header(stamp=ping.header.stamp, - frame_id=ping.header.frame_id), - deltas=r['deltas'].tolist(), - delta_errors=r['delta_errors'].tolist(), - fft_sharpness=r['fft_sharpness'], - heading=heading, - declination=declination) - -rospy.init_node('hydrophones') -dist_h = rospy.get_param('~dist_h') -dist_h4 = rospy.get_param('~dist_h4') -v_sound = rospy.get_param('~v_sound') -template_periods = rospy.get_param('~template_periods', 3) -pub = rospy.Publisher('hydrophones/processed', ProcessedPing, queue_size=10) 
-pub_pose = rospy.Publisher('hydrophones/pose', PoseStamped, queue_size=10) -pub_debug = rospy.Publisher('hydrophones/debug', Debug, queue_size=10) -sub = rospy.Subscriber('hydrophones/ping', Ping, process_ping) -rospy.spin() diff --git a/.great_merge/drivers/hydrophones/scripts/ping_logger b/.great_merge/drivers/hydrophones/scripts/ping_logger deleted file mode 100755 index 6747aa30..00000000 --- a/.great_merge/drivers/hydrophones/scripts/ping_logger +++ /dev/null @@ -1,25 +0,0 @@ -#!/usr/bin/env python - -import os -import numpy - -import roslib -roslib.load_manifest('hydrophones') -import rospy - -from hydrophones.msg import Ping -from hydrophones import util - -rospy.init_node('ping_logger') -output_dir = rospy.get_param("~output_dir", "pings") -if not os.path.exists(output_dir): - os.makedirs(output_dir) - -def save_ping(ping): - path = os.path.join(output_dir, str(ping.header.seq) + ".csv") - samples = util.ping_to_samples(ping) - numpy.savetxt(path, samples.transpose(), fmt='%d', delimiter=',') - -sub = rospy.Subscriber('hydrophones/ping', Ping, save_ping) -rospy.spin() - diff --git a/.great_merge/drivers/hydrophones/scripts/ping_plotter b/.great_merge/drivers/hydrophones/scripts/ping_plotter deleted file mode 100755 index 53ca34e1..00000000 --- a/.great_merge/drivers/hydrophones/scripts/ping_plotter +++ /dev/null @@ -1,44 +0,0 @@ -#!/usr/bin/env python - -import os -import numpy -import threading -import matplotlib -import matplotlib.pyplot as plt - -import roslib -roslib.load_manifest('hydrophones') -import rospy - -from hydrophones import algorithms, util -from hydrophones.msg import Ping - -class Node(object): - def __init__(self): - self.fig = plt.figure() - self.ax = self.fig.add_subplot(111) - self.fig.show() - self.cond = threading.Condition() - self.samples = None - self.sub = rospy.Subscriber('hydrophones/ping', Ping, self.ping_callback) - - def ping_callback(self, ping): - with self.cond: - self.samples = util.ping_to_samples(ping) - self.cond.notify_all() - - def run(self): - while not rospy.is_shutdown(): - with self.cond: - self.cond.wait(.5) - if self.samples is not None: - self.ax.clear() - self.ax.plot(self.samples.transpose()) - self.ax.autoscale() - self.fig.canvas.draw() - self.fig.show() - self.samples = None - -rospy.init_node('ping_plotter') -Node().run() - diff --git a/.great_merge/drivers/hydrophones/scripts/ping_printer b/.great_merge/drivers/hydrophones/scripts/ping_printer deleted file mode 100755 index fe2d083c..00000000 --- a/.great_merge/drivers/hydrophones/scripts/ping_printer +++ /dev/null @@ -1,20 +0,0 @@ -#!/usr/bin/env python - -import math - -import roslib -roslib.load_manifest('hydrophones') -import rospy - -from hydrophones.msg import ProcessedPing - -def print_heading(ping_processed): - print '%s%dhz heading %d declination %d amplitude %d' % ('bad' if not ping_processed.valid else '', - ping_processed.freq, - ping_processed.heading / math.pi * 180, - ping_processed.declination / math.pi * 180, - ping_processed.amplitude) - -rospy.init_node('print_heading') -sub = rospy.Subscriber('hydrophones/processed', ProcessedPing, print_heading) -rospy.spin() diff --git a/.great_merge/drivers/hydrophones/setup.py b/.great_merge/drivers/hydrophones/setup.py deleted file mode 100644 index 8a4c344f..00000000 --- a/.great_merge/drivers/hydrophones/setup.py +++ /dev/null @@ -1,13 +0,0 @@ -## ! 
DO NOT MANUALLY INVOKE THIS setup.py, USE CATKIN INSTEAD
-
-from distutils.core import setup
-from catkin_pkg.python_setup import generate_distutils_setup
-
-# fetch values from package.xml
-setup_args = generate_distutils_setup(
-    packages=['hydrophones'],
-    package_dir={'': 'src'},
-    requires=[], # TODO
-)
-
-setup(**setup_args)
\ No newline at end of file
diff --git a/.great_merge/drivers/hydrophones/src/__init__.py b/.great_merge/drivers/hydrophones/src/__init__.py
deleted file mode 100644
index e69de29b..00000000
diff --git a/.great_merge/drivers/hydrophones/src/hydrophones/__init__.py b/.great_merge/drivers/hydrophones/src/hydrophones/__init__.py
deleted file mode 100644
index b23e43d7..00000000
--- a/.great_merge/drivers/hydrophones/src/hydrophones/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-#autogenerated by ROS python message generators
\ No newline at end of file
diff --git a/.great_merge/drivers/hydrophones/src/hydrophones/algorithms.py b/.great_merge/drivers/hydrophones/src/hydrophones/algorithms.py
deleted file mode 100644
index 122465ff..00000000
--- a/.great_merge/drivers/hydrophones/src/hydrophones/algorithms.py
+++ /dev/null
@@ -1,256 +0,0 @@
-from __future__ import division
-from hydrophones import util
-
-import numpy
-import scipy
-import math
-import sys
-import matplotlib.pyplot as plt
-
-def run(samples, sample_rate, v_sound, dist_h, dist_h4):
-    # Perform algorithm
-    samples = zero_mean(samples)
-    freq, amplitude, samples_fft = compute_freq(samples, sample_rate, numpy.array([5e3, 40e3]), plot=True)
-    fft_sharpness = amplitude**2/numpy.sum(samples_fft)
-    upsamples, upsample_rate = preprocess(samples, sample_rate, 3e6)
-    deltas, delta_errors, template_pos, template_width = \
-        compute_deltas(upsamples, upsample_rate, freq, 8)
-    delta_errors = delta_errors / amplitude
-    if len(deltas) == 3:
-        pos = compute_pos_4hyd(deltas, upsample_rate, v_sound, dist_h, dist_h4)
-    else:
-        pos = numpy.empty(0)
-
-    # Check for errors
-    errors = []
-    if amplitude < 80:
-        errors.append('Low amplitude at maximum frequency')
-    if template_pos is None:
-        errors.append('Failed to find template')
-    elif numpy.max(delta_errors) > 1e-3:
-        errors.append('High template match error (%s)' % str(delta_errors))
-
-    return dict(
-        errors=errors,
-        freq=freq,
-        amplitude=amplitude,
-        fft_sharpness=fft_sharpness,
-        samples_fft=samples_fft,
-        pos=pos,
-        deltas=deltas,
-        delta_errors=delta_errors,
-        upsamples=upsamples,
-        upsample_rate=upsample_rate,
-        template_pos=template_pos,
-        template_width=template_width)
-
-def zero_mean(samples):
-    """Zero-means the data by assuming that the first 32 samples are zeros"""
-    return samples - numpy.mean(samples[:, 0:32], 1)[:, numpy.newaxis]
-
-def normalize(samples):
-    """Rescales samples so each individual channel lies between -1 and 1"""
-    return samples / numpy.amax(numpy.abs(samples), 1)[:, numpy.newaxis]
-
-def compute_freq(samples, sample_rate, freq_range, plot=False):
-    """Checks whether samples are likely a solid ping and returns the frequency."""
-    samples_window = samples * numpy.hamming(samples.shape[1])
-
-    # Compute fft, find peaks in desired range
-    fft_length = 2048
-    samples_fft = numpy.absolute(numpy.fft.fft(samples_window, fft_length, axis=1))[:, :fft_length/2]
-    bin_range = freq_to_bin(freq_range, sample_rate, fft_length)
-    peaks = bin_range[0] + numpy.argmax(samples_fft[:, bin_range[0]:bin_range[1]], axis=1)
-
-    # Sort peaks, take mean of the middle
-    middle_peaks = numpy.sort(peaks)[len(peaks)//4:len(peaks) - len(peaks)//4]
-    peak = numpy.mean(middle_peaks)
-
-    freq = bin_to_freq(peak, sample_rate, fft_length)
-
-    amplitude = math.sqrt(numpy.mean(samples_fft[:, round(peak)]))
-    return freq, amplitude, samples_fft
-
-def bin_to_freq(bin, sample_rate, fft_length):
-    return (sample_rate/2) / (fft_length/2) * bin
-
-def freq_to_bin(freq, sample_rate, fft_length):
-    return freq * ((fft_length/2) / (sample_rate/2))
-
-def preprocess(samples, sample_rate, desired_sample_rate):
-    """Upsamples data to have approx. desired_sample_rate."""
-    # Trim to first ms of data and bandpass
-    samples = bandpass(samples[:, :.001*sample_rate], sample_rate)
-    samples = normalize(samples)
-
-    # Upsample each channel
-    upfact = round(desired_sample_rate/sample_rate)
-    upsamples = numpy.empty((samples.shape[0], samples.shape[1]*upfact))
-    for i in xrange(samples.shape[0]):
-        upsamples[i, :] = util.resample(samples[i, :], upfact, 1)
-    return upsamples, sample_rate*upfact
-
-def bandpass(samples, sample_rate):
-    """Applies a 19-41 kHz bandpass FIR filter"""
-    # 25-40KHz is the range of the pinger for the roboboat competition
-    fir = scipy.signal.firwin(128,
-                              [19e3/(sample_rate/2), 41e3/(sample_rate/2)],
-                              window='hann',
-                              pass_zero=False)
-    return scipy.signal.lfilter(fir, 1, samples)
-
-def compute_deltas(samples, sample_rate, ping_freq, template_periods, plot=False):
-    """
-    Computes N-1 position deltas for N channels, by making a template
-    for the first channel and matching to all subsequent channels.
-    """
-    period = int(round(sample_rate / ping_freq))
-    template_width = period*template_periods+1
-    template, template_pos = make_template(samples[0, :],
-                                           .2,
-                                           template_width)
-    if template_pos is None:
-        return numpy.empty(0), numpy.empty(0), None, template_width
-    start = template_pos - period//2
-    stop = template_pos + period//2
-
-    deltas = numpy.empty(samples.shape[0]-1)
-    errors = numpy.empty(samples.shape[0]-1)
-    for i in xrange(deltas.shape[0]):
-        pos, error = match_template(samples[i+1, :], start, stop, template)
-        deltas[i] = pos - template_pos
-        errors[i] = error
-
-    return deltas, errors, template_pos, template_width
-
-def make_template(channel, thresh, width):
-    """
-    Returns a template of the specified width, centered at the first
-    point of the channel to exceed thresh.
-    """
-    for pos in xrange(0, channel.shape[0]-width):
-        if abs(channel[pos+width//4]) >= thresh:
-            return channel[pos:pos+width], pos
-    return None, None
-
-def match_template(channel, start, stop, template):
-    """
-    Matches template to channel, returning the point where the start
-    of the template should be placed.
-    """
-    start = max(start, 0)
-    stop = min(stop, channel.shape[0] - template.shape[0])
-    mad = mean_absolute_difference(channel, start, stop, template)
-    min_pt = find_zero(mad)
-
-    return start + min_pt, mad[round(min_pt)]
-
-def mean_absolute_difference(channel, start, stop, template):
-    """
-    Slides the template along channel from start to stop, giving the
-    mean absolute difference of the template and channel at each
-    location.
-    """
-    width = template.shape[0]
-    mad = numpy.zeros(stop-start)
-    for i in xrange(start, stop):
-        mad[i-start] = numpy.mean(numpy.abs(channel[i:i+width] - template))
-    return mad
-
-def find_zero(data):
-    """
-    Finds the sub-sample position of the first zero in a continuous signal,
-    by first finding the lowest absolute value, then taking the gradient
-    and performing a linear interpolation near the lowest absolute value.
-    """
-    approx = numpy.argmin(numpy.abs(data))
-    d_data = numpy.gradient(data)
-
-    for pos in xrange(max(approx-3,0), min(approx+3, d_data.shape[0]-1)):
-        if numpy.sign(d_data[pos]) != numpy.sign(d_data[pos+1]):
-            y2 = d_data[pos+1]
-            y1 = d_data[pos]
-            x2 = pos+1
-            x1 = pos
-            return -(x2-x1)/(y2-y1)*y1 + x1
-    return approx
-
-def compute_pos_4hyd(deltas, sample_rate, v_sound, dist_h, dist_h4):
-    """
-    Solves the 4 hydrophone case (3 deltas) for heading, declination,
-    and sph_dist using a bunch of trig. Future work would be to phrase
-    this as a NLLSQ problem or something, and adapt it to more
-    hydrophones.
-    """
-    assert(len(deltas) == 3)
-
-    y1 = deltas[0]/sample_rate*v_sound
-    y2 = deltas[1]/sample_rate*v_sound
-    y3 = deltas[2]/sample_rate*v_sound
-
-    dist = abs((y1**2 + y2**2 - 2*dist_h**2)/(2*y1 + 2*y2))
-    cos_alpha1 = (2*dist*y1 + y1**2 - dist_h**2)/(-2*dist*dist_h)
-    cos_alpha2 = -(2*dist*y2 + y2**2 - dist_h**2)/(-2*dist*dist_h)
-    cos_alpha = (cos_alpha1 + cos_alpha2)/2
-
-    cos_beta = (2*dist*y3 + y3**2 - dist_h4**2)/(-2*dist*dist_h4)
-
-    dist_x = cos_alpha*dist
-    dist_y = cos_beta*dist
-    if dist**2 - (dist_x**2 + dist_y**2) < 0:
-        dist_z = 0
-    else:
-        dist_z = -math.sqrt(dist**2 - (dist_x**2 + dist_y**2))
-    return numpy.array([dist_x, dist_y, dist_z])
-
-if __name__ == '__main__':
-    sample_rate = 300e3
-    if len(sys.argv) > 1:
-        samples = numpy.loadtxt(sys.argv[1], delimiter=',').transpose()
-    else:
-        samples = util.make_ping([0, .25, 1.234, -5], {'freq': 23e3, 'sample_rate': sample_rate})
-
-    r = run(samples, sample_rate, 1497, 2.286000e-02, 2.286000e-02)
-
-    if len(r['errors']) > 0:
-        print 'ERRORS', r['errors']
-    print 'freq', r['freq'], 'amplitude', r['amplitude']
-    print 'sharpness', r['fft_sharpness']
-    print 'deltas', r['deltas']
-    print 'delta errors', r['delta_errors']/r['amplitude']
-    print 'pos (hyd coordinates)', r['pos']
-
-    plt.figure()
-    plt.plot(samples.transpose())
-    plt.title('Raw ping')
-
-    plt.figure()
-    fft_length = r['samples_fft'].shape[1]*2
-    plt.plot(bin_to_freq(numpy.arange(0, fft_length//2), sample_rate, fft_length),
-             r['samples_fft'].transpose())
-    plt.title('FFT')
-
-    plt.figure()
-    plt.plot(r['upsamples'].transpose())
-    plt.title('Upsampled ping')
-
-    if r['template_pos'] is not None:
-        period = int(round(r['upsample_rate'] / r['freq']))
-        template = r['upsamples'][0,r['template_pos']:r['template_pos']+r['template_width']]
-        plot_start = r['template_pos'] - 2*period
-        plot_stop = r['template_pos'] + r['template_width'] + 2*period
-        plt.ioff()
-        plt.figure()
-        plt.plot(template)
-        plt.title('Template')
-
-        for i in xrange(r['deltas'].shape[0]):
-            plt.figure()
-            plt.plot(numpy.arange(plot_start, plot_stop), r['upsamples'][i+1, plot_start:plot_stop])
-            pos = r['template_pos'] + int(round(r['deltas'][i]))
-            plt.plot(numpy.arange(pos, pos+r['template_width']), template)
-            plt.title('Channel %d' % (i+1))
-
-        plt.show()
-
-
diff --git a/.great_merge/drivers/hydrophones/src/hydrophones/util.py b/.great_merge/drivers/hydrophones/src/hydrophones/util.py
deleted file mode 100644
index f19e1d4c..00000000
--- a/.great_merge/drivers/hydrophones/src/hydrophones/util.py
+++ /dev/null
@@ -1,73 +0,0 @@
-from __future__ import division
-
-import numpy
-import scipy
-import scipy.signal
-import math
-
-from hydrophones.msg import Ping
-
-def resample(x, p, q):
-    """Polyphase filter resample, based on MATLAB's resample.m"""
-    bta = 5
-    N = 10
-    pqmax = max(p, q)
-    fc = (1/2)/pqmax
-    L = 2*N*pqmax + 1
-    h = p*scipy.signal.firwin(L-1, 2*fc, window=('kaiser', bta))
-    Lhalf = (L-1)/2
-    Lx = len(x)
-
-    nz = math.floor(q-(Lhalf % q))
-    z = numpy.zeros(nz)
-    Lhalf += nz
-
-    delay = math.floor(math.ceil(Lhalf)/q)
-    nz1 = 0
-    while math.ceil(((Lx - 1)*p + len(h) + nz1)/q) - delay < math.ceil(Lx*p/q):
-        nz1 = nz1+1
-    h = numpy.hstack([h, numpy.zeros(nz1)])
-    y = upfirdn(x,h,p,q)
-    Ly = math.ceil(Lx*p/q)
-    y = y[delay:]
-    y = y[:Ly]
-    return y
-
-def upfirdn(x, h, p, q):
-    # Allocate an expanded array to hold x interspersed with p-1 zeros,
-    # padded with enough zeros for the fir filter
-    x_expanded = numpy.zeros((x.shape[0] - 1)*p + h.shape[0])
-
-    # Insert x values every p elements
-    x_expanded[:x.shape[0]*p:p] = x
-
-    # Run filter
-    x_filt = scipy.signal.lfilter(h, 1, x_expanded)
-    return x_filt
-
-def make_ping_channel(delay=0,
-                      freq=25e3,
-                      offset=0x7FFF,
-                      ampl=200,
-                      zeros=64,
-                      count=1024,
-                      sample_rate=300e3):
-    w = 2*math.pi*freq/sample_rate
-    sinwave = ampl*numpy.sin(w*(numpy.arange(count)-delay))
-
-    delay_i = round(delay)
-    window = numpy.zeros(count)
-    window[zeros+delay_i:] = numpy.minimum(numpy.exp(numpy.arange(count - zeros - delay_i)/10), 2)-1
-
-    noise = numpy.random.normal(0, .01, count)
-
-    return offset + sinwave * window + noise
-
-def make_ping(delays=[0, 0, 0, 0], args={}):
-    return numpy.vstack(make_ping_channel(delay=delay, **args) for delay in delays)
-
-def samples_to_list(samples):
-    return samples.transpose().flatten().tolist()
-
-def ping_to_samples(ping):
-    return numpy.array(ping.data, dtype=numpy.float64).reshape((ping.samples, ping.channels)).transpose()
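Note (not part of the patch above): the bin/frequency mapping used by compute_freq in the deleted algorithms.py is easy to sanity-check numerically. A minimal worked example, using the 300 kHz sample rate and 2048-point FFT from that file's __main__ block and its bin_to_freq helper:

    # Each FFT bin spans (sample_rate/2) / (fft_length/2) Hz
    sample_rate = 300e3
    fft_length = 2048
    bin_width = (sample_rate / 2) / (fft_length / 2)  # ~146.5 Hz per bin
    # A 25 kHz pinger peaks near bin 25e3 / bin_width ~= 170.7, and
    # bin_to_freq maps that bin index back to the frequency exactly:
    print bin_to_freq(25e3 / bin_width, sample_rate, fft_length)  # -> 25000.0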
From 1c3a53c5bdb6016ec8695e287cb9119ecb0b7275 Mon Sep 17 00:00:00 2001
From: David Soto
Date: Mon, 27 Mar 2017 22:35:18 -0400
Subject: [PATCH 253/267] PASSIVE SONAR: move paulboard pkg to mil_passive_sonar

---
 .great_merge/CATKIN_IGNORE                          |   0
 .../paulboard_driver/CMakeLists.txt                 |   0
 .../mil_passive_sonar}/paulboard_driver/README.md   |   0
 .../paulboard_driver/blobs/SimpleHyd2013.bin        | Bin
 .../mil_passive_sonar}/paulboard_driver/package.xml |   0
 .../paulboard_driver/scripts/paulboard_driver       |   0
 .../mil_passive_sonar}/paulboard_driver/setup.py    |   0
 .../src/paulboard_driver/SimpleHyd2013.bin          | Bin
 .../src/paulboard_driver/__init__.py                |   0
 .../src/paulboard_driver/ais_bootloader.py          |   0
 10 files changed, 0 insertions(+), 0 deletions(-)
 delete mode 100644 .great_merge/CATKIN_IGNORE
 rename {.great_merge/drivers => drivers/mil_passive_sonar}/paulboard_driver/CMakeLists.txt (100%)
 rename {.great_merge/drivers => drivers/mil_passive_sonar}/paulboard_driver/README.md (100%)
 rename {.great_merge/drivers => drivers/mil_passive_sonar}/paulboard_driver/blobs/SimpleHyd2013.bin (100%)
 rename {.great_merge/drivers => drivers/mil_passive_sonar}/paulboard_driver/package.xml (100%)
 rename {.great_merge/drivers => drivers/mil_passive_sonar}/paulboard_driver/scripts/paulboard_driver (100%)
 rename {.great_merge/drivers => drivers/mil_passive_sonar}/paulboard_driver/setup.py (100%)
 rename {.great_merge/drivers => drivers/mil_passive_sonar}/paulboard_driver/src/paulboard_driver/SimpleHyd2013.bin (100%)
 rename {.great_merge/drivers => drivers/mil_passive_sonar}/paulboard_driver/src/paulboard_driver/__init__.py (100%)
 rename {.great_merge/drivers => drivers/mil_passive_sonar}/paulboard_driver/src/paulboard_driver/ais_bootloader.py (100%)

diff --git a/.great_merge/CATKIN_IGNORE b/.great_merge/CATKIN_IGNORE
deleted file mode 100644
index e69de29b..00000000
diff --git a/.great_merge/drivers/paulboard_driver/CMakeLists.txt b/drivers/mil_passive_sonar/paulboard_driver/CMakeLists.txt
similarity index 100%
rename from .great_merge/drivers/paulboard_driver/CMakeLists.txt
rename to drivers/mil_passive_sonar/paulboard_driver/CMakeLists.txt
diff --git a/.great_merge/drivers/paulboard_driver/README.md b/drivers/mil_passive_sonar/paulboard_driver/README.md
similarity index 100%
rename from .great_merge/drivers/paulboard_driver/README.md
rename to drivers/mil_passive_sonar/paulboard_driver/README.md
diff --git a/.great_merge/drivers/paulboard_driver/blobs/SimpleHyd2013.bin b/drivers/mil_passive_sonar/paulboard_driver/blobs/SimpleHyd2013.bin
similarity index 100%
rename from .great_merge/drivers/paulboard_driver/blobs/SimpleHyd2013.bin
rename to drivers/mil_passive_sonar/paulboard_driver/blobs/SimpleHyd2013.bin
diff --git a/.great_merge/drivers/paulboard_driver/package.xml b/drivers/mil_passive_sonar/paulboard_driver/package.xml
similarity index 100%
rename from .great_merge/drivers/paulboard_driver/package.xml
rename to drivers/mil_passive_sonar/paulboard_driver/package.xml
diff --git a/.great_merge/drivers/paulboard_driver/scripts/paulboard_driver b/drivers/mil_passive_sonar/paulboard_driver/scripts/paulboard_driver
similarity index 100%
rename from .great_merge/drivers/paulboard_driver/scripts/paulboard_driver
rename to drivers/mil_passive_sonar/paulboard_driver/scripts/paulboard_driver
diff --git a/.great_merge/drivers/paulboard_driver/setup.py b/drivers/mil_passive_sonar/paulboard_driver/setup.py
similarity index 100%
rename from .great_merge/drivers/paulboard_driver/setup.py
rename to drivers/mil_passive_sonar/paulboard_driver/setup.py
diff --git a/.great_merge/drivers/paulboard_driver/src/paulboard_driver/SimpleHyd2013.bin b/drivers/mil_passive_sonar/paulboard_driver/src/paulboard_driver/SimpleHyd2013.bin
similarity index 100%
rename from .great_merge/drivers/paulboard_driver/src/paulboard_driver/SimpleHyd2013.bin
rename to drivers/mil_passive_sonar/paulboard_driver/src/paulboard_driver/SimpleHyd2013.bin
diff --git a/.great_merge/drivers/paulboard_driver/src/paulboard_driver/__init__.py b/drivers/mil_passive_sonar/paulboard_driver/src/paulboard_driver/__init__.py
similarity index 100%
rename from .great_merge/drivers/paulboard_driver/src/paulboard_driver/__init__.py
rename to drivers/mil_passive_sonar/paulboard_driver/src/paulboard_driver/__init__.py
diff --git a/.great_merge/drivers/paulboard_driver/src/paulboard_driver/ais_bootloader.py b/drivers/mil_passive_sonar/paulboard_driver/src/paulboard_driver/ais_bootloader.py
similarity index 100%
rename from .great_merge/drivers/paulboard_driver/src/paulboard_driver/ais_bootloader.py
rename to drivers/mil_passive_sonar/paulboard_driver/src/paulboard_driver/ais_bootloader.py

From 3e7454436f3af6cca17cd28b7243074e49de31fd Mon Sep 17 00:00:00 2001
From: David Soto
Date: Mon, 27 Mar 2017 23:54:34 -0400
Subject: [PATCH 254/267] PASSIVE SONAR: make paulboard_driver a python pkg

The scripts here were demoted from being a ROS package. The old ROS
package should be removed from the SubjuGator repo.
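Note (not part of the commit): after this demotion, paulboard_driver is a plain Python package nested inside mil_passive_sonar, so downstream code imports it directly rather than depending on a paulboard_driver catkin package. A minimal sketch, assuming mil_passive_sonar's setup.py exports it as a top-level package (the module name comes from the file list in this patch; its API is not shown here):

    # Hypothetical usage of the demoted package
    from paulboard_driver import ais_bootloader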
---
 drivers/mil_passive_sonar/README.md              |  16 +++--
 drivers/mil_passive_sonar/launch/test.launch     |   4 +-
 .../paulboard_driver/CMakeLists.txt              |  66 ------------------
 .../paulboard_driver/README.md                   |  19 ++++-
 .../{blobs => }/SimpleHyd2013.bin                | Bin
 .../{src/paulboard_driver => }/__init__.py       |   0
 .../{src/paulboard_driver => }/ais_bootloader.py |   0
 .../paulboard_driver/package.xml                 |  29 --------
 .../{scripts => }/paulboard_driver               |   0
 .../paulboard_driver/setup.py                    |  13 ----
 .../src/paulboard_driver/SimpleHyd2013.bin       | Bin 33440 -> 0 bytes
 drivers/mil_passive_sonar/setup.py               |   4 +-
 12 files changed, 33 insertions(+), 118 deletions(-)
 delete mode 100644 drivers/mil_passive_sonar/paulboard_driver/CMakeLists.txt
 rename drivers/mil_passive_sonar/paulboard_driver/{blobs => }/SimpleHyd2013.bin (100%)
 rename drivers/mil_passive_sonar/paulboard_driver/{src/paulboard_driver => }/__init__.py (100%)
 rename drivers/mil_passive_sonar/paulboard_driver/{src/paulboard_driver => }/ais_bootloader.py (100%)
 mode change 100644 => 100755
 delete mode 100644 drivers/mil_passive_sonar/paulboard_driver/package.xml
 rename drivers/mil_passive_sonar/paulboard_driver/{scripts => }/paulboard_driver (100%)
 delete mode 100644 drivers/mil_passive_sonar/paulboard_driver/setup.py
 delete mode 100644 drivers/mil_passive_sonar/paulboard_driver/src/paulboard_driver/SimpleHyd2013.bin

diff --git a/drivers/mil_passive_sonar/README.md b/drivers/mil_passive_sonar/README.md
index 7e808471..9fc82f1d 100644
--- a/drivers/mil_passive_sonar/README.md
+++ b/drivers/mil_passive_sonar/README.md
@@ -2,15 +2,23 @@
 This script interfaces with Jake's sonar board and provides a ROS service that
 returns the location and time of emission of the last heard pinger pulse.
 
-### How to run and use
+## How to run and use
 
 To start the driver, run:
 
-    roslaunch sub8_sonar sonar.launch
+    roslaunch mil_passive_sonar sonar.launch
 
-*Note: In /launch/sonar.launch, make sure that port and baud are set correctly, and that the hydrophones' coordinates in the sonar frame are accurate.*
+> Note: make sure that the port, baud rate, and hydrophone locations are loaded
+> into the parameter server. See launch/test.launch for an example.
 
 In order to ask for hydrophone information:
 
     rosservice call /sonar/get_pinger_pulse *double_tab*
 
-The service should respond with the x, y, z, and t of the emission of the last heard pinger pulse.
+The service should respond with the x, y, and z of the last heard pinger
+pulse. Remember that this should be considered the tip of an arrow
+pointing in the direction of the pinger; the range is completely unreliable
+and varies wildly.
+
+## TODO
++ This package is not yet fully set up to use the paulboard. The interface to
+  make this happen would be simple to implement.
++ This package should estimate the least squares solution for the actual 3d
+  position of the pinger (see the sketch after this diff).
++ Visualize both individual heading vectors and the LS position estimate.
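Note (not part of the patch): the least-squares TODO above has a standard closed form. Each service response is effectively a bearing ray (an origin p and a unit heading d), and the point closest to all rays solves a 3x3 linear system. A minimal sketch, assuming numpy and that ray origins/headings are collected elsewhere; the function name is hypothetical:

    import numpy as np

    def estimate_pinger_position(origins, headings):
        '''origins, headings: Nx3 arrays, one bearing ray per observation.
        Needs at least two non-parallel bearings for A to be invertible.'''
        A = np.zeros((3, 3))
        b = np.zeros(3)
        for p, d in zip(origins, headings):
            d = d / np.linalg.norm(d)
            M = np.eye(3) - np.outer(d, d)  # projector onto the plane normal to d
            A = A + M
            b = b + M.dot(p)
        # Solves sum_i M_i x = sum_i M_i p_i, minimizing the summed squared
        # distance from x to every ray
        return np.linalg.solve(A, b)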
diff --git a/drivers/mil_passive_sonar/launch/test.launch b/drivers/mil_passive_sonar/launch/test.launch
index 7eff7b7f..206be6b9 100644
--- a/drivers/mil_passive_sonar/launch/test.launch
+++ b/drivers/mil_passive_sonar/launch/test.launch
@@ -1,5 +1,5 @@
[launch-file XML hunk lost in extraction; only bare -/+ markers survived]
diff --git a/drivers/mil_passive_sonar/paulboard_driver/package.xml b/drivers/mil_passive_sonar/paulboard_driver/package.xml
deleted file mode 100644
[package.xml hunk largely lost in extraction; surviving fragments show a maintainer of Matthew Thompson, a catkin buildtool dependency, and rospy and hydrophones build/run dependencies]
\ No newline at end of file
diff --git a/drivers/mil_passive_sonar/paulboard_driver/scripts/paulboard_driver b/drivers/mil_passive_sonar/paulboard_driver/paulboard_driver
similarity index 100%
rename from drivers/mil_passive_sonar/paulboard_driver/scripts/paulboard_driver
rename to drivers/mil_passive_sonar/paulboard_driver/paulboard_driver
diff --git a/drivers/mil_passive_sonar/paulboard_driver/setup.py b/drivers/mil_passive_sonar/paulboard_driver/setup.py
deleted file mode 100644
index bc1213b0..00000000
--- a/drivers/mil_passive_sonar/paulboard_driver/setup.py
+++ /dev/null
@@ -1,13 +0,0 @@
-## ! DO NOT MANUALLY INVOKE THIS setup.py, USE CATKIN INSTEAD
-
-from distutils.core import setup
-from catkin_pkg.python_setup import generate_distutils_setup
-
-# fetch values from package.xml
-setup_args = generate_distutils_setup(
-    packages=['paulboard_driver'],
-    package_dir={'': 'src'},
-    requires=[], # TODO
-)
-
-setup(**setup_args)
\ No newline at end of file
diff --git a/drivers/mil_passive_sonar/paulboard_driver/src/paulboard_driver/SimpleHyd2013.bin b/drivers/mil_passive_sonar/paulboard_driver/src/paulboard_driver/SimpleHyd2013.bin
deleted file mode 100644
index 4b4c50a08d614cd4256faaceadbf2c4137bb507d..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

[33440-byte base85 binary literal omitted; unreadable payload for SimpleHyd2013.bin]

[From header of PATCH 255 lost in extraction]
Date: Tue, 28 Mar 2017 12:49:14 -0400
Subject: [PATCH 255/267] PASSIVE SONAR: move pinger finder node to
 mil_passive_sonar

It was previously in the great_merge_purgatory of mil_vision
---
 .../mil_passive_sonar}/nodes/pinger_finder.py | 0
 1 file changed, 0 insertions(+), 0 deletions(-)
 rename {perception/mil_vision/great_merge_purgatory => drivers/mil_passive_sonar}/nodes/pinger_finder.py (100%)

diff --git a/perception/mil_vision/great_merge_purgatory/nodes/pinger_finder.py b/drivers/mil_passive_sonar/nodes/pinger_finder.py
similarity index 100%
rename from perception/mil_vision/great_merge_purgatory/nodes/pinger_finder.py
rename to drivers/mil_passive_sonar/nodes/pinger_finder.py

From af635016c03ce9dfc7fed8aa528686ea3dcf36ff Mon Sep 17 00:00:00 2001
From: Kevin Allen
Date: Sat, 8 Apr 2017 17:25:21 -0400
Subject: [PATCH 256/267] TOOLS: add script to rename topics/frames in bags

---
 utils/mil_tools/mil_ros_tools/fix_bag.py | 78 ++++++++++++++++++++++++
 1 file changed, 78 insertions(+)
 create mode 100755 utils/mil_tools/mil_ros_tools/fix_bag.py

diff --git a/utils/mil_tools/mil_ros_tools/fix_bag.py b/utils/mil_tools/mil_ros_tools/fix_bag.py
new file mode 100755
index 00000000..2fafdbe1
--- /dev/null
+++ b/utils/mil_tools/mil_ros_tools/fix_bag.py
@@ -0,0 +1,78 @@
+#!/usr/bin/env python
+import argparse
+import rosbag
+import rospy
+from sensor_msgs.msg import CameraInfo
+from roslib.message import get_message_class
+
+
+class BagFixer(object):
+    '''
+    Fixes a bag by remapping topic names and frame_ids.
+    Topics are remapped using a dictionary of old name -> new name; if an
+    old name ends in /, everything under that prefix is remapped, otherwise
+    the topic must match exactly.
+    '''
+    def fix_topic(self, topic):
+        for k, t in self.topic_map.iteritems():
+            if topic.find(k) == 0:
+                return t + topic[len(k):]
+        return topic
+
+    def fix_tf(self, tf):
+        for i, t in enumerate(tf.transforms):
+            tf.transforms[i].header.frame_id = self.fix_frame(tf.transforms[i].header.frame_id)
+            tf.transforms[i].child_frame_id = self.fix_frame(tf.transforms[i].child_frame_id)
+        return tf
+
+    def fix_frame(self, frame):
+        fixed_frame = self.frame_map.get(frame)
+        return fixed_frame if fixed_frame is not None else frame
+
+    def fix_bag(self, infile, outfile=None):
+        if outfile is None:
+            split = infile.rsplit('.bag', 1)
+            outfile = split[0] + '_fixed.bag'
+        out = rosbag.Bag(outfile, 'w')
+        bag = rosbag.Bag(infile)
+        for topic, msg, time in bag.read_messages():
+            topic = self.fix_topic(topic)
+            if hasattr(msg, 'header') and msg.header._type == 'std_msgs/Header':
+                msg.header.frame_id = self.fix_frame(msg.header.frame_id)
+            if msg._type == 'tf2_msgs/TFMessage' or msg._type == 'tf/tfMessage':
+                msg = self.fix_tf(msg)
+            out.write(topic, msg, time)
+        # Close both bags so the output index is actually written to disk
+        out.close()
+        bag.close()
+
+    def fix_strings(self, strings):
+        # Accept None (flag omitted on the command line), an existing dict,
+        # or a list of 'old:new' strings
+        if strings is None:
+            return {}
+        if type(strings) == dict:
+            return strings
+        assert type(strings) == list
+        d = {}
+        for s in strings:
+            split = s.split(':')
+            assert len(split) == 2
+            d[split[0]] = split[1]
+        return d
+
+    def __init__(self, topic_map={}, frame_map={}):
+        self.topic_map = self.fix_strings(topic_map)
+        self.frame_map = self.fix_strings(frame_map)
+        print self.topic_map
+        print self.frame_map
+
+
+if __name__ == "__main__":
+    parser = argparse.ArgumentParser(description='Fix bag topics/frame_ids')
+    parser.add_argument('--in', '-i', dest='infile', type=str, required=True,
+                        help='Bag to read and fix')
+    parser.add_argument('--out', '-o', type=str, dest='outfile',
+                        help='Bag to output with fixed content, defaults to INPUTFILENAME_fixed.bag')
+    parser.add_argument('--topics', required=False, nargs='+', type=str, metavar='oldtopic:newtopic',
+                        help="list of topics to remap, separated by a colon, ex /down_cam/:/camera/down/ /my_odom:/odom. \
+                        If a topic ends in a slash (ex: /cameras/:/cams/), all topics under that prefix will be remapped")
+    parser.add_argument('--frames', required=False, nargs='+', type=str, metavar='oldframe:newframe',
+                        help="list of frame_ids in headers to be mapped, ex: my_camera:cam_front")
+    args = parser.parse_args()
+    fixer = BagFixer(args.topics, args.frames)
+    fixer.fix_bag(args.infile, args.outfile)
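Note (not part of the patch): a typical invocation of the script above, with hypothetical bag and topic names taken from the argparse help text:

    python fix_bag.py -i day1.bag -o day1_fixed.bag \
        --topics /down_cam/:/camera/down/ /my_odom:/odom \
        --frames my_camera:cam_front

Everything under /down_cam/ is remapped under /camera/down/, /my_odom must match exactly, and any header or TF frame_id equal to my_camera is rewritten to cam_front.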
From acc6c81be13d1572cf37fdcb91aed17536a3c547 Mon Sep 17 00:00:00 2001
From: David Soto
Date: Mon, 10 Apr 2017 13:15:59 -0400
Subject: [PATCH 257/267] GREAT MERGE: make DepthStamped.msg (mil_msgs)

This new message should be used by the depth driver instead of
Float64Stamped
---
 utils/mil_msgs/msg/DepthStamped.msg   | 4 ++++
 utils/mil_msgs/msg/Float64Stamped.msg | 2 --
 2 files changed, 4 insertions(+), 2 deletions(-)
 create mode 100644 utils/mil_msgs/msg/DepthStamped.msg
 delete mode 100644 utils/mil_msgs/msg/Float64Stamped.msg

diff --git a/utils/mil_msgs/msg/DepthStamped.msg b/utils/mil_msgs/msg/DepthStamped.msg
new file mode 100644
index 00000000..87158d40
--- /dev/null
+++ b/utils/mil_msgs/msg/DepthStamped.msg
@@ -0,0 +1,4 @@
+Header header
+
+# A depth in meters
+float64 depth
diff --git a/utils/mil_msgs/msg/Float64Stamped.msg b/utils/mil_msgs/msg/Float64Stamped.msg
deleted file mode 100644
index 5c2f1136..00000000
--- a/utils/mil_msgs/msg/Float64Stamped.msg
+++ /dev/null
@@ -1,2 +0,0 @@
-Header header
-float64 data

From f9bf045bb57d901a0ba21f3ed944035a8a8fec2e Mon Sep 17 00:00:00 2001
From: David Soto
Date: Mon, 10 Apr 2017 13:35:44 -0400
Subject: [PATCH 258/267] GREAT MERGE: deprecate Float64Stamped.msg

Replaced with RangeStamped. DVL related code that previously used
Float64Stamped should be transitioned to use RangeStamped instead.
Other depth driver related code should be changed to use DepthStamped.
The only usage of Float64Stamped that is not DVL or depth driver
related is in the start gate mission and should be removed
(@RustyBamboo).
---
 utils/mil_msgs/msg/RangeStamped.msg | 4 ++++
 1 file changed, 4 insertions(+)
 create mode 100644 utils/mil_msgs/msg/RangeStamped.msg

diff --git a/utils/mil_msgs/msg/RangeStamped.msg b/utils/mil_msgs/msg/RangeStamped.msg
new file mode 100644
index 00000000..a9381076
--- /dev/null
+++ b/utils/mil_msgs/msg/RangeStamped.msg
@@ -0,0 +1,4 @@
+Header header
+
+# A range in meters
+float64 range

From d7972e24f8c8e13766b6f666836617f827926412 Mon Sep 17 00:00:00 2001
From: David Soto
Date: Sun, 2 Apr 2017 01:24:37 -0400
Subject: [PATCH 259/267] VISION: integrate camera_lidar_transformer

Node used to be in the great_merge_purgatory subdirectory. All other
nodes were considered not generic and removed from the repo.
---
 perception/mil_vision/CMakeLists.txt |   9 +
 .../great_merge_purgatory/README.txt |   5 -
 .../nodes/camera_to_lidar/main.cpp   |   9 -
 .../nodes/database_color.py          | 533 ------------------
 .../nodes/object_classifier.py       |  75 ---
 .../nodes/shooter_finder.py          | 264 ---------
 .../camera_lidar_transformer.hpp}    |   0
 .../camera_lidar_transformer.cpp}    |  41 +-
 8 files changed, 36 insertions(+), 900 deletions(-)
 delete mode 100644 perception/mil_vision/great_merge_purgatory/README.txt
 delete mode 100644 perception/mil_vision/great_merge_purgatory/nodes/camera_to_lidar/main.cpp
 delete mode 100755 perception/mil_vision/great_merge_purgatory/nodes/database_color.py
 delete mode 100755 perception/mil_vision/great_merge_purgatory/nodes/object_classifier.py
 delete mode 100755 perception/mil_vision/great_merge_purgatory/nodes/shooter_finder.py
 rename perception/mil_vision/{great_merge_purgatory/nodes/camera_to_lidar/CameraLidarTransformer.hpp => include/camera_lidar_transformer.hpp} (100%)
 rename perception/mil_vision/{great_merge_purgatory/nodes/camera_to_lidar/CameraLidarTransformer.cpp => src/camera_lidar_transformer.cpp} (89%)

diff --git a/perception/mil_vision/CMakeLists.txt b/perception/mil_vision/CMakeLists.txt
index dda345ad..7ce15ebb 100644
--- a/perception/mil_vision/CMakeLists.txt
+++ b/perception/mil_vision/CMakeLists.txt
@@ -95,3 +95,12 @@ target_link_libraries(
   ${OpenCV_INCLUDE_DIRS}
 )
 
+add_executable(camera_lidar_transformer src/camera_lidar_transformer.cpp)
+add_dependencies(camera_lidar_transformer mil_vision_lib ${catkin_EXPORTED_TARGETS})
+target_link_libraries(
+  camera_lidar_transformer
+  mil_vision_lib
+  ${catkin_LIBRARIES}
+  ${Boost_LIBRARIES}
+  ${OpenCV_INCLUDE_DIRS}
+)
diff --git a/perception/mil_vision/great_merge_purgatory/README.txt b/perception/mil_vision/great_merge_purgatory/README.txt
deleted file mode 100644
index 48fe2616..00000000
--- a/perception/mil_vision/great_merge_purgatory/README.txt
+++ /dev/null
@@ -1,5 +0,0 @@
-Some of these files can be made generic and therefore be able to stay as a part of mil_common. Until that is done, they will remain in this folder.
-Anything that remains in the purgatory after the end of the month will be removed from this repo.
- -- David Soto - 3/17/16 diff --git a/perception/mil_vision/great_merge_purgatory/nodes/camera_to_lidar/main.cpp b/perception/mil_vision/great_merge_purgatory/nodes/camera_to_lidar/main.cpp deleted file mode 100644 index ff70110c..00000000 --- a/perception/mil_vision/great_merge_purgatory/nodes/camera_to_lidar/main.cpp +++ /dev/null @@ -1,9 +0,0 @@ -#include -#include "CameraLidarTransformer.hpp" - -int main (int argc, char** argv) -{ - ros::init (argc, argv, "camera_lidar_transformer"); - CameraLidarTransformer transformer; - ros::spin (); -} diff --git a/perception/mil_vision/great_merge_purgatory/nodes/database_color.py b/perception/mil_vision/great_merge_purgatory/nodes/database_color.py deleted file mode 100755 index 7052a20a..00000000 --- a/perception/mil_vision/great_merge_purgatory/nodes/database_color.py +++ /dev/null @@ -1,533 +0,0 @@ -#!/usr/bin/env python -from __future__ import division - -import cv2 -import numpy as np -from collections import deque -from copy import deepcopy - -import rospy -import tf -import tf.transformations as trns - -import sensor_msgs.point_cloud2 as pc2 -from navigator_msgs.srv import ObjectDBQuery, ObjectDBQueryRequest, ColorRequest, ColorRequestResponse -from navigator_msgs.msg import ColoramaDebug -from sensor_msgs.msg import CameraInfo, Image -from nav_msgs.msg import Odometry - -from cv_bridge import CvBridge, CvBridgeError -from image_geometry import PinholeCameraModel - -import navigator_tools -from navigator_tools import fprint as _fprint - - -camera_root = "/stereo/right" # /camera_root/root - -ros_t = lambda t: rospy.Duration(t) -fprint = lambda *args, **kwargs: _fprint(title="COLORAMA", time="", *args, **kwargs) -p2np = navigator_tools.point_to_numpy - -class ImageHolder(object): - @classmethod - def from_msg(cls, msg): - time = msg.header.stamp - fprint("Got image! {}".format(time.to_sec())) - try: - image = CvBridge().imgmsg_to_cv2(msg) - return cls(image, time) - except CvBridgeError, e: - # Intentionally absorb CvBridge Errors - rospy.logerr(e) - - return cls() - - def __init__(self, image=None, time=None): - self.image = image - self.time = time - - def __repr__(self): - if self.contains_valid_image: - return "Image from t={}".format(self.time.to_sec()) - return "Invalid Image" - - @property - def contains_valid_image(self): - return self.image is not None and self.time is not None - - -class ImageHistory(object): - def __init__(self, image_topic, history=30): - assert history > 1 - - self._history_length = history - self._images = [] - rospy.Subscriber(image_topic, Image, self._add_image) - - @property - def newest_image(self): - if len(self._images) == 0: - return ImageHolder() - return max(self._images, key=lambda image: image.time) - - def _add_image(self, msg): - image = ImageHolder.from_msg(msg) - self._push(image) - - def _push(self, data): - ''' - Push to history list and only keep the specified length of history. - This enforces that all images are valid. - ''' - if data.contains_valid_image: - self._images.append(data) - self._images = self._images[-self._history_length:] - - def get_around_time(self, time, margin=.5, timeout=.5): - ''' - Returns the image that is closest to the specified time. - Returns `None` if closest image is more than `margin` seconds away. 
- ''' - start_time = rospy.Time.now() - while rospy.Time.now() - start_time <= ros_t(timeout): - if len(self._images) == 0: - continue - - closest = min(self._images, key=lambda image: abs(image.time - time)) - if abs(closest.time - time) > ros_t(margin): - continue # Return invalid image - - return closest - - return ImageHolder() - -class DebugImage(object): - def __init__(self, topic, encoding="bgr8", queue_size=1, prd=1): - self.bridge = CvBridge() - self.encoding = encoding - self.im_pub = rospy.Publisher(topic, Image, queue_size=queue_size) - self.image = None - rospy.Timer(ros_t(prd), self.publish) - - def publish(self, *args): - if self.image is not None: - try: - image_message = self.bridge.cv2_to_imgmsg(self.image, self.encoding) - self.im_pub.publish(image_message) - except CvBridgeError, e: - # Intentionally absorb CvBridge Errors - rospy.logerr(e) - - -class Observation(object): - history_length = 100 # Default to 100 - - @classmethod - def as_message(self): - msg = ColoramaDebug() - msg.num_observations = len(self.hues) - - msg.mean_value = np.mean(self.values) - msg.hues = self.hues - - def __init__(self): - self.hues = deque([], maxlen=self.history_length) - self.values = deque([], maxlen=self.history_length) - self.q_errs = deque([], maxlen=self.history_length) - self.dists = deque([], maxlen=self.history_length) - - def __len__(self): - return len(self.hues) - - def __iadd__(self, (hue, value, dist, q_diff)): - """ - Add observations like: - >>> co += [0.65, 0.4, ..., 0.1] - """ - self.hues.append(hue) - self.values.append(value) - self.dists.append(dist) - self.q_errs.append(q_diff) - - return self - - def __repr__(self): - _str = 'hues: {}\nvalues: {}\ndists: {}\nq_errs: {}' - return _str.format(*np.round(map(np.array, [self.hues, self.values, self.dists, self.q_errs]), 3)) - - def extend(self, (hues, values, dists, q_diffs)): - """Add lists of data in at once""" - self.hues.extend(hues) - self.values.extend(values) - self.dists.extend(dists) - self.q_errs.extend(q_diffs) - - def compute_confidence(self, (value_w, dist_w, q_diff_w), get_conf=False, **kwargs): - """Don't try to compute weights with bad data (too few samples)""" - - # Get some value information - v_u = kwargs.get('v_u', 230) - v_sig = kwargs.get('v_sig', 30) - dist_sig = kwargs.get('dist_sig', 70) - q_sig = kwargs.get('q_sig', 1.3) - - # Compute data required before applying weights - guass = self._guass - value_errs = guass(self.values, v_u, v_sig) - dists = guass(self.dists, 5, dist_sig) - q_diffs = guass(self.q_errs, 0, q_sig) - - # Normalize weights - w_norm = value_w + dist_w + q_diff_w - value_w /= w_norm - dist_w /= w_norm - q_diff_w /= w_norm - - #print "1", hue_std_w, np.round(hue_std, 2), hue_std_w * hue_std - #print "2", value_w, np.round(value_errs, 2), value_w * value_errs - #print "3", dist_w, np.round(dists, 2), dist_w * dists - #print "4", q_diff_w, np.round(q_diffs, 2), q_diff_w * q_diffs - - # Compute normalized confidence - c = value_w * value_errs + dist_w * dists + q_diff_w * q_diffs - if get_conf: - return c, [value_errs, dists, q_diffs] - - return c - - def _guass(self, data, mean, sig): - return np.exp(-np.power(np.array(data).astype(np.float64) - mean, 2) / (2 * np.power(sig, 2))) - - -class Colorama(object): - def __init__(self): - info_topic = camera_root + "/camera_info" - image_topic = camera_root + "/image_rect_color" - - self.tf_listener = tf.TransformListener() - self.status_pub = rospy.Publisher("/database_color_status", ColoramaDebug, queue_size=1) - - self.odom = None - 
set_odom = lambda msg: setattr(self, "odom", navigator_tools.pose_to_numpy(msg.pose.pose)) - rospy.Subscriber("/odom", Odometry, set_odom) - fprint("Waiting for odom...") - while self.odom is None and not rospy.is_shutdown(): - rospy.sleep(1) - fprint("Odom found!", msg_color='green') - - db_request = rospy.ServiceProxy("/database/requests", ObjectDBQuery) - self.make_request = lambda **kwargs: db_request(ObjectDBQueryRequest(**kwargs)) - - self.image_history = ImageHistory(image_topic) - - # Wait for camera info, and exit if not found - fprint("Waiting for camera info on: '{}'".format(info_topic)) - while not rospy.is_shutdown(): - try: - camera_info_msg = rospy.wait_for_message(info_topic, CameraInfo, timeout=3) - except rospy.exceptions.ROSException: - rospy.sleep(1) - continue - break - - fprint("Camera info found!", msg_color="green") - self.camera_model = PinholeCameraModel() - self.camera_model.fromCameraInfo(camera_info_msg) - - self.debug = DebugImage("/colorama/debug", prd=.5) - - # These are color mappings from Hue values [0, 360] - self.color_map = {'red': np.radians(0), 'yellow': np.radians(60), - 'green': np.radians(120), 'blue': np.radians(200)} - - # RGB map for database setting - self.database_color_map = {'red': (255, 0, 0), 'yellow': (255, 255, 0), 'green': (0, 255, 0), 'blue': (0, 0, 255)} - - # ========= Some tunable parameters =================================== - self.update_time = 1 # s - - # Observation parameters - self.saturation_reject = 20 # Reject color obs with below this saturation - self.value_reject = 50 # Reject color obs with below this value - self.height_remove = 0.4 # Remove points that are this percent down on the object (%) - # 1 keeps all, .4 removes the bottom 40% - # Update parameters - self.history_length = 100 # How many of each color to keep - self.min_obs = 5 # Need at least this many observations before making a determination - self.conf_reject = .5 # When to reject an observation based on its confidence - - # Confidence weights - self.v_factor = 0.6 # Favor value values close to the nominal value - self.v_u = 200 # Mean of norm for variance error - self.v_sig = 60 # Sigma of norm for variance error - self.dist_factor = 0.3 # Favor being closer to the totem - self.dist_sig = 30 # Sigma of distance (m) - self.q_factor = 0 # Favor not looking into the sun - self.q_sig = 1.2 # Sigma of norm for quaternion error (rads) - - # Maps id => observations - self.colored = {} - - rospy.Timer(ros_t(self.update_time), self.do_observe) - - def _compute_average_angle(self, angles, weights): - """ - Returns weighted average of angles. - Tends to break down with too many angles 180 degrees of each other - try not to do that.
- """ - angles = np.array(angles) - if np.linalg.norm(weights) == 0: - return None - - w = weights / np.linalg.norm(weights) - s = np.sum(w * np.sin(angles)) - c = np.sum(w * np.cos(angles)) - avg_angle = np.arctan2(s, c) - return avg_angle - - def _get_quaternion_error(self, q, target_q): - """ - Returns an angular difference between q and target_q in radians - """ - dq = trns.quaternion_multiply(np.array(target_q), trns.quaternion_inverse(np.array(q))) - return 2 * np.arccos(dq[3]) - - def _get_closest_color(self, hue_angle): - """ - Returns a pair of the most likely color and the radian error associated with that prediction - `hue_angle` := The radian value associated with hue [0, 2*pi] - Colors are found from `self.color_map` - """ - c = np.cos(hue_angle) - s = np.sin(hue_angle) - error = np.inf - likely_color = 'undef' - for color, h_val in self.color_map.iteritems(): - cg = np.cos(h_val) - sg = np.sin(h_val) - # We need a signed error for some math later on so no absolute value - this_error = np.arctan2(sg*c - cg*s, cg*c + sg*s) - if np.abs(this_error) < np.abs(error): - error = this_error - likely_color = color - - fprint("Likely color: {} with a hue error of {} rads.".format(likely_color, np.round(error, 3))) - return [likely_color, error] - - def do_estimate(self, totem_id): - """Calculates best color estimate for a given totem id""" - fprint("DOING ESTIMATE", msg_color='blue') - if totem_id not in self.colored: - fprint("Totem ID {} not found!".format(totem_id), msg_color='red') - return None - - t_color = self.colored[totem_id] - - if len(t_color) < self.min_obs: - fprint("Only {} observations. {} required.".format(len(t_color), self.min_obs), msg_color='red') - return None - - kwargs = {'v_u': self.v_u, 'v_sig': self.v_sig, 'dist_sig': self.dist_sig, - 'q_factor': self.q_factor, 'q_sig': self.q_sig} - - w, weights = t_color.compute_confidence([self.v_factor, self.dist_factor, self.q_factor], True, **kwargs) - fprint("CONF: {}".format(w)) - if np.mean(w) < self.conf_reject: - return None - - hue_angles = np.radians(np.array(t_color.hues) * 2) - angle = self._compute_average_angle(hue_angles, w) - color = self._get_closest_color(angle) - - msg = t_color.as_message - msg.id = totem_id - msg.confidence = w - msg.labels = ["value_errs", "dists", "q_diffs"] - msg.weights = weights - msg.color = color[0] - msg.est_hues = angle * 2 - msg.hues = np.array(t_color.hues) * 2 - self.status_pub.publish(msg) - - fprint("Color: {}".format(color[0])) - return color - - def got_request(self, req): - # Threading blah blah something unsafe - colored_ids = [_id for _id, color_err in self.colored.iteritems() if self.valid_color(_id) == req.color] - - fprint("Colored IDs: {}".format(colored_ids), msg_color='blue') - print '\n' * 50 - if len(colored_ids) == 0: - return ColorRequestResponse(found=False) - - return ColorRequestResponse(found=True, ids=colored_ids) - - def do_observe(self, *args): - resp = self.make_request(name='totem') - - # If at least one totem was found start observing - if resp.found: - # Time of the database request - time_of_marker = resp.objects[0].header.stamp# - ros_t(1) - fprint("Looking for image at {}".format(time_of_marker.to_sec()), msg_color='yellow') - image_holder = self.image_history.get_around_time(time_of_marker) - if not image_holder.contains_valid_image: - t = self.image_history.newest_image.time - if t is None: - fprint("No images found.") - return - - fprint("No valid image found for t={} ({}) dt: {}".format(time_of_marker.to_sec(), t.to_sec(), (rospy.Time.now()
- t).to_sec()), msg_color='red') - return - header = navigator_tools.make_header(frame='/enu', stamp=image_holder.time) - image = image_holder.image - self.debug.image = np.copy(image) - - cam_tf = self.camera_model.tfFrame() - try: - fprint("Getting transform between /enu and {}...".format(cam_tf)) - self.tf_listener.waitForTransform("/enu", cam_tf, time_of_marker, ros_t(1)) - t_mat44 = self.tf_listener.asMatrix(cam_tf, header) - except tf.ExtrapolationException as e: - fprint("TF error found and excepted: {}".format(e)) - return - - for obj in resp.objects: - if len(obj.points) <= 1: - fprint("No points in object") - continue - - fprint("{} {}".format(obj.id, "=" * 50)) - - # Get object position in px coordinates to determine if it's in frame - object_cam = t_mat44.dot(np.append(p2np(obj.position), 1)) - object_px = map(int, self.camera_model.project3dToPixel(object_cam[:3])) - if not self._object_in_frame(object_cam): - fprint("Object not in frame") - continue - - # Get enu points associated with this totem and remove ones that are too low - points_np = np.array(map(p2np, obj.points)) - height = np.max(points_np[:, 2]) - np.min(points_np[:, 2]) - if height < .1: - # If the height of the object is too small, skip (units are meters) - fprint("Object too small") - continue - - threshold = np.min(points_np[:, 2]) + self.height_remove * height - points_np = points_np[points_np[:, 2] > threshold] - - # Shove ones in there to make homogenous points to get points in image frame - points_np_homo = np.hstack((points_np, np.ones((points_np.shape[0], 1)))).T - points_cam = t_mat44.dot(points_np_homo).T - points_px = map(self.camera_model.project3dToPixel, points_cam[:, :3]) - - [cv2.circle(self.debug.image, tuple(map(int, p)), 2, (255, 255, 255), -1) for p in points_px] - - # Get color information from the points - roi = self._get_ROI_from_points(points_px) - h, s, v = self._get_hsv_from_ROI(roi, image) - - # Compute parameters that go into the confidence - boat_q = self.odom[1] - target_q = self._get_solar_angle() - q_err = self._get_quaternion_error(boat_q, target_q) - - dist = np.linalg.norm(self.odom[0] - p2np(obj.position)) - - fprint("H: {}, S: {}, V: {}".format(h, s, v)) - fprint("q_err: {}, dist: {}".format(q_err, dist)) - - # Add to database and setup debug image - if s < self.saturation_reject or v < self.value_reject: - err_msg = "The colors aren't expressive enough s: {} ({}) v: {} ({}). Rejecting." 
- fprint(err_msg.format(s, self.saturation_reject, v, self.value_reject), msg_color='red') - - else: - if obj.id not in self.colored: - self.colored[obj.id] = Observation() - - # Add this observation in - self.colored[obj.id] += h, v, dist, q_err - print self.colored[obj.id] - - rgb = (0, 0, 0) - color = self.do_estimate(obj.id) - # Do we have a valid color estimate - if color: - fprint("COLOR IS VALID", msg_color='green') - rgb = self.database_color_map[color[0]] - - cmd = '{name}={rgb[0]},{rgb[1]},{rgb[2]},{_id}' - self.make_request(cmd=cmd.format(name=obj.name,_id=obj.id, rgb=rgb)) - - bgr = rgb[::-1] - cv2.circle(self.debug.image, tuple(object_px), 10, bgr, -1) - font = cv2.FONT_HERSHEY_SIMPLEX - cv2.putText(self.debug.image, str(obj.id), tuple(object_px), font, 1, bgr, 2) - - def _get_solar_angle(self): - return [0, 0, 0, 1] - - def _get_ROI_from_points(self, image_points): - cnt = np.array([image_points]).astype(np.float32) - rect = cv2.minAreaRect(cnt) - box = cv2.cv.BoxPoints(rect) - box = np.int0(box) - fprint("Drawing rectangle") - cv2.drawContours(self.debug.image, [box], 0, (0, 0, 255), 2) - return box - - def _get_hsv_from_ROI(self, roi, img): - mask = np.zeros(img.shape[:2], np.uint8) - cv2.drawContours(mask, [roi], 0, 255, -1) - bgr = cv2.mean(img, mask) - # We have to treat that bgr value as a 3d array to get cvtColor to work - bgr = np.array([[bgr]])[:, :3].astype(np.uint8) - h, s, v = cv2.cvtColor(bgr, cv2.COLOR_BGR2HSV)[0, 0] - - return h, s, v - - # Now check that s and v are in a good range - if s < self.saturation_reject or v < self.value_reject: - err_msg = "The colors aren't expressive enough s: {} ({}) v: {} ({}). Rejecting." - fprint(err_msg.format(s, self.saturation_reject, v, self.value_reject), msg_color='red') - return None - - # Compute hue error in SO2 - hue_angle = np.radians(h * 2) - - # Display the current values - font = cv2.FONT_HERSHEY_SIMPLEX - cv2.putText(self.debug.image, "h_val: {}".format(np.degrees(hue_angle)), - tuple(roi[1]), font, 1, (255, 255, 255), 2) - - likely_color, error = self.get_closest_color(hue_angle) - - if error > self.hue_error_reject: - fprint("Closest color was {} with an error of {} rads (> {}). Rejecting.".format(likely_color, np.round(error, 3), - self.hue_error_reject), msg_color='red') - return None - - fprint("Likely color: {} with an hue error of {} rads.".format(likely_color, np.round(error, 3))) - return [likely_color, error] - - def _object_in_frame(self, object_point): - """ - Returns if the object is in frame given the centroid of the object. - `object_point` should be in the camera_model's frame. 
- """ - if object_point[2] < 0: - return False - - px = np.array(self.camera_model.project3dToPixel(object_point)) - resoultion = self.camera_model.fullResolution() - return not (np.any([0, 0] > px) or np.any(px > resoultion)) - - -if __name__ == "__main__": - rospy.init_node("database_colorer") - c = Colorama() - rospy.spin() diff --git a/perception/mil_vision/great_merge_purgatory/nodes/object_classifier.py b/perception/mil_vision/great_merge_purgatory/nodes/object_classifier.py deleted file mode 100755 index 83c19b31..00000000 --- a/perception/mil_vision/great_merge_purgatory/nodes/object_classifier.py +++ /dev/null @@ -1,75 +0,0 @@ -#!/usr/bin/env python -import txros -from txros import util -from twisted.internet import reactor, defer -from object_classification import LidarToImage -from navigator_msgs.srv import CameraDBQuery, CameraDBQueryResponse -from object_classification import Config, depicklify -from navigator_tools import CvDebug, fprint -import os.path -import numpy as np -import cv2 - - -class ObjectClassifier(object): - - def __init__(self, nh, classifier, config): - self.nh = nh - # load up the trained svm via pickle - self.classifier = classifier - self.config = config - self.debug = CvDebug(nh=nh) - - @util.cancellableInlineCallbacks - def init_(self): - yield self.nh.advertise_service("/camera_database/requests", CameraDBQuery, self.database_request) - self.lidar_to_image = yield LidarToImage(self.nh).init_() - defer.returnValue(self) - - @util.cancellableInlineCallbacks - def database_request(self, req): - id_ = req.id - name = req.name - img, rois = yield self.lidar_to_image.get_object_rois(name=str(id_)) - resp = CameraDBQueryResponse() - if img is None or len(rois) == 0: - fprint("Incorrect from reading from the lidar", msg_color='red') - resp.found = False - defer.returnValue(resp) - - r = rois[0] - roi = r["img"] - bbox = r["bbox"] - draw = img.copy() - - desc = self.config.descriptor.get_descriptor(roi) - clss, prob = self.classifier.classify(desc) - clss = self.config.to_class(clss) - - cv2.rectangle(draw, (bbox[0], bbox[1]), (bbox[0] + bbox[2], bbox[1] + bbox[3]), (0, 0, 255)) - cv2.putText(draw, clss + ": " + str(np.round(prob)), (bbox[0], bbox[1]), 1, 1.0, (0, 255, 0)) - self.debug.add_image(draw, "stuff", topic="panda") - - if name == clss and prob > .8: - fprint("Object Found", msg_color='green') - resp.found = True - else: - fprint("Object missclassified with class {}, prob {}".format(clss, prob), msg_color='red') - resp.found = False - defer.returnValue(resp) - - -@util.cancellableInlineCallbacks -def main(): - nh = yield txros.NodeHandle.from_argv("object_classifier") - config = Config() - class_file = os.path.abspath(__file__) - class_file = class_file.split("nodes")[0] - class_file = class_file + "object_classification/train.p" - - cl = depicklify(class_file) - oc = yield ObjectClassifier(nh, cl, config).init_() - -if __name__ == "__main__": - reactor.callWhenRunning(main) - reactor.run() diff --git a/perception/mil_vision/great_merge_purgatory/nodes/shooter_finder.py b/perception/mil_vision/great_merge_purgatory/nodes/shooter_finder.py deleted file mode 100755 index 2b0e4ab2..00000000 --- a/perception/mil_vision/great_merge_purgatory/nodes/shooter_finder.py +++ /dev/null @@ -1,264 +0,0 @@ -#!/usr/bin/python - -from __future__ import division - -import math -import numpy -from twisted.internet import defer, threads -import txros -from txros import util, tf -import cv2 -import traceback -import time -import operator -import random - 
-defer.setDebugging(True) - -from navigator_tools import msg_helpers - -from tf import transformations -from sensor_msgs.msg import PointCloud2 -import sensor_msgs.point_cloud2 as pc2 -from navigator_msgs.srv import ObjectDBQuery, ObjectDBQueryRequest -from visualization_msgs.msg import Marker, MarkerArray -from std_msgs.msg import Header, ColorRGBA -from geometry_msgs.msg import Pose, Vector3, Point, Quaternion, PoseStamped -from nav_msgs.msg import Odometry - -def rect_lines(rect): - for i in xrange(4): - angle = rect[4] + i/4 * 2*math.pi - normal = numpy.array([math.cos(angle), math.sin(angle)]) - d_normal_over_d_angle = numpy.array([-math.sin(angle), math.cos(angle)]) - p = rect[:2] + (rect[2] if i%2 == 0 else rect[3])/2 * normal - yield (p, normal), numpy.array([ - [1, 0, normal[0]/2 if i%2==0 else 0, normal[0]/2 if i%2==1 else 0, (rect[2] if i%2 == 0 else rect[3])/2 * d_normal_over_d_angle[0]], - [0, 1, normal[1]/2 if i%2==0 else 0, normal[1]/2 if i%2==1 else 0, (rect[2] if i%2 == 0 else rect[3])/2 * d_normal_over_d_angle[1]], - [0, 0, 0, 0, d_normal_over_d_angle[0]], - [0, 0, 0, 0, d_normal_over_d_angle[1]], - ]) - -def dist_from_line(p, line, J): - return (p - line[0]).dot(line[1]), numpy.array([-line[1][0], -line[1][1], p[0]-line[0][0], p[1]-line[0][1]]).dot(J) - -def dist_from_rect(p, rect): - return min([dist_from_line(p, line, J) for line, J in rect_lines(rect)], key=lambda (dist, J): abs(dist)) - -def fit(good_points, rect): - print 'count', len(good_points) - for i in xrange(1): - r, J = zip(*[dist_from_rect(p, rect) for p in good_points]) - r = numpy.array(r) - J = numpy.vstack(list(J)) - cost = numpy.sum(r*r) - print 'cost', i, cost - try: - rect = rect - numpy.linalg.inv(J.T.dot(J)).dot(J.T).dot(r) - except numpy.linalg.LinAlgError: - traceback.print_exc() - return None - #print rect - return rect - -center_holder = [None] - -@util.cancellableInlineCallbacks -def poller(nh): - db_srv = nh.get_service_client('/database/requests', ObjectDBQuery) - - while True: - yield util.wall_sleep(3) - - while True: - try: - db_res = yield db_srv(ObjectDBQueryRequest( - name='shooter', - cmd='', - )) - except: - traceback.print_exc() - yield util.wall_sleep(1) - else: - break - - if not db_res.found: - print 'shooter not found' - continue - - obj = db_res.objects[0] - - points = map(msg_helpers.ros_to_np_3D, obj.points) - - if points: - center_holder[0] = numpy.mean(points, axis=0) - -@util.cancellableInlineCallbacks -def main(): - try: - center = None - nh = yield txros.NodeHandle.from_argv('shooter_finder', anonymous=True) - - pc_sub = nh.subscribe('/velodyne_points', PointCloud2) - tf_listener = tf.TransformListener(nh) - marker_pub = nh.advertise('/shooter_fit', MarkerArray) - result_pub = nh.advertise('/shooter_pose', PoseStamped) - odom_sub = nh.subscribe('/odom', Odometry) - - poller(nh) - - while True: - ts = [time.time()] - yield util.wall_sleep(.1) - ts.append(time.time()) - - center = center_holder[0] - odom = yield odom_sub.get_next_message() - cloud = yield pc_sub.get_next_message() - - if center is None: - yield util.wall_sleep(1) - continue - - #print center - - Z_MIN = 0 - RADIUS = 5 - - ts.append(time.time()) - - print 'start' - print cloud.header.stamp - try: - transform = yield tf_listener.get_transform('/enu', cloud.header.frame_id, cloud.header.stamp) - except tf.TooPastError: - print 'TooPastError!' 
- continue - gen = numpy.array(list(pc2.read_points(cloud, skip_nans=True, field_names=("x", "y", "z")))).T - print gen.shape - print transform._p - gen = (transform._q_mat.dot(gen) + numpy.vstack([transform._p]*gen.shape[1]).T).T - good_points = numpy.array([(x[0], x[1]) for x in gen if x[2] > Z_MIN and math.hypot(x[0]-center[0], x[1]-center[1]) < RADIUS]) - print 'end' - - if len(good_points) < 10: - print 'no good points', len(good_points) - continue - - #if len(good_points) > 100: - # good_points = numpy.array(random.sample(list(good_points), 100)) - #print good_points - - ts.append(time.time()) - - rect = cv2.minAreaRect(good_points.astype(numpy.float32)) - rect = numpy.array(rect[0] + rect[1] + (math.radians(rect[2]),)) - - - ts.append(time.time()) - - #rect = yield threads.deferToThread(fit, good_points, rect) - if rect is None: - print 'bad fit' - continue - - rect = rect[:2], rect[2:4], rect[4] - - print 'rect:', rect - - - ts.append(time.time()) - - marker_pub.publish(MarkerArray( - markers=[Marker( - header=Header( - frame_id='/enu', - stamp=nh.get_time(), - ), - ns='shooter_ns', - id=1, - type=Marker.CUBE, - action=Marker.ADD, - pose=Pose( - position=Point(rect[0][0], rect[0][1], .5), - orientation=Quaternion(*transformations.quaternion_about_axis(rect[2], [0, 0, 1])), - ), - scale=Vector3(rect[1][0], rect[1][1], 2), - color=ColorRGBA(1, 1, 1, .5), - )], - )) - - possibilities = [] - for i in xrange(4): - angle = rect[2] + i/4 * 2*math.pi - normal = numpy.array([math.cos(angle), math.sin(angle)]) - p = numpy.array(rect[0]) + (rect[1][0] if i%2 == 0 else rect[1][1])/2 * normal - possibilities.append((normal, p, transformations.quaternion_about_axis(angle, [0, 0, 1]), rect[1][0] if i%2 == 1 else rect[1][1])) - best = max(possibilities, key=lambda (normal, p, q, size): (msg_helpers.ros_to_np_3D(odom.pose.pose.position)[:2] - rect[0]).dot(normal)) - - print 'side length:', best[-1] - if abs(best[-1] - 1.8) > .25: - print 'filtered out based on side length' - continue - - front_points = [p for p in good_points if abs((p - best[1]).dot(best[0])) < .2] - print 'len(front_points)', len(front_points) - a, b = numpy.linalg.lstsq( - numpy.vstack([ - [p[0] for p in front_points], - [p[1] for p in front_points], - ]).T, - numpy.ones(len(front_points)), - )[0] - m, b_ = numpy.linalg.lstsq( - numpy.vstack([ - [p[0] for p in front_points], - numpy.ones(len(front_points)), - ]).T, - [p[1] for p in front_points], - )[0] - print 'a, b', a, b, m, b_ - - print 'RES', numpy.std([numpy.array([a, b]).dot(p) for p in good_points]) - - # x = a, b - # x . p = 1 - # |x| (||x|| . p) = 1 - # ||x|| . 
p = 1 / |x| - - normal = numpy.array([a, b]) - dist = 1 / numpy.linalg.norm(normal) - normal = normal / numpy.linalg.norm(normal) - p = dist * normal - - print 'ZZZ', p.dot(numpy.array([a, b])) - - perp = numpy.array([normal[1], -normal[0]]) - x = (best[1] - p).dot(perp) - p = p + x * perp - if normal.dot(best[0]) < 0: normal = -normal - q = transformations.quaternion_about_axis(math.atan2(normal[1], normal[0]), [0, 0, 1]) - - print 'XXX', p, best[1] - - #sqrt(a2 + b2) (a x + b y) = 1 - - result_pub.publish(PoseStamped( - header=Header( - frame_id='/enu', - stamp=nh.get_time(), - ), - pose=Pose( - position=Point(p[0], p[1], 0), - orientation=Quaternion(*q), - ), - )) - - - ts.append(time.time()) - - print 'timing', ts[-1] - ts[0], map(operator.sub, ts[1:], ts[:-1]) - except: - traceback.print_exc() - -util.launch_main(main) diff --git a/perception/mil_vision/great_merge_purgatory/nodes/camera_to_lidar/CameraLidarTransformer.hpp b/perception/mil_vision/include/camera_lidar_transformer.hpp similarity index 100% rename from perception/mil_vision/great_merge_purgatory/nodes/camera_to_lidar/CameraLidarTransformer.hpp rename to perception/mil_vision/include/camera_lidar_transformer.hpp diff --git a/perception/mil_vision/great_merge_purgatory/nodes/camera_to_lidar/CameraLidarTransformer.cpp b/perception/mil_vision/src/camera_lidar_transformer.cpp similarity index 89% rename from perception/mil_vision/great_merge_purgatory/nodes/camera_to_lidar/CameraLidarTransformer.cpp rename to perception/mil_vision/src/camera_lidar_transformer.cpp index ceba6808..019d56b2 100644 --- a/perception/mil_vision/great_merge_purgatory/nodes/camera_to_lidar/CameraLidarTransformer.cpp +++ b/perception/mil_vision/src/camera_lidar_transformer.cpp @@ -1,17 +1,17 @@ -#include "CameraLidarTransformer.hpp" +#include "camera_lidar_transformer.hpp" ros::Duration CameraLidarTransformer::MAX_TIME_ERR = ros::Duration(0,6E7); CameraLidarTransformer::CameraLidarTransformer() - : nh(ros::this_node::getName()), - tfBuffer(), - tfListener(tfBuffer, nh), - lidarSub(nh, "/velodyne_points", 10), - lidarCache(lidarSub, 10), - camera_info_received(false) - #ifdef DO_ROS_DEBUG - , - image_transport(nh) - #endif + : nh(ros::this_node::getName()), + tfBuffer(), + tfListener(tfBuffer, nh), + lidarSub(nh, "/velodyne_points", 10), + lidarCache(lidarSub, 10), + camera_info_received(false) + #ifdef DO_ROS_DEBUG + , + image_transport(nh) + #endif { #ifdef DO_ROS_DEBUG points_debug_publisher = image_transport.advertise("points_debug", 1); @@ -23,23 +23,27 @@ CameraLidarTransformer::CameraLidarTransformer() nh.param("camera_to_lidar_transform_topic",camera_to_lidar_transform_topic,"transform_camera"); transformServiceServer = nh.advertiseService(camera_to_lidar_transform_topic, &CameraLidarTransformer::transformServiceCallback, this); } + void CameraLidarTransformer::cameraInfoCallback(sensor_msgs::CameraInfo info) { camera_info = info; cam_model.fromCameraInfo(camera_info); camera_info_received = true; } + bool CameraLidarTransformer::inCameraFrame(cv::Point2d& point) { return point.x > 0 && point.x < camera_info.width && point.y > 0 && point.y < camera_info.height; } + void CameraLidarTransformer::drawPoint(cv::Mat& mat, cv::Point2d& point, cv::Scalar color) { if (point.x - 20 > 0 && point.x + 20 < mat.cols && point.y - 20 > 0 && point.y + 20 < mat.rows) cv::circle(mat, point, 3, color, -1); } -bool CameraLidarTransformer::transformServiceCallback(navigator_msgs::CameraToLidarTransform::Request &req, navigator_msgs::CameraToLidarTransform::Response 
&res) + +bool CameraLidarTransformer::transformServiceCallback(mil_msgs::CameraToLidarTransform::Request &req, mil_msgs::CameraToLidarTransform::Response &res) { if (!camera_info_received) { @@ -47,12 +51,14 @@ bool CameraLidarTransformer::transformServiceCallback(navigator_msgs::CameraToLi res.error = "NO CAMERA INFO"; return true; } + if (camera_info.header.frame_id != req.header.frame_id) { res.success = false; res.error = "DIFFERENT FRAME ID THAN SUBSCRIBED CAMERA"; return true; } + visualization_msgs::MarkerArray markers; std::vector nearbyLidar = lidarCache.getInterval(req.header.stamp-MAX_TIME_ERR, req.header.stamp+MAX_TIME_ERR); sensor_msgs::PointCloud2ConstPtr scloud; @@ -68,7 +74,7 @@ bool CameraLidarTransformer::transformServiceCallback(navigator_msgs::CameraToLi } if (!scloud) { res.success = false; - res.error = navigator_msgs::CameraToLidarTransform::Response::CLOUD_NOT_FOUND; + res.error = mil_msgs::CameraToLidarTransform::Response::CLOUD_NOT_FOUND; return true; } if (!tfBuffer.canTransform(req.header.frame_id, "velodyne", ros::Time(0), ros::Duration(1))) @@ -181,7 +187,7 @@ bool CameraLidarTransformer::transformServiceCallback(navigator_msgs::CameraToLi res.distance = res.closest.z; res.success = true; } else { - res.error = navigator_msgs::CameraToLidarTransform::Response::NO_POINTS_FOUND; + res.error = mil_msgs::CameraToLidarTransform::Response::NO_POINTS_FOUND; res.success = false; } @@ -197,3 +203,10 @@ bool CameraLidarTransformer::transformServiceCallback(navigator_msgs::CameraToLi return true; } + +int main (int argc, char** argv) +{ + ros::init (argc, argv, "camera_lidar_transformer"); + CameraLidarTransformer transformer; + ros::spin (); +} From 96744be5e3469f43f82b73ed21b2807d1596568c Mon Sep 17 00:00:00 2001 From: Kevin Allen Date: Sun, 2 Apr 2017 01:29:27 -0400 Subject: [PATCH 260/267] VISION: copy CameraToLidarTransform.srv from NaviGator --- utils/mil_msgs/srv/CameraToLidarTransform.srv | 12 ++++++++++++ 1 file changed, 12 insertions(+) create mode 100644 utils/mil_msgs/srv/CameraToLidarTransform.srv diff --git a/utils/mil_msgs/srv/CameraToLidarTransform.srv b/utils/mil_msgs/srv/CameraToLidarTransform.srv new file mode 100644 index 00000000..152ab55a --- /dev/null +++ b/utils/mil_msgs/srv/CameraToLidarTransform.srv @@ -0,0 +1,12 @@ +Header header #Stamp of time point was seen for tf +geometry_msgs/Point point #X and Y of point in camera frame, Z is ignored +uint16 tolerance #Number of pixels the projected 3D lidar point can be from the target point to be included in the response +--- +bool success #True if at least one point found in lidar and transformed +geometry_msgs/Point[] transformed #Points in 3-D in camera frame if success is true +geometry_msgs/Point closest #3D point that is the closest to the target point when transformed and projected +geometry_msgs/Vector3 normal #Normal unit vector in camera frame estimated from transformed points +float64 distance #mean z of transformed points +string error #Describes when went wrong if success if false +string CLOUD_NOT_FOUND=pointcloud not found +string NO_POINTS_FOUND=no points From a7fc227aa36dcda078b1c322a71b61475ff5d3a7 Mon Sep 17 00:00:00 2001 From: David Soto Date: Sun, 2 Apr 2017 01:36:05 -0400 Subject: [PATCH 261/267] VISION: move camera_stream_demo.cpp from nodes to src It was decided that all the nodes directory would only be used for python nodes, wile the src and include directories will include c++ nodes, not just library code. 
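
As a rough sketch of how the CameraToLidarTransform service introduced above can be exercised from Python: the service path below follows from the node name and the default of the camera_to_lidar_transform_topic parameter in camera_lidar_transformer.cpp, while the frame id, pixel, and tolerance values are placeholders rather than anything these patches fix.

    import rospy
    from geometry_msgs.msg import Point
    from mil_msgs.srv import CameraToLidarTransform, CameraToLidarTransformRequest

    rospy.init_node('camera_to_lidar_client')
    rospy.wait_for_service('/camera_lidar_transformer/transform_camera')
    transform = rospy.ServiceProxy('/camera_lidar_transformer/transform_camera',
                                   CameraToLidarTransform)

    req = CameraToLidarTransformRequest()
    req.header.stamp = rospy.Time.now()     # when the pixel was seen (used for tf and cloud lookup)
    req.header.frame_id = 'front_left_cam'  # placeholder: must match the subscribed camera's frame_id
    req.point = Point(x=320, y=240, z=0)    # target pixel in the camera frame; z is ignored
    req.tolerance = 50                      # max px distance of projected lidar points from the target

    res = transform(req)
    if res.success:
        print res.closest, res.normal, res.distance
    else:
        print res.error  # e.g. the CLOUD_NOT_FOUND or NO_POINTS_FOUND constants
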
--- perception/mil_vision/CMakeLists.txt | 2 +- perception/mil_vision/{nodes => src}/camera_stream_demo.cpp | 0 2 files changed, 1 insertion(+), 1 deletion(-) rename perception/mil_vision/{nodes => src}/camera_stream_demo.cpp (100%) diff --git a/perception/mil_vision/CMakeLists.txt b/perception/mil_vision/CMakeLists.txt index 7ce15ebb..82d5c948 100644 --- a/perception/mil_vision/CMakeLists.txt +++ b/perception/mil_vision/CMakeLists.txt @@ -85,7 +85,7 @@ add_library( target_include_directories(mil_vision_lib PUBLIC include/mil_vision_lib) target_link_libraries(mil_vision_lib mil_tools_lib ${catkin_LIBRARIES} ${Boost_LIBRARIES} ${PCL_COMMON_LIBRARIES} ${PCL_COMMON_LIBRARIES}) -add_executable(camera_stream_demo nodes/camera_stream_demo.cpp) +add_executable(camera_stream_demo src/camera_stream_demo.cpp) add_dependencies(camera_stream_demo mil_vision_lib ${catkin_EXPORTED_TARGETS}) target_link_libraries( camera_stream_demo diff --git a/perception/mil_vision/nodes/camera_stream_demo.cpp b/perception/mil_vision/src/camera_stream_demo.cpp similarity index 100% rename from perception/mil_vision/nodes/camera_stream_demo.cpp rename to perception/mil_vision/src/camera_stream_demo.cpp From 7cd0f5e4dcf131d8f6027da012ec61b52e09c5c1 Mon Sep 17 00:00:00 2001 From: David Soto Date: Mon, 10 Apr 2017 23:23:50 -0400 Subject: [PATCH 262/267] GREAT MERGE: include new msgs in mil_msgs cmkaelists --- utils/mil_msgs/CMakeLists.txt | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/utils/mil_msgs/CMakeLists.txt b/utils/mil_msgs/CMakeLists.txt index a65ab09c..a023a1dc 100644 --- a/utils/mil_msgs/CMakeLists.txt +++ b/utils/mil_msgs/CMakeLists.txt @@ -22,7 +22,8 @@ add_message_files(FILES PoseTwistStamped.msg PoseTwist.msg VelocityMeasurements.msg - Float64Stamped.msg + DepthStamped.msg + RangeStamped.msg VelocityMeasurement.msg Acceleration.msg ) From a61f148d266beeb12175090125ba086d50be92ff Mon Sep 17 00:00:00 2001 From: David Soto Date: Mon, 10 Apr 2017 23:34:08 -0400 Subject: [PATCH 263/267] GREAT MERGE: add camera_lidar_transform to mil_msgs cmake --- utils/mil_msgs/CMakeLists.txt | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/utils/mil_msgs/CMakeLists.txt b/utils/mil_msgs/CMakeLists.txt index a65ab09c..1bbaccef 100644 --- a/utils/mil_msgs/CMakeLists.txt +++ b/utils/mil_msgs/CMakeLists.txt @@ -27,6 +27,10 @@ add_message_files(FILES Acceleration.msg ) +add_service_files(FILES + CameraToLidarTransform.srv +) + find_package(Eigen 3 REQUIRED) include_directories(${Eigen_INCLUDE_DIRS}) generate_messages( From 652d9ae9e2ccbeadb2cc5969f96f866b769f7e9f Mon Sep 17 00:00:00 2001 From: Kevin Allen Date: Mon, 10 Apr 2017 19:45:42 -0400 Subject: [PATCH 264/267] REPO: various changes for sub8 integration * fix up namespaces in mil_vision * move mil_msgs stuff that isnt messages to mil_tools --- perception/mil_vision/CMakeLists.txt | 3 ++- .../include/mil_vision_lib/active_contours.hpp | 4 ++-- .../mil_vision_lib/colorizer/camera_observer.hpp | 4 ++-- .../colorizer/color_observation.hpp | 4 ++-- .../include/mil_vision_lib/colorizer/common.hpp | 4 ++-- .../mil_vision_lib/colorizer/pcd_colorizer.hpp | 4 ++-- .../colorizer/single_cloud_processor.hpp | 4 ++-- .../include/mil_vision_lib/cv_tools.hpp | 2 +- .../image_acquisition/camera_frame.hpp | 6 +++--- .../image_acquisition/camera_frame_sequence.hpp | 4 ++-- .../image_acquisition/camera_model.hpp | 4 ++-- .../image_acquisition/ros_camera_stream.hpp | 12 ++++++------ .../include/mil_vision_lib/image_filtering.hpp | 4 ++-- 
.../mil_vision_lib/pcd_sub_pub_algorithm.hpp | 4 ++-- .../mil_vision_lib/point_cloud_algorithms.hpp | 4 ++-- .../include/mil_vision_lib/visualization.hpp | 2 +- perception/mil_vision/src/camera_stream_demo.cpp | 2 +- .../src/mil_vision_lib/active_contours.cpp | 8 ++++---- .../mil_vision_lib/colorizer/camera_observer.cpp | 6 +++--- .../colorizer/color_observation.cpp | 4 ++-- .../mil_vision_lib/colorizer/pcd_colorizer.cpp | 8 ++++---- .../colorizer/single_cloud_processor.cpp | 8 ++++---- .../mil_vision/src/mil_vision_lib/cv_utils.cc | 16 ++++++++-------- .../src/mil_vision_lib/image_filtering.cpp | 10 +++++----- .../src/mil_vision_lib/point_cloud_algorithms.cc | 4 ++-- .../src/mil_vision_lib/visualization.cc | 2 +- utils/mil_msgs/CMakeLists.txt | 14 ++------------ utils/mil_msgs/mil_msgs/__init__.py | 0 utils/mil_msgs/package.xml | 5 ----- utils/mil_msgs/setup.py | 10 ---------- utils/mil_tools/CMakeLists.txt | 16 ++++++++++++---- utils/mil_tools/include/mil_tools/mil_tools.hpp | 8 ++------ .../include/mil_tools}/msg_helpers.h | 2 +- .../include/mil_tools}/param_helpers.h | 2 +- utils/mil_tools/package.xml | 2 ++ .../scripts/param_sever} | 2 -- utils/mil_tools/src/mil_tools/mil_tools.cpp | 9 ++++----- 37 files changed, 93 insertions(+), 114 deletions(-) delete mode 100644 utils/mil_msgs/mil_msgs/__init__.py delete mode 100644 utils/mil_msgs/setup.py rename utils/{mil_msgs/include/mil_msgs => mil_tools/include/mil_tools}/msg_helpers.h (98%) rename utils/{mil_msgs/include/mil_msgs => mil_tools/include/mil_tools}/param_helpers.h (99%) rename utils/{mil_msgs/scripts/param_saver => mil_tools/scripts/param_sever} (95%) diff --git a/perception/mil_vision/CMakeLists.txt b/perception/mil_vision/CMakeLists.txt index 82d5c948..d5e3fc31 100644 --- a/perception/mil_vision/CMakeLists.txt +++ b/perception/mil_vision/CMakeLists.txt @@ -24,6 +24,7 @@ find_package(catkin pcl_ros tf2_sensor_msgs tf2_geometry_msgs + mil_tools ) find_package(PCL 1.7 REQUIRED) @@ -83,7 +84,7 @@ add_library( src/mil_vision_lib/colorizer/color_observation.cpp ) target_include_directories(mil_vision_lib PUBLIC include/mil_vision_lib) -target_link_libraries(mil_vision_lib mil_tools_lib ${catkin_LIBRARIES} ${Boost_LIBRARIES} ${PCL_COMMON_LIBRARIES} ${PCL_COMMON_LIBRARIES}) +target_link_libraries(mil_vision_lib ${catkin_LIBRARIES} ${Boost_LIBRARIES} ${PCL_COMMON_LIBRARIES} ${PCL_COMMON_LIBRARIES}) add_executable(camera_stream_demo src/camera_stream_demo.cpp) add_dependencies(camera_stream_demo mil_vision_lib ${catkin_EXPORTED_TARGETS}) diff --git a/perception/mil_vision/include/mil_vision_lib/active_contours.hpp b/perception/mil_vision/include/mil_vision_lib/active_contours.hpp index f3a4d658..ed187f0e 100644 --- a/perception/mil_vision/include/mil_vision_lib/active_contours.hpp +++ b/perception/mil_vision/include/mil_vision_lib/active_contours.hpp @@ -14,7 +14,7 @@ #include #include -namespace nav +namespace mil_vision { class SegmentDescription; @@ -65,4 +65,4 @@ class ActiveContour ActiveContour(); }; -} // namespace nav +} // namespace mil_vision diff --git a/perception/mil_vision/include/mil_vision_lib/colorizer/camera_observer.hpp b/perception/mil_vision/include/mil_vision_lib/colorizer/camera_observer.hpp index 73950829..58eefcc3 100644 --- a/perception/mil_vision/include/mil_vision_lib/colorizer/camera_observer.hpp +++ b/perception/mil_vision/include/mil_vision_lib/colorizer/camera_observer.hpp @@ -6,7 +6,7 @@ #include #include -namespace nav{ +namespace mil_vision{ class CameraObserver { @@ -42,4 +42,4 @@ class CameraObserver 
}; -} // namespace nav +} // namespace mil_vision diff --git a/perception/mil_vision/include/mil_vision_lib/colorizer/color_observation.hpp b/perception/mil_vision/include/mil_vision_lib/colorizer/color_observation.hpp index 890f8dff..fb7ec651 100644 --- a/perception/mil_vision/include/mil_vision_lib/colorizer/color_observation.hpp +++ b/perception/mil_vision/include/mil_vision_lib/colorizer/color_observation.hpp @@ -6,7 +6,7 @@ #include -namespace nav{ +namespace mil_vision{ struct alignas(16) ColorObservation { @@ -42,4 +42,4 @@ struct PointColorStats int n; }; -} //namespace nav \ No newline at end of file +} //namespace mil_vision \ No newline at end of file diff --git a/perception/mil_vision/include/mil_vision_lib/colorizer/common.hpp b/perception/mil_vision/include/mil_vision_lib/colorizer/common.hpp index 465e6240..2930fd3f 100644 --- a/perception/mil_vision/include/mil_vision_lib/colorizer/common.hpp +++ b/perception/mil_vision/include/mil_vision_lib/colorizer/common.hpp @@ -20,11 +20,11 @@ #include #include -namespace nav{ +namespace mil_vision{ template using PCD = pcl::PointCloud; template using PCDPtr = std::shared_ptr>; template using SPtrVector = std::vector>; template using UPtrVector = std::vector>; -} // namespace nav \ No newline at end of file +} // namespace mil_vision \ No newline at end of file diff --git a/perception/mil_vision/include/mil_vision_lib/colorizer/pcd_colorizer.hpp b/perception/mil_vision/include/mil_vision_lib/colorizer/pcd_colorizer.hpp index a81b1747..03a81e67 100644 --- a/perception/mil_vision/include/mil_vision_lib/colorizer/pcd_colorizer.hpp +++ b/perception/mil_vision/include/mil_vision_lib/colorizer/pcd_colorizer.hpp @@ -8,7 +8,7 @@ #include #include -namespace nav{ +namespace mil_vision{ class PcdColorizer{ /* @@ -69,4 +69,4 @@ class PcdColorizer{ }; // end class PcdColorizer -} // namespace nav +} // namespace mil_vision diff --git a/perception/mil_vision/include/mil_vision_lib/colorizer/single_cloud_processor.hpp b/perception/mil_vision/include/mil_vision_lib/colorizer/single_cloud_processor.hpp index 675c0a5c..446b9b78 100644 --- a/perception/mil_vision/include/mil_vision_lib/colorizer/single_cloud_processor.hpp +++ b/perception/mil_vision/include/mil_vision_lib/colorizer/single_cloud_processor.hpp @@ -7,7 +7,7 @@ #include #include -namespace nav { +namespace mil_vision { class SingleCloudProcessor { @@ -38,4 +38,4 @@ class SingleCloudProcessor }; -} // namespace nav +} // namespace mil_vision diff --git a/perception/mil_vision/include/mil_vision_lib/cv_tools.hpp b/perception/mil_vision/include/mil_vision_lib/cv_tools.hpp index 3a9fba30..bd608942 100644 --- a/perception/mil_vision/include/mil_vision_lib/cv_tools.hpp +++ b/perception/mil_vision/include/mil_vision_lib/cv_tools.hpp @@ -23,7 +23,7 @@ // #define SEGMENTATION_DEBUG -namespace nav { +namespace mil_vision { /// UTILS diff --git a/perception/mil_vision/include/mil_vision_lib/image_acquisition/camera_frame.hpp b/perception/mil_vision/include/mil_vision_lib/image_acquisition/camera_frame.hpp index dee3c788..c3cf6c5a 100644 --- a/perception/mil_vision/include/mil_vision_lib/image_acquisition/camera_frame.hpp +++ b/perception/mil_vision/include/mil_vision_lib/image_acquisition/camera_frame.hpp @@ -11,7 +11,7 @@ #include -namespace nav +namespace mil_vision { enum class PixelType @@ -139,7 +139,7 @@ class CameraFrame float_t _img_scale = 1.0f; // Stores the pixel data type - nav::PixelType TYPE = nav::PixelType::_UNKNOWN; + mil_vision::PixelType TYPE = mil_vision::PixelType::_UNKNOWN; 
/////////////////////////////////////////////////////////////////////////////////////////////// @@ -198,5 +198,5 @@ catch(const std::exception &e) ROS_WARN(e.what()); } -} // namespace nav +} // namespace mil_vision diff --git a/perception/mil_vision/include/mil_vision_lib/image_acquisition/camera_frame_sequence.hpp b/perception/mil_vision/include/mil_vision_lib/image_acquisition/camera_frame_sequence.hpp index 7b34d7ee..3369f122 100644 --- a/perception/mil_vision/include/mil_vision_lib/image_acquisition/camera_frame_sequence.hpp +++ b/perception/mil_vision/include/mil_vision_lib/image_acquisition/camera_frame_sequence.hpp @@ -6,7 +6,7 @@ #include #include -namespace nav +namespace mil_vision { template @@ -68,4 +68,4 @@ class CameraFrameSequence }; -} // namespace nav \ No newline at end of file +} // namespace mil_vision \ No newline at end of file diff --git a/perception/mil_vision/include/mil_vision_lib/image_acquisition/camera_model.hpp b/perception/mil_vision/include/mil_vision_lib/image_acquisition/camera_model.hpp index 477b5497..3e6c7ee2 100644 --- a/perception/mil_vision/include/mil_vision_lib/image_acquisition/camera_model.hpp +++ b/perception/mil_vision/include/mil_vision_lib/image_acquisition/camera_model.hpp @@ -3,7 +3,7 @@ #include #include -namespace nav{ +namespace mil_vision{ template class CameraModel{ @@ -59,4 +59,4 @@ Eigen::Matrix CameraModel::get_projection_matrix() return K * R * aug; } -} // namespace nav \ No newline at end of file +} // namespace mil_vision \ No newline at end of file diff --git a/perception/mil_vision/include/mil_vision_lib/image_acquisition/ros_camera_stream.hpp b/perception/mil_vision/include/mil_vision_lib/image_acquisition/ros_camera_stream.hpp index d68f23e3..bfe1a120 100644 --- a/perception/mil_vision/include/mil_vision_lib/image_acquisition/ros_camera_stream.hpp +++ b/perception/mil_vision/include/mil_vision_lib/image_acquisition/ros_camera_stream.hpp @@ -18,7 +18,7 @@ #include -namespace nav +namespace mil_vision { template @@ -130,7 +130,7 @@ class ROSCameraStream : public CameraFrameSequence bool ROSCameraStream::init(std::string &camera_topic) { - using mil::tools::operator "" _s; // convert raw string literal to std::string + using mil_tools::operator "" _s; // convert raw string literal to std::string _img_topic = camera_topic; bool success = false; image_transport::CameraSubscriber cam_sub; @@ -237,7 +237,7 @@ template typename ROSCameraStream::CamFrameConstPtr ROSCameraStream::getFrameFromTime(ros::Time desired_time) { - using mil::tools::operator "" _s; + using mil_tools::operator "" _s; // Check bounds on time if(desired_time < this->_start_time || desired_time > this->_end_time) @@ -269,7 +269,7 @@ template typename ROSCameraStream::CamFrameConstPtr ROSCameraStream::operator[](int i) { - using mil::tools::operator "" _s; + using mil_tools::operator "" _s; // Prevent adding new frames while frames are being accessed _mtx.lock(); @@ -337,4 +337,4 @@ ROSCameraStream::~ROSCameraStream() _cb_queue.disable(); } -} // namespace nav \ No newline at end of file +} // namespace mil_vision diff --git a/perception/mil_vision/include/mil_vision_lib/image_filtering.hpp b/perception/mil_vision/include/mil_vision_lib/image_filtering.hpp index 7200f102..3b7b08cf 100644 --- a/perception/mil_vision/include/mil_vision_lib/image_filtering.hpp +++ b/perception/mil_vision/include/mil_vision_lib/image_filtering.hpp @@ -7,7 +7,7 @@ #include -namespace nav +namespace mil_vision { /* @@ -39,4 +39,4 @@ cv::Mat makeRotInvariant(const cv::Mat &kernel, int 
rotations=8); */ float getRadialSymmetryAngle(const cv::Mat &kernel, float ang_res=0.1, bool deg=false); -} // namespace nav +} // namespace mil_vision diff --git a/perception/mil_vision/include/mil_vision_lib/pcd_sub_pub_algorithm.hpp b/perception/mil_vision/include/mil_vision_lib/pcd_sub_pub_algorithm.hpp index 42f3b700..7ef4bd3e 100644 --- a/perception/mil_vision/include/mil_vision_lib/pcd_sub_pub_algorithm.hpp +++ b/perception/mil_vision/include/mil_vision_lib/pcd_sub_pub_algorithm.hpp @@ -7,7 +7,7 @@ #include -namespace nav{ +namespace mil_vision{ template class PcdSubPubAlgorithm{ @@ -78,4 +78,4 @@ class PcdSubPubAlgorithm{ std::string _err_msg; }; -} //namespace nav \ No newline at end of file +} //namespace mil_vision \ No newline at end of file diff --git a/perception/mil_vision/include/mil_vision_lib/point_cloud_algorithms.hpp b/perception/mil_vision/include/mil_vision_lib/point_cloud_algorithms.hpp index 12558c2b..295dddcf 100644 --- a/perception/mil_vision/include/mil_vision_lib/point_cloud_algorithms.hpp +++ b/perception/mil_vision/include/mil_vision_lib/point_cloud_algorithms.hpp @@ -8,7 +8,7 @@ #include #include -namespace nav{ +namespace mil_vision { class PcdSubPubAlgorithm{ /* @@ -67,4 +67,4 @@ class PcdColorizer : public PcdSubPubAlgorithm{ sensor_msgs::CameraInfoConstPtr latest_frame_info_msg; }; -} // namwspace nav \ No newline at end of file +} // namwspace nav diff --git a/perception/mil_vision/include/mil_vision_lib/visualization.hpp b/perception/mil_vision/include/mil_vision_lib/visualization.hpp index d3c420c0..00b92f56 100644 --- a/perception/mil_vision/include/mil_vision_lib/visualization.hpp +++ b/perception/mil_vision/include/mil_vision_lib/visualization.hpp @@ -8,7 +8,7 @@ #include "ros/ros.h" #include "geometry_msgs/Pose.h" -namespace nav { +namespace mil_vision { // ******* 3D Visualization ******* class RvizVisualizer { diff --git a/perception/mil_vision/src/camera_stream_demo.cpp b/perception/mil_vision/src/camera_stream_demo.cpp index b6d6fae1..adbab43a 100755 --- a/perception/mil_vision/src/camera_stream_demo.cpp +++ b/perception/mil_vision/src/camera_stream_demo.cpp @@ -15,7 +15,7 @@ int main(int argc, char **argv) // Template argument should be cv::Vec3b for color images or uint8_t // For grayscale images - nav::ROSCameraStream left_cam_stream(nh, history_length); // Constructs empty inactive + mil_vision::ROSCameraStream left_cam_stream(nh, history_length); // Constructs empty inactive // camera stream object if(!left_cam_stream.init(cam_topic)) // Initializes object, if sucessful, object will automatically return -1; // store a history of images published to cam_topic in its buffer diff --git a/perception/mil_vision/src/mil_vision_lib/active_contours.cpp b/perception/mil_vision/src/mil_vision_lib/active_contours.cpp index e12ad7ed..a5fb1006 100644 --- a/perception/mil_vision/src/mil_vision_lib/active_contours.cpp +++ b/perception/mil_vision/src/mil_vision_lib/active_contours.cpp @@ -3,7 +3,7 @@ using namespace std; using namespace cv; -namespace nav +namespace mil_vision { namespace Perturbations @@ -226,7 +226,7 @@ bool ClosedCurve::validateCurve(std::vector& curve) if(count > 1) { auto conflict_pt = find_if(forbidden_neighbors.begin(), forbidden_neighbors.end(), - [pt](Point2i test_pt){ return nav::Perturbations::isNeighborPoint(pt, test_pt); }); + [pt](Point2i test_pt){ return mil_vision::Perturbations::isNeighborPoint(pt, test_pt); }); cout << "failure pts: " << curve[1] << "\t" << (conflict_pt != forbidden_neighbors.end()? 
Point2i(0,0) : *conflict_pt) << endl; return false; } @@ -236,7 +236,7 @@ bool ClosedCurve::validateCurve(std::vector& curve) if(count > 0) { auto conflict_pt = find_if(forbidden_neighbors.begin(), forbidden_neighbors.end(), - [pt](Point2i test_pt){ return nav::Perturbations::isNeighborPoint(pt, test_pt); }); + [pt](Point2i test_pt){ return mil_vision::Perturbations::isNeighborPoint(pt, test_pt); }); cout << "failure pts: " << curve[1] << "\t" << (conflict_pt != forbidden_neighbors.end()? Point2i(0,0) : *conflict_pt) << endl; return false; } @@ -250,4 +250,4 @@ vector calcCosts(const Mat& img, vector candid } -} // namespace nav +} // namespace mil_vision diff --git a/perception/mil_vision/src/mil_vision_lib/colorizer/camera_observer.cpp b/perception/mil_vision/src/mil_vision_lib/colorizer/camera_observer.cpp index eac494fe..99b83fdd 100644 --- a/perception/mil_vision/src/mil_vision_lib/colorizer/camera_observer.cpp +++ b/perception/mil_vision/src/mil_vision_lib/colorizer/camera_observer.cpp @@ -1,8 +1,8 @@ #include -namespace nav{ +namespace mil_vision{ -using mil::tools::operator "" _s; // converts to std::string +using mil_tools::operator "" _s; // converts to std::string CameraObserver::CameraObserver(ros::NodeHandle &nh, std::string &pcd_in_topic, std::string &cam_topic, size_t hist_size) : _nh{nh}, _cam_stream{nh, hist_size} @@ -70,4 +70,4 @@ ColorObservation::VecImg CameraObserver::get_color_observations(const PCD -namespace nav{ +namespace mil_vision{ UnoccludedPointsImg::UnoccludedPointsImg() // : frame_ptr(frame_ptr), @@ -12,4 +12,4 @@ UnoccludedPointsImg::UnoccludedPointsImg() } -} // namespace nav \ No newline at end of file +} // namespace mil_vision \ No newline at end of file diff --git a/perception/mil_vision/src/mil_vision_lib/colorizer/pcd_colorizer.cpp b/perception/mil_vision/src/mil_vision_lib/colorizer/pcd_colorizer.cpp index 8e3f67a4..210843e6 100644 --- a/perception/mil_vision/src/mil_vision_lib/colorizer/pcd_colorizer.cpp +++ b/perception/mil_vision/src/mil_vision_lib/colorizer/pcd_colorizer.cpp @@ -3,13 +3,13 @@ using namespace std; using namespace cv; -namespace nav{ +namespace mil_vision{ PcdColorizer::PcdColorizer(ros::NodeHandle nh, string input_pcd_topic) : _nh(nh), _img_hist_size{10}, _cloud_processor{nh, input_pcd_topic, _img_hist_size}, _input_pcd_topic{input_pcd_topic}, _output_pcd_topic{input_pcd_topic + "_colored"} { - using mil::tools::operator "" _s; // converts to std::string + using mil_tools::operator "" _s; // converts to std::string try { @@ -52,7 +52,7 @@ void PcdColorizer::_cloud_cb(const PCD<>::ConstPtr &cloud_in) // { // cout << "img hist size: " << _img_hist_size << endl; // // Subscribe to all rectified color img topics -// auto rect_color_topics = mil::tools::getRectifiedImageTopics(true); +// auto rect_color_topics = mil_tools::getRectifiedImageTopics(true); // if(rect_color_topics.size() == 0) // { // _err_msg += "COLORIZER: There are no rectified color camera topics currently publishing on this ROS master ("; @@ -170,4 +170,4 @@ void PcdColorizer::_cloud_cb(const PCD<>::ConstPtr &cloud_in) // } -} // namespace nav +} // namespace mil_vision diff --git a/perception/mil_vision/src/mil_vision_lib/colorizer/single_cloud_processor.cpp b/perception/mil_vision/src/mil_vision_lib/colorizer/single_cloud_processor.cpp index cd24af4e..4fdc8bf0 100644 --- a/perception/mil_vision/src/mil_vision_lib/colorizer/single_cloud_processor.cpp +++ b/perception/mil_vision/src/mil_vision_lib/colorizer/single_cloud_processor.cpp @@ -2,15 +2,15 @@ // using 
namespace std; -namespace nav { +namespace mil_vision { -using mil::tools::operator "" _s; // converts to std::string +using mil_tools::operator "" _s; // converts to std::string SingleCloudProcessor::SingleCloudProcessor(ros::NodeHandle nh, std::string& in_pcd_topic, size_t hist_size) : _nh{nh}, _hist_size{hist_size} { ROS_INFO(("SingleCloudProcessor: Initializing with " + in_pcd_topic).c_str()); - auto rect_color_topics = mil::tools::getRectifiedImageTopics(true); + auto rect_color_topics = mil_tools::getRectifiedImageTopics(true); if(rect_color_topics.size() == 0) { _err_msg += "COLORIZER: There are no rectified color camera topics currently publishing on this ROS master ("; @@ -55,4 +55,4 @@ void SingleCloudProcessor::operator()(const PCD::ConstPtr &pcd) // Summarize Confidence in each point color observation } -} // namespace nav +} // namespace mil_vision diff --git a/perception/mil_vision/src/mil_vision_lib/cv_utils.cc b/perception/mil_vision/src/mil_vision_lib/cv_utils.cc index a7f6de7d..07364acd 100644 --- a/perception/mil_vision/src/mil_vision_lib/cv_utils.cc +++ b/perception/mil_vision/src/mil_vision_lib/cv_utils.cc @@ -1,6 +1,6 @@ #include -namespace nav { +namespace mil_vision { cv::Point contour_centroid(Contour &contour) { cv::Moments m = cv::moments(contour, true); @@ -208,7 +208,7 @@ void statistical_image_segmentation(const cv::Mat &src, cv::Mat &dest, // Smooth histogram const int kernel_size = 11; - hist_smooth = nav::smooth_histogram(hist, kernel_size, sigma); + hist_smooth = mil_vision::smooth_histogram(hist, kernel_size, sigma); // Calculate histogram derivative (central finite difference) hist_derivative = hist_smooth.clone(); @@ -218,12 +218,12 @@ void statistical_image_segmentation(const cv::Mat &src, cv::Mat &dest, hist_derivative.at(i) = (hist_smooth.at(i + 1) - hist_smooth.at(i - 1)) / 2.0; } - hist_derivative = nav::smooth_histogram(hist_derivative, kernel_size, sigma); + hist_derivative = mil_vision::smooth_histogram(hist_derivative, kernel_size, sigma); // Find target mode std::vector histogram_modes = - nav::find_local_maxima(hist_smooth, 0.1); - int target_mode = nav::select_hist_mode(histogram_modes, target); + mil_vision::find_local_maxima(hist_smooth, 0.1); + int target_mode = mil_vision::select_hist_mode(histogram_modes, target); ros_log << "Target: " << target << std::endl; ros_log << "Mode Selected: " << target_mode << std::endl; @@ -238,9 +238,9 @@ void statistical_image_segmentation(const cv::Mat &src, cv::Mat &dest, int low_abs_derivative_thresh = std::abs(hist_deriv_stddev[0] * low_thresh_gain); std::vector derivative_maxima = - nav::find_local_maxima(hist_derivative, 0.01); + mil_vision::find_local_maxima(hist_derivative, 0.01); std::vector derivative_minima = - nav::find_local_minima(hist_derivative, 0.01); + mil_vision::find_local_minima(hist_derivative, 0.01); int high_thresh_search_start = target_mode; int low_thresh_search_start = target_mode; ros_log << "high_thresh_search_start: " << target_mode << std::endl; @@ -653,4 +653,4 @@ FrameHistory::get_frame_history(unsigned int frames_requested) { return frame_history; } -} // namespace nav +} // namespace mil_vision diff --git a/perception/mil_vision/src/mil_vision_lib/image_filtering.cpp b/perception/mil_vision/src/mil_vision_lib/image_filtering.cpp index 13c52ef4..a7bf1b1b 100644 --- a/perception/mil_vision/src/mil_vision_lib/image_filtering.cpp +++ b/perception/mil_vision/src/mil_vision_lib/image_filtering.cpp @@ -1,11 +1,11 @@ #include -namespace nav +namespace mil_vision { cv::Mat 
rotateKernel(const cv::Mat &kernel, float theta, bool deg, bool no_expand) { - theta = deg? theta : theta * mil::PI / 180.0f; + theta = deg? theta : theta * mil_tools::PI / 180.0f; cv::Point2f c_org{kernel.cols * 0.5f, kernel.rows * 0.5f}; // center of original if(no_expand) // rotates without expanding the canvas @@ -73,12 +73,12 @@ float getRadialSymmetryAngle(const cv::Mat &kernel, float ang_res, bool deg) cv::Mat elem_wise_mult{original.size(), CV_32S}; cv::multiply(original, original, elem_wise_mult); auto standard = cv::sum(elem_wise_mult)[0]; - float max = deg? 360.0f : 2 * mil::PI; + float max = deg? 360.0f : 2 * mil_tools::PI; float result = max; float best_score = 0; bool left_starting_region = false; - for(float theta = 0.0f; theta < max; theta += (deg? ang_res * 180.0f / mil::PI : ang_res)) + for(float theta = 0.0f; theta < max; theta += (deg? ang_res * 180.0f / mil_tools::PI : ang_res)) { cv::multiply(original, rotateKernel(original, theta, deg, true), elem_wise_mult); double score = standard / cv::sum(elem_wise_mult)[0]; @@ -109,4 +109,4 @@ float getRadialSymmetryAngle(const cv::Mat &kernel, float ang_res, bool deg) return result; } -} // namespace nav +} // namespace mil_vision diff --git a/perception/mil_vision/src/mil_vision_lib/point_cloud_algorithms.cc b/perception/mil_vision/src/mil_vision_lib/point_cloud_algorithms.cc index 88d7b854..a0d23231 100644 --- a/perception/mil_vision/src/mil_vision_lib/point_cloud_algorithms.cc +++ b/perception/mil_vision/src/mil_vision_lib/point_cloud_algorithms.cc @@ -3,7 +3,7 @@ using namespace std; using namespace cv; -namespace nav{ +namespace mil_vision { Mat g_color_sequence; @@ -139,4 +139,4 @@ void PcdColorizer::_color_pcd(){ } } -} //namespace nav \ No newline at end of file +} //namespace mil_vision diff --git a/perception/mil_vision/src/mil_vision_lib/visualization.cc b/perception/mil_vision/src/mil_vision_lib/visualization.cc index f3f8293e..6f4ed049 100644 --- a/perception/mil_vision/src/mil_vision_lib/visualization.cc +++ b/perception/mil_vision/src/mil_vision_lib/visualization.cc @@ -1,7 +1,7 @@ #include #include -namespace nav { +namespace mil_vision { RvizVisualizer::RvizVisualizer(std::string rviz_topic) { rviz_pub = nh.advertise(rviz_topic, 1); diff --git a/utils/mil_msgs/CMakeLists.txt b/utils/mil_msgs/CMakeLists.txt index 3ad33864..23439bc5 100644 --- a/utils/mil_msgs/CMakeLists.txt +++ b/utils/mil_msgs/CMakeLists.txt @@ -5,14 +5,11 @@ find_package(catkin message_generation message_runtime geometry_msgs - tf actionlib interactive_markers std_msgs actionlib_msgs - cmake_modules ) -catkin_python_setup() add_action_files(FILES MoveTo.action @@ -32,17 +29,10 @@ add_service_files(FILES CameraToLidarTransform.srv ) -find_package(Eigen 3 REQUIRED) -include_directories(${Eigen_INCLUDE_DIRS}) generate_messages( - DEPENDENCIES std_msgs actionlib_msgs geometry_msgs mil_msgs + DEPENDENCIES std_msgs actionlib_msgs geometry_msgs ) catkin_package( - DEPENDS Eigen - CATKIN_DEPENDS message_generation message_runtime geometry_msgs tf actionlib interactive_markers std_msgs actionlib_msgs - INCLUDE_DIRS ${EIGEN_INCLUDE_DIRS} include - LIBRARIES + CATKIN_DEPENDS message_generation message_runtime geometry_msgs actionlib std_msgs actionlib_msgs ) -include_directories(${EIGEN_INCLUDE_DIRS} include ${Boost_INCLUDE_DIR} ${catkin_INCLUDE_DIRS}) -#install(PROGRAMS scripts/interactive_marker_moveto scripts/velocitymeasurements_to_vector3 scripts/param_saver DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}) diff --git 
a/utils/mil_msgs/mil_msgs/__init__.py b/utils/mil_msgs/mil_msgs/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/utils/mil_msgs/package.xml b/utils/mil_msgs/package.xml index f0cb7cb1..96494287 100644 --- a/utils/mil_msgs/package.xml +++ b/utils/mil_msgs/package.xml @@ -13,21 +13,16 @@ message_runtime actionlib - interactive_markers std_msgs message_generation geometry_msgs - tf actionlib_msgs - cmake_modules message_runtime actionlib - interactive_markers std_msgs message_generation geometry_msgs - tf actionlib_msgs diff --git a/utils/mil_msgs/setup.py b/utils/mil_msgs/setup.py deleted file mode 100644 index aa9637cd..00000000 --- a/utils/mil_msgs/setup.py +++ /dev/null @@ -1,10 +0,0 @@ -# ! DO NOT MANUALLY INVOKE THIS setup.py, USE CATKIN INSTEAD - -from distutils.core import setup -from catkin_pkg.python_setup import generate_distutils_setup - -setup_args = generate_distutils_setup( - packages=['mil_msgs'], -) - -setup(**setup_args) diff --git a/utils/mil_tools/CMakeLists.txt b/utils/mil_tools/CMakeLists.txt index 4c1e7b9f..0b304579 100644 --- a/utils/mil_tools/CMakeLists.txt +++ b/utils/mil_tools/CMakeLists.txt @@ -8,13 +8,17 @@ find_package(catkin REQUIRED COMPONENTS rospy roscpp cv_bridge + cmake_modules ) +find_package(Eigen 3 REQUIRED) + catkin_package( + DEPENDS Eigen INCLUDE_DIRS include LIBRARIES - mil_tools_lib + mil_tools CATKIN_DEPENDS roscpp cv_bridge @@ -32,11 +36,15 @@ include_directories( SYSTEM ${catkin_INCLUDE_DIRS} ${Boost_INCLUDE_DIRS} + ${Eigen_INCLUDE_DIRS} ) -add_library(mil_tools_lib +add_library(mil_tools src/mil_tools/mil_tools.cpp ) -target_include_directories(mil_tools_lib PUBLIC include) -target_link_libraries(mil_tools_lib ${catkin_LIBRARIES} ${Boost_LIBRARIES}) +target_include_directories(mil_tools PUBLIC include) +target_link_libraries(mil_tools ${catkin_LIBRARIES} ${Boost_LIBRARIES}) + +install(DIRECTORY include/ + DESTINATION ${CATKIN_PACKAGE_INCLUDE_DESTINATION}) diff --git a/utils/mil_tools/include/mil_tools/mil_tools.hpp b/utils/mil_tools/include/mil_tools/mil_tools.hpp index 90b174e1..30a7b63e 100644 --- a/utils/mil_tools/include/mil_tools/mil_tools.hpp +++ b/utils/mil_tools/include/mil_tools/mil_tools.hpp @@ -7,12 +7,10 @@ #include #include -namespace mil{ +namespace mil_tools{ static const double PI = 3.1415926535897932; -namespace tools -{ // Returns a vector of topic names (std::string) that end with "image_rect_color" // if color is false then it looks for those that end in "image_rect" @@ -28,6 +26,4 @@ inline std::string operator "" _s(const char* str, size_t /*length*/) // Sorts contours by curve length void sortContours(std::vector>& contour_vec, bool ascending=true); -} // namespace mil::tools -} // namespace mil - +} diff --git a/utils/mil_msgs/include/mil_msgs/msg_helpers.h b/utils/mil_tools/include/mil_tools/msg_helpers.h similarity index 98% rename from utils/mil_msgs/include/mil_msgs/msg_helpers.h rename to utils/mil_tools/include/mil_tools/msg_helpers.h index 578467b8..fddbfbfe 100644 --- a/utils/mil_msgs/include/mil_msgs/msg_helpers.h +++ b/utils/mil_tools/include/mil_tools/msg_helpers.h @@ -3,7 +3,7 @@ #include #include -namespace uf_common { +namespace mil_tools { template inline T make_rgba(double r, double g, double b, double a) { diff --git a/utils/mil_msgs/include/mil_msgs/param_helpers.h b/utils/mil_tools/include/mil_tools/param_helpers.h similarity index 99% rename from utils/mil_msgs/include/mil_msgs/param_helpers.h rename to utils/mil_tools/include/mil_tools/param_helpers.h index 09c353b5..4e2881a1 
100644 --- a/utils/mil_msgs/include/mil_msgs/param_helpers.h +++ b/utils/mil_tools/include/mil_tools/param_helpers.h @@ -6,7 +6,7 @@ #include "msg_helpers.h" -namespace uf_common { +namespace mil_tools { void fail(std::string const &error_string) { throw std::runtime_error(error_string); } template diff --git a/utils/mil_tools/package.xml b/utils/mil_tools/package.xml index 260b7ddc..03116810 100644 --- a/utils/mil_tools/package.xml +++ b/utils/mil_tools/package.xml @@ -14,6 +14,8 @@ rospy cv_bridge cv_bridge + cmake_modules + diff --git a/utils/mil_msgs/scripts/param_saver b/utils/mil_tools/scripts/param_sever similarity index 95% rename from utils/mil_msgs/scripts/param_saver rename to utils/mil_tools/scripts/param_sever index 95c114fa..4cddcace 100755 --- a/utils/mil_msgs/scripts/param_saver +++ b/utils/mil_tools/scripts/param_sever @@ -4,8 +4,6 @@ import os import random import yaml -import roslib -roslib.load_manifest('uf_common') import rospy diff --git a/utils/mil_tools/src/mil_tools/mil_tools.cpp b/utils/mil_tools/src/mil_tools/mil_tools.cpp index 00fd1c9f..27bb316a 100644 --- a/utils/mil_tools/src/mil_tools/mil_tools.cpp +++ b/utils/mil_tools/src/mil_tools/mil_tools.cpp @@ -1,7 +1,8 @@ #include +#include +#include -namespace mil{ -namespace tools{ +namespace mil_tools { using namespace std; @@ -52,6 +53,4 @@ void sortContours(vector>& contour_vec, bool ascending) } -} // namespace mil::tools -} // namespace mil - +} From 5aa72f6b4577b14ae17f8b2454bd9be495f364f8 Mon Sep 17 00:00:00 2001 From: Kevin Allen Date: Mon, 10 Apr 2017 19:59:43 -0400 Subject: [PATCH 265/267] REPO: remove sub8 specific visualizaton --- perception/mil_vision/CMakeLists.txt | 1 - .../include/mil_vision_lib/visualization.hpp | 24 --- .../src/mil_vision_lib/visualization.cc | 151 ------------------ 3 files changed, 176 deletions(-) delete mode 100644 perception/mil_vision/include/mil_vision_lib/visualization.hpp delete mode 100644 perception/mil_vision/src/mil_vision_lib/visualization.cc diff --git a/perception/mil_vision/CMakeLists.txt b/perception/mil_vision/CMakeLists.txt index d5e3fc31..a5508733 100644 --- a/perception/mil_vision/CMakeLists.txt +++ b/perception/mil_vision/CMakeLists.txt @@ -74,7 +74,6 @@ set_target_properties(mil_sparsestereo PROPERTIES COMPILE_FLAGS "-O3 -DNDEBUG -f add_library( mil_vision_lib - src/mil_vision_lib/visualization.cc src/mil_vision_lib/cv_utils.cc src/mil_vision_lib/image_filtering.cpp src/mil_vision_lib/active_contours.cpp diff --git a/perception/mil_vision/include/mil_vision_lib/visualization.hpp b/perception/mil_vision/include/mil_vision_lib/visualization.hpp deleted file mode 100644 index 00b92f56..00000000 --- a/perception/mil_vision/include/mil_vision_lib/visualization.hpp +++ /dev/null @@ -1,24 +0,0 @@ -#pragma once - -// #include "geometry.hpp" -// #include "segmentation.hpp" -// #include "typedefs.hpp" -#include -#include -#include "ros/ros.h" -#include "geometry_msgs/Pose.h" - -namespace mil_vision { - -// ******* 3D Visualization ******* -class RvizVisualizer { - public: - ros::Publisher rviz_pub; - ros::NodeHandle nh; - RvizVisualizer(std::string rviz_topic); - void visualize_buoy(geometry_msgs::Pose &pose, std::string &frame_id); - void visualize_torpedo_board(geometry_msgs::Pose& pose, Eigen::Quaterniond orientation, - std::vector& targets, std::vector& corners3d, - std::string& frame_id); -}; -} diff --git a/perception/mil_vision/src/mil_vision_lib/visualization.cc b/perception/mil_vision/src/mil_vision_lib/visualization.cc deleted file mode 100644 index 
6f4ed049..00000000 --- a/perception/mil_vision/src/mil_vision_lib/visualization.cc +++ /dev/null @@ -1,151 +0,0 @@ -#include -#include - -namespace mil_vision { - -RvizVisualizer::RvizVisualizer(std::string rviz_topic) { - rviz_pub = nh.advertise(rviz_topic, 1); - ros::spinOnce(); - // Give these guys some time to get ready - ros::Duration(0.5).sleep(); -} - -void RvizVisualizer::visualize_buoy(geometry_msgs::Pose& pose, std::string& frame_id) { - visualization_msgs::Marker marker; - - // Generate sphere marker for the buoy - marker.header.frame_id = frame_id; - marker.header.stamp = ros::Time(); - marker.ns = "sub"; - marker.id = 0; - marker.type = visualization_msgs::Marker::SPHERE; - marker.action = visualization_msgs::Marker::ADD; - marker.pose = pose; - marker.scale.x = 0.2; - marker.scale.y = 0.2; - marker.scale.z = 0.2; - marker.color.a = 1.0; - marker.color.r = 0.0; - marker.color.g = 1.0; - marker.color.b = 0.0; - marker.lifetime = ros::Duration(); - rviz_pub.publish(marker); - - // Generate text to overlay on the buoy (TODO: Put the text offset from the buoy) - marker.header.stamp = ros::Time(); - marker.ns = "sub"; - marker.id = 1; - marker.type = visualization_msgs::Marker::TEXT_VIEW_FACING; - marker.action = visualization_msgs::Marker::ADD; - marker.pose.position.x = pose.position.x; - // y is down in computer vision world - marker.pose.position.y = pose.position.y - 0.25; - marker.pose.position.z = pose.position.z; - // Orientation doesn't matter for this guy - marker.scale.x = 0.2; - marker.scale.y = 0.2; - marker.scale.z = 0.2; - - marker.color.a = 1.0; - marker.color.r = 1.0; - marker.color.g = 0.0; - marker.color.b = 0.0; - marker.lifetime = ros::Duration(); - marker.text = "Buoy Bump Target"; - rviz_pub.publish(marker); -} - -void RvizVisualizer::visualize_torpedo_board(geometry_msgs::Pose& pose, Eigen::Quaterniond orientation, - std::vector& targets, std::vector& corners3d, - std::string& frame_id) { - visualization_msgs::Marker centroid, target_markers, text, borders; - centroid.header.frame_id = target_markers.header.frame_id = text.header.frame_id = borders.header.frame_id = frame_id; - centroid.header.stamp = target_markers.header.stamp = text.header.stamp = borders.header.stamp = ros::Time::now(); - centroid.ns = target_markers.ns = text.ns = borders.ns = "torpedo_board"; - centroid.lifetime = target_markers.lifetime = text.lifetime = borders.lifetime = ros::Duration(); - - // Generate sphere marker for the torpedo_board centroid - centroid.id = 0; - centroid.type = visualization_msgs::Marker::SPHERE; - centroid.action = visualization_msgs::Marker::ADD; - centroid.pose = pose; - centroid.scale.x = 0.1; - centroid.scale.y = 0.1; - centroid.scale.z = 0.1; - centroid.color.a = 1.0; - centroid.color.r = 0.0; - centroid.color.g = 1.0; - centroid.color.b = 0.0; - // centroid.pose.orientation.x = orientation.x(); - // centroid.pose.orientation.y = orientation.y(); - // centroid.pose.orientation.z = orientation.z(); - // centroid.pose.orientation.w = orientation.w(); - rviz_pub.publish(centroid); - - // Generate markers denoting the centers of individual targets - target_markers.id = 1; - target_markers.type = visualization_msgs::Marker::SPHERE_LIST; - target_markers.action = visualization_msgs::Marker::ADD; - geometry_msgs::Point p; - for(size_t i = 0; i < targets.size(); i++){ - p.x = targets[i](0); - p.y = targets[i](1); - p.z = targets[i](2); - target_markers.points.push_back(p); - } - target_markers.scale.x = 0.1; - target_markers.scale.y = 0.1; - 
target_markers.scale.z = 0.1; - target_markers.color.a = 1.0; - target_markers.color.r = 0.0; - target_markers.color.g = 1.0; - target_markers.color.b = 0.0; - // rviz_pub.publish(target_markers); - - // Generate text to overlay on the torpedo_board centroid - text.id = 2; - text.type = visualization_msgs::Marker::TEXT_VIEW_FACING; - text.action = visualization_msgs::Marker::ADD; - text.pose.position.x = pose.position.x; - text.pose.position.y = pose.position.y - 0.25; - text.pose.position.z = pose.position.z; - // Orientation doesn't matter for this guy - text.scale.x = 0.1; - text.scale.y = 0.1; - text.scale.z = 0.1; - text.color.a = 1.0; - text.color.r = 1.0; - text.color.g = 1.0; - text.color.b = 0.0; - text.text = "Torpedo Board Centroid"; - rviz_pub.publish(text); - - // Generate border line markers - borders.id = 3; - borders.type = visualization_msgs::Marker::LINE_STRIP; - borders.action = visualization_msgs::Marker::ADD; - borders.pose.orientation.w = 1.0; - for(size_t i = 0; i <= corners3d.size(); i++){ - if(i == corners3d.size()){ - p.x = corners3d[0](0); - p.y = corners3d[0](1); - p.z = corners3d[0](2); - } - else{ - p.x = corners3d[i](0); - p.y = corners3d[i](1); - p.z = corners3d[i](2); - } - borders.points.push_back(p); - } - borders.scale.x = 0.05; - borders.scale.y = 0.05; - borders.scale.z = 0.05; - borders.color.a = 1.0; - borders.color.r = 1.0; - borders.color.g = 1.0; - borders.color.b = 0.0; - rviz_pub.publish(borders); -} - -} // End namespace "sub" From 711eb4a7f9c06db6f0d9457523bde60dd9a4d19e Mon Sep 17 00:00:00 2001 From: Kevin Allen Date: Tue, 11 Apr 2017 00:28:23 -0400 Subject: [PATCH 266/267] DEPENDENCIES: update odometry_utils submodule --- odometry_tools | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/odometry_tools b/odometry_tools index b23dc5d7..299eb06d 160000 --- a/odometry_tools +++ b/odometry_tools @@ -1 +1 @@ -Subproject commit b23dc5d74b941dd36159fce030479d9772c58113 +Subproject commit 299eb06d1c538085ac2cb86a2ff2ba070042fed0 From 0c8a90510ccac4e55f18d85635f281fde12f6b0c Mon Sep 17 00:00:00 2001 From: Kevin Allen Date: Tue, 11 Apr 2017 00:54:29 -0400 Subject: [PATCH 267/267] REPO: replace navigator dependencies with mil_common --- .../mil_vision/object_classification/lidar_to_image.py | 7 +++++-- .../object_classification/object_classification.py | 2 +- .../mil_vision/object_classification/roi_generator.py | 2 +- perception/mil_vision/package.xml | 2 -- perception/mil_vision/ros_tools/easy_thresh.py | 3 ++- perception/mil_vision/ros_tools/on_the_fly_thresholder.py | 2 +- utils/mil_tools/nodes/clicked_point_recorder.py | 2 +- utils/mil_tools/nodes/rviz_waypoint.py | 1 - 8 files changed, 11 insertions(+), 10 deletions(-) diff --git a/perception/mil_vision/object_classification/lidar_to_image.py b/perception/mil_vision/object_classification/lidar_to_image.py index b67b5357..8b79bff5 100644 --- a/perception/mil_vision/object_classification/lidar_to_image.py +++ b/perception/mil_vision/object_classification/lidar_to_image.py @@ -12,12 +12,15 @@ from sensor_msgs.msg import Image, CameraInfo import genpy import cv2 -import navigator_tools as nt +from mil_ros_tools import odometry_to_numpy from navigator_msgs.srv import ObjectDBQuery, ObjectDBQueryRequest from image_geometry import PinholeCameraModel import numpy as np ___author___ = "Tess Bianchi" +''' +Needs to be refactored to be generic and non depend on navigator +''' class LidarToImage(object): @@ -45,7 +48,7 @@ def init_(self, cam): yield self.nh.subscribe(image_sub, Image, 
         self._database = yield self.nh.get_service_client('/database/requests', ObjectDBQuery)
         self._odom_sub = yield self.nh.subscribe('/odom', Odometry,
-                                                 lambda odom: setattr(self, 'pose', nt.odometry_to_numpy(odom)[0]))
+                                                 lambda odom: setattr(self, 'pose', odometry_to_numpy(odom)[0]))
         self.cam_info_sub = yield self.nh.subscribe(cam_info, CameraInfo, self._info_cb)
         self.tf_listener = tf.TransformListener(self.nh)
         defer.returnValue(self)

diff --git a/perception/mil_vision/object_classification/object_classification.py b/perception/mil_vision/object_classification/object_classification.py
index 679cf78e..0b16b603 100644
--- a/perception/mil_vision/object_classification/object_classification.py
+++ b/perception/mil_vision/object_classification/object_classification.py
@@ -1,6 +1,6 @@
 from roi_generator import ROI_Collection
-from navigator_tools import CvDebug, BagCrawler
+from mil_ros_tools import CvDebug, BagCrawler
 import numpy as np
 from HOG_descriptor import HOGDescriptor
 from SVM_classifier import SVMClassifier

diff --git a/perception/mil_vision/object_classification/roi_generator.py b/perception/mil_vision/object_classification/roi_generator.py
index cc726829..3980a7a1 100644
--- a/perception/mil_vision/object_classification/roi_generator.py
+++ b/perception/mil_vision/object_classification/roi_generator.py
@@ -1,5 +1,5 @@
 #!/usr/bin/python
-from navigator_tools import BagCrawler
+from mil_ros_tools import BagCrawler
 import argparse
 from cv_bridge import CvBridge
 import cv2

diff --git a/perception/mil_vision/package.xml b/perception/mil_vision/package.xml
index 01ac5999..708fcf4d 100644
--- a/perception/mil_vision/package.xml
+++ b/perception/mil_vision/package.xml
@@ -33,8 +33,6 @@
   <build_depend>tf2_sensor_msgs</build_depend>
   <run_depend>tf2_sensor_msgs</run_depend>
   <build_depend>tf2_geometry_msgs</build_depend>
-  <build_depend>navigator_tools</build_depend>
-  <run_depend>navigator_tools</run_depend>
   <build_depend>pcl_ros</build_depend>
   <run_depend>pcl_ros</run_depend>

diff --git a/perception/mil_vision/ros_tools/easy_thresh.py b/perception/mil_vision/ros_tools/easy_thresh.py
index 5fb36d6e..584143f8 100755
--- a/perception/mil_vision/ros_tools/easy_thresh.py
+++ b/perception/mil_vision/ros_tools/easy_thresh.py
@@ -28,8 +28,9 @@
 prefix = 'hsv' if args.hsv else 'bgr'
 
 # Importing these late so that argcomplete can run quickly
+# NEEDS TO NOT BE DEPENDENT ON sub8 SOON. MOVE THESE FUNCTIONS TO MIL_VISION
 from sub8_vision_tools import visual_threshold_tools  # noqa
-from sub8_ros_tools.image_helpers import Image_Subscriber  # noqa
+from mil_ros_tools.image_helpers import Image_Subscriber  # noqa
 import cv2  # noqa
 import numpy as np  # noqa
 import os  # noqa

diff --git a/perception/mil_vision/ros_tools/on_the_fly_thresholder.py b/perception/mil_vision/ros_tools/on_the_fly_thresholder.py
index ebad11fe..5a4b0119 100755
--- a/perception/mil_vision/ros_tools/on_the_fly_thresholder.py
+++ b/perception/mil_vision/ros_tools/on_the_fly_thresholder.py
@@ -4,7 +4,7 @@
 from sensor_msgs.msg import Image, CompressedImage
 from cv_bridge import CvBridge, CvBridgeError
 
-from navigator_tools import image_helpers
+from mil_ros_tools import image_helpers
 import numpy as np
 import argcomplete
 import sys

diff --git a/utils/mil_tools/nodes/clicked_point_recorder.py b/utils/mil_tools/nodes/clicked_point_recorder.py
index fc01a03e..51814ece 100755
--- a/utils/mil_tools/nodes/clicked_point_recorder.py
+++ b/utils/mil_tools/nodes/clicked_point_recorder.py
@@ -3,7 +3,7 @@
 """
 Listens to RVIZ clicked points, storing points in a csv file
 
-Usage: rosrun navigator_tools clicked_point_recorder.py
+Usage: rosrun mil_tools clicked_point_recorder.py
 """
 import rospy

diff --git a/utils/mil_tools/nodes/rviz_waypoint.py b/utils/mil_tools/nodes/rviz_waypoint.py
index a38d5035..e70817d7 100755
--- a/utils/mil_tools/nodes/rviz_waypoint.py
+++ b/utils/mil_tools/nodes/rviz_waypoint.py
@@ -3,7 +3,6 @@
 import txros
 import numpy as np
-import navigator_tools
 from twisted.internet import defer
 from geometry_msgs.msg import PoseStamped, PointStamped
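
After this patch, downstream scripts import the shared helpers from mil_ros_tools rather than navigator_tools or sub8_ros_tools. A minimal usage sketch of the new import path, assuming only the Image_Subscriber helper renamed in the easy_thresh.py hunk above (the node name and image topic below are hypothetical, chosen for illustration):

#!/usr/bin/python
import cv2
import rospy
from mil_ros_tools.image_helpers import Image_Subscriber


def show(cv_image):
    # Image_Subscriber hands the callback an already-converted OpenCV image,
    # so no CvBridge boilerplate is needed here.
    cv2.imshow('view', cv_image)
    cv2.waitKey(1)


if __name__ == '__main__':
    rospy.init_node('image_view_example')  # hypothetical node name
    Image_Subscriber('/camera/image_raw', callback=show)  # hypothetical topic
    rospy.spin()

Aside from the changed package name, the call sites are unchanged, which is what keeps this migration a pure find-and-replace across the eight files listed in the diffstat.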