feat: add list_pointcloud_topics function to retrieve PointCloud2 topics; update CHANGELOG and tests
tgoelles committed Dec 3, 2024
1 parent 2f6fc69 commit 851f1e9
Showing 6 changed files with 87 additions and 62 deletions.
3 changes: 3 additions & 0 deletions CHANGELOG.rst
@@ -10,7 +10,10 @@ Unreleased

Added
~~~~~~
- list_pointcloud_topics at the package top level to list PointCloud2 topics
- dask[dataframe] as a dependency for the CLI
- livox test data


0.10.0 - (2024-12-03)
-------------
67 changes: 38 additions & 29 deletions doc/sphinx/source/tutorial_notebooks/usage.ipynb
@@ -18,15 +18,13 @@
"outputs": [],
"source": [
"import pointcloudset as pcs\n",
"\n",
"print(f\"package version: {pcs.__version__}\")\n",
"from pathlib import Path\n",
"\n",
"import matplotlib.pyplot as plt\n",
"import plotly.express as px\n",
"\n",
"plt.rcParams[\"figure.figsize\"] = (20,10)\n",
"plt.rcParams[\"figure.figsize\"] = (20, 10)\n",
"\n",
"import plotly.express as px\n",
"print(f\"package version: {pcs.__version__}\")\n",
"\n",
"%load_ext autoreload\n",
"%autoreload 2\n",
@@ -37,14 +35,23 @@
"cell_type": "markdown",
"metadata": {},
"source": [
"Ignore the INFO messages, which comes from the rospy package.\n"
"## Reading a ROS file into the Dataset"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"testbag = Path().cwd().parent.joinpath(\"../../../tests/testdata/test.bag\")"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Reading a ROS file into the Dataset"
"Lets check which pointcloud messages are there:"
]
},
{
@@ -53,7 +60,8 @@
"metadata": {},
"outputs": [],
"source": [
"testbag = Path().cwd().parent.joinpath(\"../../../tests/testdata/test.bag\")"
"topics = pcs.list_pointcloud_topics(testbag)\n",
"topics"
]
},
{
@@ -62,7 +70,7 @@
"metadata": {},
"outputs": [],
"source": [
"testset = pcs.Dataset.from_file(testbag,topic=\"/os1_cloud_node/points\",keep_zeros=False)"
"testset = pcs.Dataset.from_file(testbag, topic=topics[0], keep_zeros=False)"
]
},
{
@@ -190,7 +198,7 @@
"metadata": {},
"outputs": [],
"source": [
"px.line(min_pointcloud,x=\"timestamp\", y=\"x min\")"
"px.line(min_pointcloud, x=\"timestamp\", y=\"x min\")"
]
},
{
@@ -232,7 +240,7 @@
"metadata": {},
"outputs": [],
"source": [
"testset.agg(\"min\",\"dataset\")"
"testset.agg(\"min\", \"dataset\")"
]
},
{
@@ -241,7 +249,7 @@
"metadata": {},
"outputs": [],
"source": [
"testset.agg([\"min\",\"max\",\"mean\"],\"point\")"
"testset.agg([\"min\", \"max\", \"mean\"], \"point\")"
]
},
{
@@ -250,7 +258,7 @@
"metadata": {},
"outputs": [],
"source": [
"testset.agg({\"x\":[\"max\",\"min\"]},\"point\")"
"testset.agg({\"x\": [\"max\", \"min\"]}, \"point\")"
]
},
{
@@ -259,7 +267,7 @@
"metadata": {},
"outputs": [],
"source": [
"testset.agg({\"x\":\"max\"},\"point\")"
"testset.agg({\"x\": \"max\"}, \"point\")"
]
},
{
@@ -449,7 +457,9 @@
"metadata": {},
"outputs": [],
"source": [
"newpointcloud = testpointcloud.limit(\"x\",-5,5).limit(\"intensity\",400,1000).filter(\"quantile\",\"reflectivity\", \">\",0.5)"
"newpointcloud = (\n",
" testpointcloud.limit(\"x\", -5, 5).limit(\"intensity\", 400, 1000).filter(\"quantile\", \"reflectivity\", \">\", 0.5)\n",
")"
]
},
{
@@ -481,7 +491,7 @@
"metadata": {},
"outputs": [],
"source": [
"newpointcloud.plot(\"intensity\",hover_data=[\"range\"])"
"newpointcloud.plot(\"intensity\", hover_data=[\"range\"])"
]
},
{
@@ -500,7 +510,9 @@
},
"outputs": [],
"source": [
"plane = newpointcloud.plane_segmentation(distance_threshold= 0.01,ransac_n= 3,num_iterations= 50, return_plane_model=True)\n",
"plane = newpointcloud.plane_segmentation(\n",
" distance_threshold=0.01, ransac_n=3, num_iterations=50, return_plane_model=True\n",
")\n",
"print(len(plane))"
]
},
@@ -530,9 +542,9 @@
},
"outputs": [],
"source": [
"clusters = newpointcloud.get_cluster(eps=0.5, min_points= 10)\n",
"cluster1 = newpointcloud.take_cluster(1,clusters)\n",
"cluster2 = newpointcloud.take_cluster(2,clusters)\n",
"clusters = newpointcloud.get_cluster(eps=0.5, min_points=10)\n",
"cluster1 = newpointcloud.take_cluster(1, clusters)\n",
"cluster2 = newpointcloud.take_cluster(2, clusters)\n",
"print(len(cluster1))\n",
"print(len(cluster2))"
]
@@ -552,7 +564,7 @@
"metadata": {},
"outputs": [],
"source": [
"newpointcloud.plot(color=None, overlay={\"Cluster 1\": cluster1,\"Cluster 2\": cluster2}, hover_data=[\"intensity\"])"
"newpointcloud.plot(color=None, overlay={\"Cluster 1\": cluster1, \"Cluster 2\": cluster2}, hover_data=[\"intensity\"])"
]
},
{
@@ -570,7 +582,7 @@
"outputs": [],
"source": [
"def isolate_target(frame: pcs.PointCloud) -> pcs.PointCloud:\n",
" return frame.limit(\"x\",0,1).limit(\"y\",0,1)"
" return frame.limit(\"x\", 0, 1).limit(\"y\", 0, 1)"
]
},
{
@@ -644,7 +656,7 @@
"metadata": {},
"outputs": [],
"source": [
"result.agg({\"x difference\":\"max\"},\"pointcloud\")"
"result.agg({\"x difference\": \"max\"}, \"pointcloud\")"
]
},
{
@@ -656,13 +668,10 @@
}
],
"metadata": {
"interpreter": {
"hash": "5d20601a78b69ce1e830e7a32b8a0e9b424dfcbdd55b4b97e0f1a14444bda345"
},
"kernelspec": {
"display_name": "base",
"display_name": "Python 3",
"language": "python",
"name": "base"
"name": "python3"
},
"language_info": {
"codemirror_mode": {
@@ -674,7 +683,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.9.9"
"version": "3.11.10"
}
},
"nbformat": 4,
7 changes: 7 additions & 0 deletions pointcloudset/__init__.py
@@ -1,4 +1,11 @@
from .dataset import Dataset
from .io.dataset.topics import list_pointcloud_topics
from .pointcloud import PointCloud

__version__ = "0.10.0"

__all__ = [
"Dataset",
"PointCloud",
"list_pointcloud_topics",
]
11 changes: 11 additions & 0 deletions pointcloudset/io/dataset/topics.py
@@ -0,0 +1,11 @@
from pathlib import Path

from rosbags.highlevel import AnyReader


def list_pointcloud_topics(bagfile: Path) -> list[str]:
    """Returns a list of all PointCloud2 topics in a ROS1 bagfile or a directory containing a ROS2 mcap file."""
    with AnyReader([bagfile]) as reader:
        all_topics = reader.topics

    return [k for k, v in all_topics.items() if v.msgtype == "sensor_msgs/msg/PointCloud2"]
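
For context, a minimal usage sketch of the new helper together with the existing Dataset loader (the bag path assumes the bundled test data from this repository, and the empty-list guard is illustrative rather than part of this commit):

from pathlib import Path

import pointcloudset as pcs

# A ROS1 bag file (or a directory containing a ROS2 mcap file); adjust to your checkout.
bagfile = Path("tests/testdata/test.bag")

# List all sensor_msgs/msg/PointCloud2 topics found in the file.
topics = pcs.list_pointcloud_topics(bagfile)
print(topics)

# Read the first pointcloud topic into a Dataset, as in the tutorial notebook.
if topics:
    dataset = pcs.Dataset.from_file(bagfile, topic=topics[0], keep_zeros=False)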
27 changes: 12 additions & 15 deletions tests/cli/test_rosbagconvert.py
@@ -1,11 +1,12 @@
from pathlib import Path
from pydoc_data import topics

import pytest
import pytest_check as check
from pyntcloud.io import TO_FILE
from typer.testing import CliRunner

from pointcloudset import Dataset
from pointcloudset import Dataset, list_pointcloud_topics
from pointcloudset.io.dataset.commandline import app

TO_FILE_PYNTCLOUD = list(TO_FILE.keys())
@@ -32,14 +33,15 @@ def test_topics(ros_files):


def test_convert_one_rosfile_to_dir(ros_files, tmp_path: Path):
topics = list_pointcloud_topics(ros_files)
out_path = tmp_path.joinpath("cli")
result = runner.invoke(
app,
[
"convert",
ros_files.as_posix(),
"-t",
"/os1_cloud_node/points",
topics[0],
"-d",
out_path.as_posix(),
],
@@ -49,13 +51,11 @@ def test_convert_one_rosfile_to_dir(ros_files, tmp_path: Path):
check.equal(out_path_real.exists(), True)
read_dataset = Dataset.from_file(out_path_real)
check.is_instance(read_dataset, Dataset)
check.equal(len(read_dataset), 2)
check.equal(len(read_dataset.timestamps), 2)
check.greater(len(read_dataset), 1)
check.greater(len(read_dataset.timestamps), 1)


def test_convert_all_rosfiles_to_dir(
tmp_path: Path, testdata_path: Path, monkeypatch: pytest.MonkeyPatch
):
def test_convert_all_rosfiles_to_dir(tmp_path: Path, testdata_path: Path, monkeypatch: pytest.MonkeyPatch):
monkeypatch.chdir(testdata_path)
out_path = tmp_path.joinpath("cli_dirs")
result = runner.invoke(
@@ -85,14 +85,15 @@ def test_convert_all_rosfiles_to_dir(

@pytest.mark.parametrize("fileformat", TO_FILE_PYNTCLOUD)
def test_convert_one_ros_file_frames_to_files(ros_files, tmp_path: Path, fileformat):
topics = list_pointcloud_topics(ros_files)
out_path = tmp_path.joinpath("cli_files")
result = runner.invoke(
app,
[
"convert",
ros_files.as_posix(),
"-t",
"/os1_cloud_node/points",
topics[0],
"-d",
out_path.as_posix(),
"-o",
@@ -103,7 +104,7 @@ def test_convert_one_ros_file_frames_to_files(ros_files, tmp_path: Path, filefor
out_path_real = out_path.joinpath(ros_files.stem)
check.equal(out_path_real.exists(), True)
files = list(out_path_real.glob(f"*.{fileformat.lower()}"))
check.equal(len(files), 2)
check.greater(len(files), 1)
check.equal(files[0].suffix.replace(".", ""), fileformat.lower())


@@ -135,9 +136,7 @@ def test_convert_one_ros_file_one_frame_to_files(ros_files, tmp_path: Path, file
check.equal(files[0].suffix.replace(".", ""), fileformat.lower())


def test_convert_all_bags_frames_files(
tmp_path: Path, testdata_path: Path, monkeypatch: pytest.MonkeyPatch
):
def test_convert_all_bags_frames_files(tmp_path: Path, testdata_path: Path, monkeypatch: pytest.MonkeyPatch):
monkeypatch.chdir(testdata_path)
out_path = tmp_path.joinpath("cli_dirs_frames")
result = runner.invoke(
Expand All @@ -164,9 +163,7 @@ def test_convert_all_bags_frames_files(

@pytest.mark.slow
@pytest.mark.parametrize("filename", ["big_uncomp.bag", "big_comp.bag"])
def test_convert_large_file_complete(
testdata_path_large: Path, tmp_path: Path, filename: str
):
def test_convert_large_file_complete(testdata_path_large: Path, tmp_path: Path, filename: str):
if testdata_path_large.exists():
len_target = 250
testbag = testdata_path_large.joinpath(filename)