add enable_tqdm in builder for progress bar visualization
chenchenplus committed Jan 3, 2025
1 parent 6bea729 commit c926007
Showing 8 changed files with 129 additions and 43 deletions.
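
The change is mechanical and repeats across every touched module: each batched Pool.map loop is wrapped in tqdm, and the bar stays silent unless the caller passes enable_tqdm=True. Below is a minimal, self-contained sketch of that pattern; the worker function, batch size, and data are illustrative stand-ins, not mosstool's actual code.

    from multiprocessing import Pool

    from tqdm import tqdm

    MAX_BATCH_SIZE = 1000  # illustrative; mosstool takes its value from a shared const module


    def _work_unit(x):
        # stand-in for the per-item matching functions in the diff below
        return x * x


    def process_in_batches(items, workers=4, enable_tqdm=False):
        results = []
        # one tqdm tick per batch; with enable_tqdm=False the loop prints nothing
        for i in tqdm(range(0, len(items), MAX_BATCH_SIZE), disable=not enable_tqdm):
            batch = items[i : i + MAX_BATCH_SIZE]
            with Pool(processes=workers) as pool:
                results += pool.map(_work_unit, batch)
        return results


    if __name__ == "__main__":
        print(process_in_batches(list(range(5_000)), enable_tqdm=True)[:5])
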
20 changes: 14 additions & 6 deletions mosstool/map/_map_util/aois/append_aois_matcher.py
@@ -14,6 +14,7 @@
from shapely.geometry import (LineString, MultiLineString, MultiPoint,
MultiPolygon, Point, Polygon)
from shapely.strtree import STRtree
+from tqdm import tqdm

from ....type import AoiType
from ..._util.angle import abs_delta_angle
@@ -1517,7 +1518,9 @@ def _add_pois(aois, pois):
return aois, out_pois


-def _merge_covered_aoi(aois: list, workers: int, max_chunk_size: int):
+def _merge_covered_aoi(
+    aois: list, workers: int, max_chunk_size: int, enable_tqdm: bool
+):
"""
Blend the contained small poly aoi into the large poly aoi
At the same time, cut off the overlapping parts between aoi
@@ -1535,7 +1538,7 @@ def _merge_covered_aoi(aois: list, workers: int, max_chunk_size: int):
partial_find_aoi_parent_unit = partial(_find_aoi_parent_unit, (aois_to_merge,))
aois = [(i, a) for i, a in enumerate(aois)]
aois_result = []
-for i in range(0, len(aois), MAX_BATCH_SIZE):
+for i in tqdm(range(0, len(aois), MAX_BATCH_SIZE), disable=not enable_tqdm):
aois_batch = aois[i : i + MAX_BATCH_SIZE]
with Pool(processes=workers) as pool:
aois_result += pool.map(
@@ -1576,7 +1579,7 @@ def _merge_covered_aoi(aois: list, workers: int, max_chunk_size: int):
aois_result = []
partial_args = (aois_with_overlap,)
partial_find_aoi_overlap_unit = partial(_find_aoi_overlap_unit, partial_args)
-for i in range(0, len(aois), MAX_BATCH_SIZE):
+for i in tqdm(range(0, len(aois), MAX_BATCH_SIZE), disable=not enable_tqdm):
aois_batch = aois[i : i + MAX_BATCH_SIZE]
with Pool(processes=workers) as pool:
aois_result += pool.map(
@@ -1635,6 +1638,7 @@ def _add_aoi(
stop_dis_gate: float,
stop_huge_gate: float,
merge_aoi: bool = False,
+enable_tqdm: bool = False,
) -> dict[int, dict]:
"""
aois matches the rightmost lane
@@ -1724,7 +1728,7 @@ def _add_aoi(
aois_poly.extend(aois_poi)

if merge_aoi:
-aois_poly = _merge_covered_aoi(aois_poly, workers, max_chunk_size)
+aois_poly = _merge_covered_aoi(aois_poly, workers, max_chunk_size, enable_tqdm)
# The convex hull may fail, check it
for a in aois_poly:
assert isinstance(a["geo"], Polygon)
@@ -1754,7 +1758,7 @@ def _add_aoi(
AOI_GATE_OFFSET,
)
partial_add_aoi_stop_unit = partial(_add_aoi_stop_unit, partial_args)
-for i in range(0, len(args), MAX_BATCH_SIZE):
+for i in tqdm(range(0, len(args), MAX_BATCH_SIZE), disable=not enable_tqdm):
args_batch = args[i : i + MAX_BATCH_SIZE]
with Pool(processes=workers) as pool:
results_stop += pool.map(
@@ -1777,7 +1781,7 @@ def _add_aoi(
W_HUGE_GATE,
)
partial_add_poly_aoi_unit = partial(_add_poly_aoi_unit, partial_args)
-for i in range(0, len(args), MAX_BATCH_SIZE):
+for i in tqdm(range(0, len(args), MAX_BATCH_SIZE), disable=not enable_tqdm):
args_batch = args[i : i + MAX_BATCH_SIZE]
with Pool(processes=workers) as pool:
results_poly += pool.map(
@@ -1804,6 +1808,7 @@ def add_aoi_to_map(
input_stops: list,
bbox: tuple[float, float, float, float],
merge_aoi: bool,
+enable_tqdm: bool,
dis_gate: float = 30.0,
station_dis_gate: float = 30.0,
station_huge_gate: float = 50.0,
@@ -1828,6 +1833,7 @@
stop_huge_gate=station_huge_gate,
max_chunk_size=multiprocessing_chunk_size,
merge_aoi=merge_aoi,
+enable_tqdm=enable_tqdm,
)
added_input_poi = []
for _, aoi in aois.items():
@@ -1848,6 +1854,7 @@ def add_sumo_aoi_to_map(
input_pois: list,
input_stops: list,
merge_aoi: bool,
+enable_tqdm: bool,
dis_gate: float = 30.0,
station_dis_gate: float = 30.0,
station_huge_gate: float = 50.0,
@@ -1878,6 +1885,7 @@
stop_huge_gate=station_huge_gate,
max_chunk_size=multiprocessing_chunk_size,
merge_aoi=merge_aoi,
+enable_tqdm=enable_tqdm,
)
added_ex_pois = []
for _, aoi in aois.items():
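
In this file the flag travels from the public entry points add_aoi_to_map and add_sumo_aoi_to_map, where it is a required argument, through _add_aoi, where it defaults to False, down into _merge_covered_aoi and the batched matching loops. A toy sketch of that plumbing, with stand-in names and bodies:

    from tqdm import tqdm


    def _internal_helper(items, enable_tqdm: bool = False):
        # defaulted here, so internal callers that never mention the flag keep working
        return [x for x in tqdm(items, disable=not enable_tqdm)]


    def public_entry(items, enable_tqdm: bool):
        # required here, so the builder must decide explicitly, and the value
        # is forwarded to every helper that loops over batches
        return _internal_helper(items, enable_tqdm=enable_tqdm)


    print(len(public_entry(list(range(100)), enable_tqdm=False)))
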
16 changes: 12 additions & 4 deletions mosstool/map/_map_util/aois/match_aoi_pop.py
@@ -7,6 +7,7 @@
import pyproj
import rasterio
from shapely.geometry import MultiPolygon, Point, Polygon
+from tqdm import tqdm

from ..const import *
from .utils import geo_coords
@@ -280,6 +281,7 @@ def _get_aoi_pop(
y_step: float,
xy_gps_scale2: float,
pixel_area: float,
+enable_tqdm: bool,
workers: int,
max_chunk_size: int,
):
@@ -293,7 +295,7 @@
pixel_area,
)
partial_get_aoi_point_pop_unit = partial(_get_aoi_point_pop_unit, partial_args)
-for i in range(0, len(aois_point), MAX_BATCH_SIZE):
+for i in tqdm(range(0, len(aois_point), MAX_BATCH_SIZE), disable=not enable_tqdm):
aois_point_batch = aois_point[i : i + MAX_BATCH_SIZE]
with Pool(processes=workers) as pool:
aois_point_result += pool.map(
@@ -312,7 +314,7 @@
pixel_area,
)
partial_get_aoi_poly_pop_unit = partial(_get_aoi_poly_pop_unit, partial_args)
-for i in range(0, len(aois_poly), MAX_BATCH_SIZE):
+for i in tqdm(range(0, len(aois_poly), MAX_BATCH_SIZE), disable=not enable_tqdm):
aois_poly_batch = aois_poly[i : i + MAX_BATCH_SIZE]
with Pool(processes=workers) as pool:
aois_poly_result_with_flag += pool.map(
@@ -351,6 +353,7 @@ def add_aoi_pop(
max_latitude: float,
min_latitude: float,
proj_str: str,
+enable_tqdm: bool = False,
multiprocessing_chunk_size: int = 500,
upsample_factor: int = 4,
workers: int = 32,
@@ -446,7 +449,9 @@
results = []
partial_args = (aois_poly_global, n_upsample, x_step, y_step)
partial_upsample_pixels_unit = partial(_upsample_pixels_unit, partial_args)
-for i in range(0, len(list_pixel2pop), MAX_BATCH_SIZE):
+for i in tqdm(
+    range(0, len(list_pixel2pop), MAX_BATCH_SIZE), disable=not enable_tqdm
+):
list_pixel2pop_batch = list_pixel2pop[i : i + MAX_BATCH_SIZE]
with Pool(processes=workers) as pool:
results += pool.map(
@@ -474,7 +479,9 @@
partial_upsample_pixels_idiot_unit = partial(
_upsample_pixels_idiot_unit, partial_args
)
-for i in range(0, len(list_pixel2pop), MAX_BATCH_SIZE):
+for i in tqdm(
+    range(0, len(list_pixel2pop), MAX_BATCH_SIZE), disable=not enable_tqdm
+):
list_pixel2pop_batch = list_pixel2pop[i : i + MAX_BATCH_SIZE]
with Pool(processes=workers) as pool:
results += pool.map(
@@ -499,6 +506,7 @@
y_step=y_step,
xy_gps_scale2=xy_gps_scale2,
pixel_area=pixel_area,
+enable_tqdm=enable_tqdm,
workers=workers,
max_chunk_size=multiprocessing_chunk_size,
)
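
Here add_aoi_pop keeps enable_tqdm=False as its default, so existing callers see no new output: tqdm's disable flag makes the wrapper a plain pass-through iterator that writes nothing to stderr. A quick self-contained check of that behaviour:

    import contextlib
    import io

    from tqdm import tqdm

    buf = io.StringIO()
    with contextlib.redirect_stderr(buf):
        total = sum(tqdm(range(1_000), disable=True))
    # disable=True: the sum is still computed, but no progress text is emitted
    print(total, len(buf.getvalue()))
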
42 changes: 37 additions & 5 deletions mosstool/map/_map_util/aois/reuse_aois_matchers.py
@@ -9,6 +9,7 @@

from shapely.geometry import Polygon
from shapely.strtree import STRtree
+from tqdm import tqdm

from ....type import Map
from ....util.format_converter import pb2dict
@@ -22,8 +23,29 @@ def _map_aoi2geo(aoi: dict) -> Polygon:
return Polygon(coords)


-def _add_aoi_unit(partial_args:tuple[list[dict[str,Any]],list[dict[str,Any]],float,float,float,float,STRtree,STRtree],aoi:dict[str,Any]):
-    d_matcher, w_matcher, D_DIS_GATE, D_HUGE_GATE,W_DIS_GATE, W_HUGE_GATE, d_tree, w_tree=partial_args
+def _add_aoi_unit(
+    partial_args: tuple[
+        list[dict[str, Any]],
+        list[dict[str, Any]],
+        float,
+        float,
+        float,
+        float,
+        STRtree,
+        STRtree,
+    ],
+    aoi: dict[str, Any],
+):
+    (
+        d_matcher,
+        w_matcher,
+        D_DIS_GATE,
+        D_HUGE_GATE,
+        W_DIS_GATE,
+        W_HUGE_GATE,
+        d_tree,
+        w_tree,
+    ) = partial_args
geo = _map_aoi2geo(aoi)
# d_matched = _matcher_unit(geo, d_matcher, D_DIS_GATE, D_HUGE_GATE)
# w_matched = _matcher_unit(geo, w_matcher, W_DIS_GATE, W_HUGE_GATE)
@@ -40,6 +62,7 @@ def match_map_aois(
net: Map,
matchers: dict,
workers: int,
+enable_tqdm: bool,
dis_gate: float = 30.0,
multiprocessing_chunk_size: int = 500,
):
@@ -56,9 +79,18 @@
d_tree = STRtree([l["geo"] for l in d_matcher])
w_tree = STRtree([l["geo"] for l in w_matcher])
results_aois = []
-partial_args = (d_matcher, w_matcher, D_DIS_GATE, D_HUGE_GATE,W_DIS_GATE, W_HUGE_GATE, d_tree, w_tree)
-partial_add_aoi_unit = partial(_add_aoi_unit,partial_args)
-for i in range(0, len(orig_aois), MAX_BATCH_SIZE):
+partial_args = (
+    d_matcher,
+    w_matcher,
+    D_DIS_GATE,
+    D_HUGE_GATE,
+    W_DIS_GATE,
+    W_HUGE_GATE,
+    d_tree,
+    w_tree,
+)
+partial_add_aoi_unit = partial(_add_aoi_unit, partial_args)
+for i in tqdm(range(0, len(orig_aois), MAX_BATCH_SIZE), disable=not enable_tqdm):
args_batch = orig_aois[i : i + MAX_BATCH_SIZE]
with Pool(processes=workers) as pool:
results_aois += pool.map(
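
The reformatted code above also illustrates the multiprocessing idiom these modules share: shared read-only inputs are packed into one partial_args tuple, bound with functools.partial, and Pool.map then only has to supply the single per-item argument. A self-contained sketch of that idiom with illustrative data, not mosstool's matchers:

    from functools import partial
    from multiprocessing import Pool


    def _match_unit(partial_args, item):
        # partial_args carries the shared inputs; item is the lone argument Pool.map supplies
        lookup, dis_gate = partial_args
        return [name for name, value in lookup.items() if abs(value - item) <= dis_gate]


    if __name__ == "__main__":
        lookup = {f"aoi_{i}": float(i) for i in range(50)}
        bound = partial(_match_unit, (lookup, 1.0))
        with Pool(processes=4) as pool:
            matched = pool.map(bound, [3.2, 17.9, 40.0])
        print(matched)
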
18 changes: 15 additions & 3 deletions mosstool/map/_map_util/aois/utils.py
@@ -7,6 +7,7 @@
import shapely.ops as ops
from shapely.affinity import scale
from shapely.geometry import MultiPoint, MultiPolygon, Point, Polygon
+from tqdm import tqdm

from ..const import *

@@ -355,6 +356,7 @@ def point_extend(center: Point, length: float):
def _match_poi_to_aoi(
aois: list[dict[str, Any]],
pois: list[dict[str, Any]],
+enable_tqdm: bool,
workers: int,
max_chunk_size: int,
):
@@ -368,7 +370,7 @@
results = []
aois_to_match_poi = aois
partial_match_poi_unit = partial(_match_poi_unit, (aois_to_match_poi,))
-for i in range(0, len(pois), MAX_BATCH_SIZE):
+for i in tqdm(range(0, len(pois), MAX_BATCH_SIZE), disable=not enable_tqdm):
pois_batch = pois[i : i + MAX_BATCH_SIZE]
with Pool(processes=workers) as pool:
results += pool.map(
@@ -428,6 +430,7 @@ def generate_aoi_poi(
input_aois,
input_pois,
input_stops,
+enable_tqdm: bool,
workers: int = 32,
multiprocessing_chunk_size: int = 500,
):
@@ -441,7 +444,11 @@
)
# Process and join poi: belong to aoi or become an independent aoi
aois_add_poi, pois_isolate, pois_output = _match_poi_to_aoi(
-    input_aois, input_pois, workers, multiprocessing_chunk_size
+    aois=input_aois,
+    pois=input_pois,
+    workers=workers,
+    enable_tqdm=enable_tqdm,
+    max_chunk_size=multiprocessing_chunk_size,
)
# Convert format to output
aois_output = _post_compute_aoi_poi(aois_add_poi, pois_isolate)
@@ -453,6 +460,7 @@ def generate_sumo_aoi_poi(
input_aois,
input_pois,
input_stops,
+enable_tqdm: bool,
workers: int = 32,
merge_aoi: bool = False,
multiprocessing_chunk_size: int = 500,
@@ -463,7 +471,11 @@
input_aois, multiprocessing_chunk_size, merge_aoi, workers
)
aois_add_poi, pois_isolate, pois_output = _match_poi_to_aoi(
-    input_aois, input_pois, workers, multiprocessing_chunk_size
+    aois=input_aois,
+    pois=input_pois,
+    enable_tqdm=enable_tqdm,
+    workers=workers,
+    max_chunk_size=multiprocessing_chunk_size,
)
aois_output = _post_compute_aoi_poi(aois_add_poi, pois_isolate)
stops_output = _process_stops(input_stops)
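
Note that both call sites of _match_poi_to_aoi switch from positional to keyword arguments in the same change. Because enable_tqdm is inserted ahead of workers in the signature, an old-style positional call would now silently shift its arguments, while keyword calls stay correct wherever the new parameter lands. A toy illustration; the function body is a stand-in:

    def _match_poi_to_aoi(aois, pois, enable_tqdm, workers, max_chunk_size):
        return {"enable_tqdm": enable_tqdm, "workers": workers, "max_chunk_size": max_chunk_size}


    # keyword call: immune to a parameter being inserted mid-signature
    print(_match_poi_to_aoi(aois=[], pois=[], enable_tqdm=False, workers=8, max_chunk_size=500))
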