Stabilize feature order in overzoom (#210)
* Stabilize feature order in overzoom

* I want my sorts to be stable, please

* Revert "[ci] test in debug mode (#202)"

This reverts commit 853ada8.

* No need to reinitialize here
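Background on the change: every sort touched by this commit goes from std::sort to std::stable_sort. When a comparator reports two features as equivalent (for example, when the sequence attribute it would normally compare is not present), std::sort may place them in either relative order, and that order can vary between runs or standard-library implementations; std::stable_sort keeps equivalent elements in their original input order, which is what makes the output reproducible. The sketch below is a minimal standalone illustration of that distinction, not code from this repository; the Feature struct and its optional seq field are hypothetical stand-ins.

```cpp
#include <algorithm>
#include <cstdio>
#include <optional>
#include <string>
#include <vector>

// Hypothetical stand-in for a vector-tile feature: it may or may not
// carry an input-order sequence number.
struct Feature {
    std::string name;
    std::optional<long long> seq;  // absent when the sequence attribute was not written
};

// Comparator in the spirit of preservecmp: order by sequence when present.
// Features without a sequence number all compare "equal" to each other.
static bool by_sequence(const Feature &a, const Feature &b) {
    return a.seq.value_or(0) < b.seq.value_or(0);
}

int main() {
    std::vector<Feature> features = {
        {"park", std::nullopt}, {"river", std::nullopt}, {"road", std::nullopt}};

    // std::sort may emit park/river/road in any order, since they all tie.
    // std::stable_sort is guaranteed to keep the original input order.
    std::stable_sort(features.begin(), features.end(), by_sequence);

    for (const auto &f : features) {
        printf("%s\n", f.name.c_str());  // always: park, river, road
    }
}
```

std::stable_sort pays for this guarantee with extra memory or an O(n log² n) fallback, which is presumably an acceptable cost here relative to reproducible output.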
e-n-f authored Feb 29, 2024
1 parent d9a5c2e commit 312e156
Showing 14 changed files with 33 additions and 26 deletions.
7 changes: 2 additions & 5 deletions .github/workflows/test.yml
@@ -5,10 +5,7 @@ on: [push]
jobs:
test:
runs-on: ubuntu-latest
-strategy:
-matrix:
-version: ['Release', 'Debug']
steps:
- uses: actions/checkout@v3
-- run: uname -a; BUILDTYPE=${{ matrix.version }} make
-- run: make test
+- run: uname -a; make
+- run: make test
4 changes: 4 additions & 0 deletions CHANGELOG.md
@@ -1,3 +1,7 @@
+# 2.47.0
+
+* Stabilize feature order in tippecanoe-overzoom when --preserve-feature-order is specified but the sequence attribute is not present
+
# 2.46.0

* Polygon dust returns to having the attributes of the contributing feature nearest the placeholder instead of the contributing feature with the largest area.
6 changes: 6 additions & 0 deletions Makefile
@@ -278,6 +278,12 @@ overzoom-test: tippecanoe-overzoom
./tippecanoe-decode tests/pbf/13-1310-3166.pbf 13 1310 3166 > tests/pbf/13-1310-3166.pbf.json.check
cmp tests/pbf/13-1310-3166.pbf.json.check tests/pbf/13-1310-3166.pbf.json
rm tests/pbf/13-1310-3166.pbf tests/pbf/13-1310-3166.pbf.json.check
+# Make sure feature order is stable
+./tippecanoe-overzoom --preserve-input-order -o tests/pbf/11-327-791-out.pbf tests/pbf/11-327-791.pbf 11/327/791 11/327/791
+./tippecanoe-decode tests/pbf/11-327-791.pbf 11 327 791 > tests/pbf/11-327-791.json
+./tippecanoe-decode tests/pbf/11-327-791-out.pbf 11 327 791 > tests/pbf/11-327-791-out.json
+cmp tests/pbf/11-327-791.json tests/pbf/11-327-791-out.json
+rm tests/pbf/11-327-791.json tests/pbf/11-327-791-out.json tests/pbf/11-327-791-out.pbf
# Different detail and buffer, and attribute stripping
./tippecanoe-overzoom -d8 -b30 -y NAME -y name -y scalerank -o tests/pbf/13-1310-3166-8-30.pbf tests/pbf/11-327-791.pbf 11/327/791 13/1310/3166
./tippecanoe-decode tests/pbf/13-1310-3166-8-30.pbf 13 1310 3166 > tests/pbf/13-1310-3166-8-30.pbf.json.check
2 changes: 1 addition & 1 deletion clip.cpp
@@ -1026,7 +1026,7 @@ std::string overzoom(const mvt_tile &tile, int oz, int ox, int oy, int nz, int n
}

if (preserve_input_order) {
-std::sort(outlayer.features.begin(), outlayer.features.end(), preservecmp);
+std::stable_sort(outlayer.features.begin(), outlayer.features.end(), preservecmp);
}

if (outlayer.features.size() > 0) {
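The stable_sort call above is what the new Makefile test exercises: tippecanoe-overzoom --preserve-input-order writes tile 11/327/791 back out at its own zoom and position, and the decoded output has to match the decoded input exactly, which only holds if features that preservecmp cannot distinguish are never reordered. Below is a minimal property check of that invariant, with hypothetical names (Feature, no_usable_key) rather than repository code:

```cpp
#include <algorithm>
#include <cassert>
#include <cstddef>
#include <vector>

// Hypothetical: features identified only by an id, with no usable sort key,
// so the comparator considers every pair equivalent.
struct Feature {
    int id;
};

static bool no_usable_key(const Feature &, const Feature &) {
    return false;  // nothing to compare on: everything ties
}

int main() {
    std::vector<Feature> input = {{5}, {2}, {9}, {1}};
    std::vector<Feature> output = input;

    // With std::sort this could end up as any permutation of the input;
    // with std::stable_sort it is guaranteed to be the identity, which is
    // the property the 11/327/791 round-trip test relies on.
    std::stable_sort(output.begin(), output.end(), no_usable_key);

    for (std::size_t i = 0; i < input.size(); i++) {
        assert(output[i].id == input[i].id);
    }
}
```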
2 changes: 1 addition & 1 deletion dirtiles.cpp
@@ -176,7 +176,7 @@ std::vector<zxy> enumerate_dirtiles(const char *fname, int minzoom, int maxzoom)
closedir(d1);
}

-std::sort(tiles.begin(), tiles.end());
+std::stable_sort(tiles.begin(), tiles.end());
return tiles;
}

4 changes: 2 additions & 2 deletions geometry.cpp
@@ -1189,7 +1189,7 @@ drawvec polygon_to_anchor(const drawvec &geom) {
std::vector<candidate> candidates;

for (size_t pass = 0; pass < 4; pass++) {
-std::sort(points.begin(), points.end(), sorty_sorter(pass));
+std::stable_sort(points.begin(), points.end(), sorty_sorter(pass));

for (size_t i = 1; i < points.size(); i++) {
double dx = points[i].x - points[i - 1].x;
@@ -1213,7 +1213,7 @@ drawvec polygon_to_anchor(const drawvec &geom) {
// segment, if we find one whose midpoint is inside the polygon and
// far enough from any edge to be good enough, stop looking.

-std::sort(candidates.begin(), candidates.end());
+std::stable_sort(candidates.begin(), candidates.end());
// only check the top 50 stride midpoints, since this list can be quite large
for (size_t i = 0; i < candidates.size() && i < 50; i++) {
double maybe_goodness = label_goodness(geom, candidates[i].x, candidates[i].y);
2 changes: 1 addition & 1 deletion main.cpp
@@ -2704,7 +2704,7 @@ std::pair<int, metadata> read_input(std::vector<source> &sources, char *fname, i
}
}

-std::sort(ddv.begin(), ddv.end());
+std::stable_sort(ddv.begin(), ddv.end());

size_t i = 0;
for (int z = 0; z <= basezoom; z++) {
2 changes: 1 addition & 1 deletion mvt.cpp
@@ -377,7 +377,7 @@ std::string mvt_tile::encode() {
sorted_values.push_back(std::move(sv));
}

-std::sort(sorted_values.begin(), sorted_values.end());
+std::stable_sort(sorted_values.begin(), sorted_values.end());
std::vector<size_t> mapping;
mapping.resize(sorted_values.size());

4 changes: 2 additions & 2 deletions pmtiles_file.cpp
@@ -174,7 +174,7 @@ void mbtiles_map_image_to_pmtiles(char *fname, metadata m, bool tile_compression
sqlite3_finalize(stmt);
}

-std::sort(tile_ids.begin(), tile_ids.end());
+std::stable_sort(tile_ids.begin(), tile_ids.end());

std::unordered_map<std::string, std::pair<unsigned long long, unsigned long>> hash_to_offset_len;
std::vector<pmtiles::entryv3> entries;
@@ -263,7 +263,7 @@ void mbtiles_map_image_to_pmtiles(char *fname, metadata m, bool tile_compression

// finalize PMTiles archive.
{
-std::sort(entries.begin(), entries.end(), pmtiles::entryv3_cmp());
+std::stable_sort(entries.begin(), entries.end(), pmtiles::entryv3_cmp());

std::string root_bytes;
std::string leaves_bytes;
2 changes: 1 addition & 1 deletion serial.cpp
@@ -604,7 +604,7 @@ int serialize_feature(struct serialization_state *sst, serial_feature &sf, std::
locs.push_back(encode_index(SHIFT_LEFT(scaled_geometry[i].x), SHIFT_LEFT(scaled_geometry[i].y)));
}
}
-std::sort(locs.begin(), locs.end());
+std::stable_sort(locs.begin(), locs.end());
size_t n = 0;
double sum = 0;
for (size_t i = 1; i < locs.size(); i++) {
4 changes: 2 additions & 2 deletions shared_borders.cpp
@@ -136,7 +136,7 @@ bool find_common_edges(std::vector<serial_feature> &features, int z, int line_de
}
}

-std::sort(edges.begin(), edges.end(), edgecmp_ring);
+std::stable_sort(edges.begin(), edges.end(), edgecmp_ring);
std::set<draw> necessaries;

// Now mark all the points where the set of rings using the edge on one side
@@ -397,7 +397,7 @@ bool find_common_edges(std::vector<serial_feature> &features, int z, int line_de
}
}
}
-std::sort(order.begin(), order.end());
+std::stable_sort(order.begin(), order.end());

size_t merged = 0;
for (size_t o = 0; o < order.size(); o++) {
2 changes: 1 addition & 1 deletion tile-join.cpp
@@ -584,7 +584,7 @@ struct tileset_reader {
next_overzoomed_tiles.clear();
}

-std::sort(overzoomed_tiles.begin(), overzoomed_tiles.end(), tilecmp);
+std::stable_sort(overzoomed_tiles.begin(), overzoomed_tiles.end(), tilecmp);
overzoom_consumed_at_this_zoom = false;
}

16 changes: 8 additions & 8 deletions tile.cpp
@@ -426,7 +426,7 @@ static std::vector<serial_feature> disassemble_multiplier_clusters(std::vector<s

// sort the other features by their drop sequence, for consistency across zoom levels
if (cluster.size() > 1) {
-std::sort(cluster.begin() + 1, cluster.end(), drop_sequence_cmp());
+std::stable_sort(cluster.begin() + 1, cluster.end(), drop_sequence_cmp());
}

for (auto const &feature : cluster) {
@@ -814,7 +814,7 @@ static unsigned long long choose_mingap(std::vector<unsigned long long> const &i
// If there are no higher extents available, the tile has already been reduced as much as possible
// and tippecanoe will exit with an error.
static long long choose_minextent(std::vector<long long> &extents, double f, long long existing_extent) {
-std::sort(extents.begin(), extents.end());
+std::stable_sort(extents.begin(), extents.end());

size_t ix = (extents.size() - 1) * (1 - f);
while (ix + 1 < extents.size() && extents[ix] == existing_extent) {
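The choose_minextent hunk above (truncated here) sorts the extents and then starts from index (extents.size() - 1) * (1 - f), skipping forward past values equal to the threshold already in effect so that the next threshold is strictly larger. Below is a self-contained sketch of just that selection step; it assumes the unshown remainder of the function simply returns the extent at the resulting index, and the names are illustrative rather than repository code:

```cpp
#include <algorithm>
#include <cstddef>
#include <cstdio>
#include <vector>

// Sketch of the threshold selection shown above: pick the extent at
// rank (n - 1) * (1 - f) of the sorted list, then skip past any values
// equal to the existing threshold so the new one strictly increases.
static long long choose_threshold(std::vector<long long> extents, double f,
                                  long long existing_extent) {
    std::stable_sort(extents.begin(), extents.end());

    std::size_t ix = (extents.size() - 1) * (1 - f);
    while (ix + 1 < extents.size() && extents[ix] == existing_extent) {
        ix++;
    }
    return extents[ix];
}

int main() {
    // Ten extents; with f = 0.3 the starting index is 9 * 0.7 = 6 (value 60),
    // and if 60 were already the threshold we advance to 75.
    std::vector<long long> extents = {5, 10, 20, 30, 40, 50, 60, 75, 90, 120};
    printf("%lld\n", choose_threshold(extents, 0.3, 0));   // 60
    printf("%lld\n", choose_threshold(extents, 0.3, 60));  // 75
}
```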
@@ -829,7 +829,7 @@ static unsigned long long choose_mindrop_sequence(std::vector<unsigned long long
return ULLONG_MAX;
}

-std::sort(drop_sequences.begin(), drop_sequences.end());
+std::stable_sort(drop_sequences.begin(), drop_sequences.end());

size_t ix = (drop_sequences.size() - 1) * (1 - f);
while (ix + 1 < drop_sequences.size() && drop_sequences[ix] == existing_drop_sequence) {
@@ -2009,7 +2009,7 @@ long long write_tile(decompressor *geoms, std::atomic<long long> *geompos_in, ch
// Reorder and coalesce.
// Sort back into input order or by attribute value

-std::sort(shared_nodes.begin(), shared_nodes.end());
+std::stable_sort(shared_nodes.begin(), shared_nodes.end());

for (auto &kv : layers) {
std::string const &layername = kv.first;
@@ -2031,7 +2031,7 @@ long long write_tile(decompressor *geoms, std::atomic<long long> *geompos_in, ch
// these will be smaller numbers, and avoid the problem of the
// original sequence number varying based on how many reader threads
// there were reading the input
-std::sort(feature_sequences.begin(), feature_sequences.end());
+std::stable_sort(feature_sequences.begin(), feature_sequences.end());
for (size_t i = 0; i < feature_sequences.size(); i++) {
size_t j = feature_sequences[i].second;
serial_val sv(mvt_double, std::to_string(i));
@@ -2151,7 +2151,7 @@ long long write_tile(decompressor *geoms, std::atomic<long long> *geompos_in, ch
std::vector<serial_feature> &layer_features = features;

if (additional[A_REORDER]) {
-std::sort(layer_features.begin(), layer_features.end(), coalindexcmp_comparator());
+std::stable_sort(layer_features.begin(), layer_features.end(), coalindexcmp_comparator());
}

if (additional[A_COALESCE]) {
@@ -2215,13 +2215,13 @@ long long write_tile(decompressor *geoms, std::atomic<long long> *geompos_in, ch

if (prevent[P_INPUT_ORDER]) {
auto clustered = assemble_multiplier_clusters(layer_features);
-std::sort(clustered.begin(), clustered.end(), preservecmp);
+std::stable_sort(clustered.begin(), clustered.end(), preservecmp);
layer_features = disassemble_multiplier_clusters(clustered);
}

if (order_by.size() != 0) {
auto clustered = assemble_multiplier_clusters(layer_features);
-std::sort(clustered.begin(), clustered.end(), ordercmp());
+std::stable_sort(clustered.begin(), clustered.end(), ordercmp());
layer_features = disassemble_multiplier_clusters(clustered);
}

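One more property worth noting about the tile.cpp hunks: several of these sorts can run in sequence over the same set of features (the A_REORDER coalesce-index sort, then the input-order or order-by sort on the assembled clusters). With stable sorts, the last pass decides the primary order and, among features it considers equal, preserves whatever order the previous pass left, so the combined result is deterministic. A generic illustration of that composition property; Row, layer, and rank are made-up names, not tippecanoe types:

```cpp
#include <algorithm>
#include <cstdio>
#include <string>
#include <vector>

// Illustration of why successive stable sorts compose predictably:
// sorting by a secondary key first and then stably by a primary key
// yields primary-then-secondary order, because the second pass keeps
// the first pass's order among elements it considers equal.
struct Row {
    std::string layer;  // primary key (hypothetical)
    int rank;           // secondary key (hypothetical)
};

int main() {
    std::vector<Row> rows = {{"water", 2}, {"roads", 1}, {"water", 1}, {"roads", 2}};

    std::stable_sort(rows.begin(), rows.end(),
                     [](const Row &a, const Row &b) { return a.rank < b.rank; });
    std::stable_sort(rows.begin(), rows.end(),
                     [](const Row &a, const Row &b) { return a.layer < b.layer; });

    for (const auto &r : rows) {
        printf("%s %d\n", r.layer.c_str(), r.rank);  // roads 1, roads 2, water 1, water 2
    }
}
```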
2 changes: 1 addition & 1 deletion version.hpp
@@ -1,6 +1,6 @@
#ifndef VERSION_HPP
#define VERSION_HPP

#define VERSION "v2.46.0"
#define VERSION "v2.47.0"

#endif
