Skip to content

Commit

Permalink
Merge branch 'master' into fix-audit-pq
Browse files Browse the repository at this point in the history
  • Loading branch information
eldenmoon authored Nov 1, 2024
2 parents 7319593 + e9e3327 commit cf9fdd1
Show file tree
Hide file tree
Showing 645 changed files with 26,030 additions and 8,231 deletions.
61 changes: 59 additions & 2 deletions .github/PULL_REQUEST_TEMPLATE.md
Original file line number Diff line number Diff line change
@@ -1,6 +1,63 @@
## Proposed changes
### What problem does this PR solve?
<!--
You need to clearly describe your PR in this part:
1. What problem was fixed (it's best to include specific error reporting information). How it was fixed.
2. Which behaviors were modified. What was the previous behavior, what is it now, why was it modified, and what possible impacts might there be.
3. What features were added. Why this function was added.
4. Which codes were refactored and why this part of the code was refactored.
5. Which functions were optimized and what is the difference before and after the optimization.
The description of the PR needs to enable reviewers to quickly and clearly understand the logic of the code modification.
-->

<!--
If there are related issues, please fill in the issue number.
- If you want the issue to be closed after the PR is merged, please use "close #12345". Otherwise, use "ref #12345"
-->
Issue Number: close #xxx

<!--Describe your changes.-->
<!--
If this PR is a follow-up to a previous PR, for example, fixing a bug introduced by a related PR,
link that PR here
-->
Related PR: #xxx

Problem Summary:

### Check List (For Committer)

- Test <!-- At least one of them must be included. -->

- [ ] Regression test
- [ ] Unit Test
- [ ] Manual test (add detailed scripts or steps below)
- [ ] No need to test or manual test. Explain why:
- [ ] This is a refactor/code format and no logic has been changed.
- [ ] Previous test can cover this change.
    - [ ] No code files have been changed.
- [ ] Other reason <!-- Add your reason? -->

- Behavior changed:

- [ ] No.
- [ ] Yes. <!-- Explain the behavior change -->

- Does this need documentation?

- [ ] No.
- [ ] Yes. <!-- Add document PR link here. eg: https://github.com/apache/doris-website/pull/1214 -->

- Release note

<!-- bugfix, feat, behavior changed need a release note -->
<!-- Add one line release note for this PR. -->
None

### Check List (For Reviewer who merge this PR)

- [ ] Confirm the release note
- [ ] Confirm test cases
- [ ] Confirm document
- [ ] Add branch pick label <!-- Add branch pick label that this PR should merge into -->

2 changes: 1 addition & 1 deletion .github/workflows/auto-cherry-pick.yml
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,7 @@ jobs:
pip install PyGithub
- name: Check SHA
run: |
expected_sha="1941de05514e15c216067778e0287b4c3ebcd6f6042ee189a12257bfd0cdd9f764e18c7dae5de868e9b7128ce3be98dc8f78252932cee7d55552fc0cf8b69496"
expected_sha="80b7c6087f2a3e4f4c7f035a52e8e7b05ce00f27aa5c1bd52179df685c912447f94a96145fd3204a3958d8ed9777de5a5183b120e99e0e95bbca0366d69b0ac0"
calculated_sha=$(sha512sum tools/auto-pick-script.py | awk '{ print $1 }')
if [ "$calculated_sha" != "$expected_sha" ]; then
echo "SHA mismatch! Expected: $expected_sha, but got: $calculated_sha"
Expand Down
5 changes: 5 additions & 0 deletions be/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -342,6 +342,10 @@ if (ENABLE_INJECTION_POINT)
set(CXX_COMMON_FLAGS "${CXX_COMMON_FLAGS} -DENABLE_INJECTION_POINT")
endif()

if (ENABLE_CACHE_LOCK_DEBUG)
set(CXX_COMMON_FLAGS "${CXX_COMMON_FLAGS} -DENABLE_CACHE_LOCK_DEBUG")
endif()

# Enable memory tracker, which allows BE to limit the memory of tasks such as query, load,
# and compaction,and observe the memory of BE through be_ip:http_port/MemTracker.
# Adding the option `USE_MEM_TRACKER=OFF sh build.sh` when compiling can turn off the memory tracker,
Expand Down Expand Up @@ -782,6 +786,7 @@ install(DIRECTORY DESTINATION ${OUTPUT_DIR}/conf)
install(FILES
${BASE_DIR}/../bin/start_be.sh
${BASE_DIR}/../bin/stop_be.sh
${BASE_DIR}/../tools/jeprof
PERMISSIONS OWNER_READ OWNER_WRITE OWNER_EXECUTE
GROUP_READ GROUP_WRITE GROUP_EXECUTE
WORLD_READ WORLD_EXECUTE
Expand Down
23 changes: 15 additions & 8 deletions be/src/cloud/cloud_base_compaction.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -124,15 +124,18 @@ Status CloudBaseCompaction::prepare_compact() {
for (auto& rs : _input_rowsets) {
_input_row_num += rs->num_rows();
_input_segments += rs->num_segments();
_input_rowsets_size += rs->data_disk_size();
_input_rowsets_data_size += rs->data_disk_size();
_input_rowsets_total_size += rs->total_disk_size();
}
LOG_INFO("start CloudBaseCompaction, tablet_id={}, range=[{}-{}]", _tablet->tablet_id(),
_input_rowsets.front()->start_version(), _input_rowsets.back()->end_version())
.tag("job_id", _uuid)
.tag("input_rowsets", _input_rowsets.size())
.tag("input_rows", _input_row_num)
.tag("input_segments", _input_segments)
.tag("input_data_size", _input_rowsets_size);
.tag("input_rowsets_data_size", _input_rowsets_data_size)
.tag("input_rowsets_index_size", _input_rowsets_index_size)
.tag("input_rowsets_total_size", _input_rowsets_total_size);
return st;
}

Expand Down Expand Up @@ -270,17 +273,21 @@ Status CloudBaseCompaction::execute_compact() {
.tag("input_rowsets", _input_rowsets.size())
.tag("input_rows", _input_row_num)
.tag("input_segments", _input_segments)
.tag("input_data_size", _input_rowsets_size)
.tag("input_rowsets_data_size", _input_rowsets_data_size)
.tag("input_rowsets_index_size", _input_rowsets_index_size)
.tag("input_rowsets_total", _input_rowsets_total_size)
.tag("output_rows", _output_rowset->num_rows())
.tag("output_segments", _output_rowset->num_segments())
.tag("output_data_size", _output_rowset->data_disk_size());
.tag("output_rowset_data_size", _output_rowset->data_disk_size())
.tag("output_rowset_index_size", _output_rowset->index_disk_size())
.tag("output_rowset_total_size", _output_rowset->total_disk_size());

//_compaction_succeed = true;
_state = CompactionState::SUCCESS;

DorisMetrics::instance()->base_compaction_deltas_total->increment(_input_rowsets.size());
DorisMetrics::instance()->base_compaction_bytes_total->increment(_input_rowsets_size);
base_output_size << _output_rowset->data_disk_size();
DorisMetrics::instance()->base_compaction_bytes_total->increment(_input_rowsets_total_size);
base_output_size << _output_rowset->total_disk_size();

return Status::OK();
}
Expand All @@ -302,8 +309,8 @@ Status CloudBaseCompaction::modify_rowsets() {
compaction_job->set_output_cumulative_point(cloud_tablet()->cumulative_layer_point());
compaction_job->set_num_input_rows(_input_row_num);
compaction_job->set_num_output_rows(_output_rowset->num_rows());
compaction_job->set_size_input_rowsets(_input_rowsets_size);
compaction_job->set_size_output_rowsets(_output_rowset->data_disk_size());
compaction_job->set_size_input_rowsets(_input_rowsets_total_size);
compaction_job->set_size_output_rowsets(_output_rowset->total_disk_size());
compaction_job->set_num_input_segments(_input_segments);
compaction_job->set_num_output_segments(_output_rowset->num_segments());
compaction_job->set_num_input_rowsets(_input_rowsets.size());
Expand Down
28 changes: 19 additions & 9 deletions be/src/cloud/cloud_cumulative_compaction.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -164,15 +164,19 @@ Status CloudCumulativeCompaction::prepare_compact() {
for (auto& rs : _input_rowsets) {
_input_row_num += rs->num_rows();
_input_segments += rs->num_segments();
_input_rowsets_size += rs->data_disk_size();
_input_rowsets_data_size += rs->data_disk_size();
_input_rowsets_index_size += rs->index_disk_size();
_input_rowsets_total_size += rs->total_disk_size();
}
LOG_INFO("start CloudCumulativeCompaction, tablet_id={}, range=[{}-{}]", _tablet->tablet_id(),
_input_rowsets.front()->start_version(), _input_rowsets.back()->end_version())
.tag("job_id", _uuid)
.tag("input_rowsets", _input_rowsets.size())
.tag("input_rows", _input_row_num)
.tag("input_segments", _input_segments)
.tag("input_data_size", _input_rowsets_size)
.tag("input_rowsets_data_size", _input_rowsets_data_size)
.tag("input_rowsets_index_size", _input_rowsets_index_size)
.tag("input_rowsets_total_size", _input_rowsets_total_size)
.tag("tablet_max_version", cloud_tablet()->max_version_unlocked())
.tag("cumulative_point", cloud_tablet()->cumulative_layer_point())
.tag("num_rowsets", cloud_tablet()->fetch_add_approximate_num_rowsets(0))
Expand Down Expand Up @@ -201,10 +205,14 @@ Status CloudCumulativeCompaction::execute_compact() {
.tag("input_rowsets", _input_rowsets.size())
.tag("input_rows", _input_row_num)
.tag("input_segments", _input_segments)
.tag("input_data_size", _input_rowsets_size)
.tag("input_rowsets_data_size", _input_rowsets_data_size)
.tag("input_rowsets_index_size", _input_rowsets_index_size)
.tag("input_rowsets_total_size", _input_rowsets_total_size)
.tag("output_rows", _output_rowset->num_rows())
.tag("output_segments", _output_rowset->num_segments())
.tag("output_data_size", _output_rowset->data_disk_size())
.tag("output_rowset_data_size", _output_rowset->data_disk_size())
.tag("output_rowset_index_size", _output_rowset->index_disk_size())
.tag("output_rowset_total_size", _output_rowset->total_disk_size())
.tag("tablet_max_version", _tablet->max_version_unlocked())
.tag("cumulative_point", cloud_tablet()->cumulative_layer_point())
.tag("num_rowsets", cloud_tablet()->fetch_add_approximate_num_rowsets(0))
Expand All @@ -213,8 +221,9 @@ Status CloudCumulativeCompaction::execute_compact() {
_state = CompactionState::SUCCESS;

DorisMetrics::instance()->cumulative_compaction_deltas_total->increment(_input_rowsets.size());
DorisMetrics::instance()->cumulative_compaction_bytes_total->increment(_input_rowsets_size);
cumu_output_size << _output_rowset->data_disk_size();
DorisMetrics::instance()->cumulative_compaction_bytes_total->increment(
_input_rowsets_total_size);
cumu_output_size << _output_rowset->total_disk_size();

return Status::OK();
}
Expand Down Expand Up @@ -243,8 +252,8 @@ Status CloudCumulativeCompaction::modify_rowsets() {
compaction_job->set_output_cumulative_point(new_cumulative_point);
compaction_job->set_num_input_rows(_input_row_num);
compaction_job->set_num_output_rows(_output_rowset->num_rows());
compaction_job->set_size_input_rowsets(_input_rowsets_size);
compaction_job->set_size_output_rowsets(_output_rowset->data_disk_size());
compaction_job->set_size_input_rowsets(_input_rowsets_total_size);
compaction_job->set_size_output_rowsets(_output_rowset->total_disk_size());
compaction_job->set_num_input_segments(_input_segments);
compaction_job->set_num_output_segments(_output_rowset->num_segments());
compaction_job->set_num_input_rowsets(_input_rowsets.size());
Expand Down Expand Up @@ -351,7 +360,8 @@ Status CloudCumulativeCompaction::modify_rowsets() {
stats.num_rows(), stats.data_size());
}
}
if (_tablet->keys_type() == KeysType::UNIQUE_KEYS &&
if (config::enable_delete_bitmap_merge_on_compaction &&
_tablet->keys_type() == KeysType::UNIQUE_KEYS &&
_tablet->enable_unique_key_merge_on_write() && _input_rowsets.size() != 1) {
process_old_version_delete_bitmap();
}
Expand Down
2 changes: 1 addition & 1 deletion be/src/cloud/cloud_cumulative_compaction_policy.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -209,7 +209,7 @@ int64_t CloudSizeBasedCumulativeCompactionPolicy::new_cumulative_point(
// if rowsets have no delete version, check output_rowset total disk size satisfies promotion size.
return output_rowset->start_version() == last_cumulative_point &&
(last_delete_version.first != -1 ||
output_rowset->data_disk_size() >= cloud_promotion_size(tablet) ||
output_rowset->total_disk_size() >= cloud_promotion_size(tablet) ||
satisfy_promotion_version)
? output_rowset->end_version() + 1
: last_cumulative_point;
Expand Down
2 changes: 2 additions & 0 deletions be/src/cloud/cloud_delete_bitmap_action.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -95,6 +95,8 @@ Status CloudDeleteBitmapAction::_handle_show_delete_bitmap_count(HttpRequest* re
auto count = tablet->tablet_meta()->delete_bitmap().get_delete_bitmap_count();
auto cardinality = tablet->tablet_meta()->delete_bitmap().cardinality();
auto size = tablet->tablet_meta()->delete_bitmap().get_size();
LOG(INFO) << "show_delete_bitmap_count,tablet_id=" << tablet_id << ",count=" << count
<< ",cardinality=" << cardinality << ",size=" << size;

rapidjson::Document root;
root.SetObject();
Expand Down
30 changes: 21 additions & 9 deletions be/src/cloud/cloud_full_compaction.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -98,15 +98,19 @@ Status CloudFullCompaction::prepare_compact() {
for (auto& rs : _input_rowsets) {
_input_row_num += rs->num_rows();
_input_segments += rs->num_segments();
_input_rowsets_size += rs->data_disk_size();
_input_rowsets_data_size += rs->data_disk_size();
_input_rowsets_index_size += rs->index_disk_size();
_input_rowsets_total_size += rs->total_disk_size();
}
LOG_INFO("start CloudFullCompaction, tablet_id={}, range=[{}-{}]", _tablet->tablet_id(),
_input_rowsets.front()->start_version(), _input_rowsets.back()->end_version())
.tag("job_id", _uuid)
.tag("input_rowsets", _input_rowsets.size())
.tag("input_rows", _input_row_num)
.tag("input_segments", _input_segments)
.tag("input_data_size", _input_rowsets_size);
.tag("input_rowsets_data_size", _input_rowsets_data_size)
.tag("input_rowsets_index_size", _input_rowsets_index_size)
.tag("input_rowsets_total_size", _input_rowsets_total_size);
return st;
}

Expand Down Expand Up @@ -162,16 +166,20 @@ Status CloudFullCompaction::execute_compact() {
.tag("input_rowsets", _input_rowsets.size())
.tag("input_rows", _input_row_num)
.tag("input_segments", _input_segments)
.tag("input_data_size", _input_rowsets_size)
.tag("input_rowsets_data_size", _input_rowsets_data_size)
.tag("input_rowsets_index_size", _input_rowsets_index_size)
.tag("input_rowsets_total_size", _input_rowsets_total_size)
.tag("output_rows", _output_rowset->num_rows())
.tag("output_segments", _output_rowset->num_segments())
.tag("output_data_size", _output_rowset->data_disk_size());
.tag("output_rowset_data_size", _output_rowset->data_disk_size())
.tag("output_rowset_index_size", _output_rowset->index_disk_size())
.tag("output_rowset_total_size", _output_rowset->total_disk_size());

_state = CompactionState::SUCCESS;

DorisMetrics::instance()->full_compaction_deltas_total->increment(_input_rowsets.size());
DorisMetrics::instance()->full_compaction_bytes_total->increment(_input_rowsets_size);
full_output_size << _output_rowset->data_disk_size();
DorisMetrics::instance()->full_compaction_bytes_total->increment(_input_rowsets_total_size);
full_output_size << _output_rowset->total_disk_size();

return Status::OK();
}
Expand All @@ -193,8 +201,12 @@ Status CloudFullCompaction::modify_rowsets() {
compaction_job->set_output_cumulative_point(_output_rowset->end_version() + 1);
compaction_job->set_num_input_rows(_input_row_num);
compaction_job->set_num_output_rows(_output_rowset->num_rows());
compaction_job->set_size_input_rowsets(_input_rowsets_size);
compaction_job->set_size_output_rowsets(_output_rowset->data_disk_size());
compaction_job->set_size_input_rowsets(_input_rowsets_total_size);
compaction_job->set_size_output_rowsets(_output_rowset->total_disk_size());
DBUG_EXECUTE_IF("CloudFullCompaction::modify_rowsets.wrong_compaction_data_size", {
compaction_job->set_size_input_rowsets(1);
compaction_job->set_size_output_rowsets(10000001);
})
compaction_job->set_num_input_segments(_input_segments);
compaction_job->set_num_output_segments(_output_rowset->num_segments());
compaction_job->set_num_input_rowsets(_input_rowsets.size());
Expand Down Expand Up @@ -341,7 +353,7 @@ Status CloudFullCompaction::_cloud_full_compaction_update_delete_bitmap(int64_t
.tag("input_rowsets", _input_rowsets.size())
.tag("input_rows", _input_row_num)
.tag("input_segments", _input_segments)
.tag("input_data_size", _input_rowsets_size)
.tag("input_rowsets_total_size", _input_rowsets_total_size)
.tag("update_bitmap_size", delete_bitmap->delete_bitmap.size());
_tablet->tablet_meta()->delete_bitmap().merge(*delete_bitmap);
return Status::OK();
Expand Down
Loading

0 comments on commit cf9fdd1

Please sign in to comment.