branch-3.0: [Fix](ms) Fix ci-uncaught ut coredump due to dynamic rate limit setting #44362 (#44370)

Cherry-picked from #44362

Co-authored-by: Siyang Tang <[email protected]>
github-actions[bot] and TangSiyang2001 authored Nov 21, 2024
1 parent 7e0410e commit 9cd1287
Showing 3 changed files with 10 additions and 7 deletions.
2 changes: 2 additions & 0 deletions cloud/script/run_all_tests.sh
@@ -143,11 +143,13 @@ for i in *_test; do
patchelf --set-rpath "$(pwd)" "${i}"
fi

+    set -euo pipefail
if [[ "${filter}" == "" ]]; then
LLVM_PROFILE_FILE="./report/${i}.profraw" "./${i}" --gtest_print_time=true --gtest_output="xml:${i}.xml"
else
LLVM_PROFILE_FILE="./report/${i}.profraw" "./${i}" --gtest_print_time=true --gtest_output="xml:${i}.xml" --gtest_filter="${filter}"
fi
+    set +euo pipefail
unittest_files[${#unittest_files[*]}]="${i}"
echo "--------------------------"
fi
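Note: this wrapper is presumably the "ci-uncaught" part of the fix. Without errexit, a unit-test binary that aborts (for example on the coredump addressed below) does not fail run_all_tests.sh, so CI still reports success; enabling set -euo pipefail just around the gtest invocation makes any non-zero exit stop the run, and set +euo pipefail restores the script's original lenient behavior afterwards.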
14 changes: 7 additions & 7 deletions cloud/src/meta-service/meta_service_http.cpp
@@ -409,8 +409,8 @@ static HttpResponse process_adjust_rate_limit(MetaServiceImpl* service, brpc::Co
processors[0b101] = std::move(set_instance_qps_limit);
processors[0b111] = std::move(set_instance_rpc_qps_limit);

-    uint8_t level = (0x01 & qps_limit_str.empty()) | ((0x01 & rpc_name.empty()) << 1) |
-                    ((0x01 & instance_id.empty()) << 2);
+    uint8_t level = (0x01 & !qps_limit_str.empty()) | ((0x01 & !rpc_name.empty()) << 1) |
+                    ((0x01 & !instance_id.empty()) << 2);

DCHECK_LT(level, 8);
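The lines above select a handler from the processors table by packing three "parameter present" flags into a 3-bit index; the pre-fix code set each bit when the parameter was missing, so a fully specified request could land on an empty slot. A minimal standalone sketch of the indexing (hypothetical helper level_of and illustrative argument values, not code from the repository):

// Hypothetical reconstruction of the level computation; only the '!' negation
// distinguishes the fixed version from the buggy one.
#include <cstdint>
#include <cstdio>
#include <string>

static uint8_t level_of(const std::string& qps_limit_str, const std::string& rpc_name,
                        const std::string& instance_id) {
    // bit 0: qps_limit provided, bit 1: rpc_name provided, bit 2: instance_id provided
    return static_cast<uint8_t>((0x01 & !qps_limit_str.empty()) |
                                ((0x01 & !rpc_name.empty()) << 1) |
                                ((0x01 & !instance_id.empty()) << 2));
}

int main() {
    // qps_limit and rpc_name given, no instance_id -> 0b011.
    std::printf("%u\n", static_cast<unsigned>(level_of("100", "get_version", "")));           // 3
    // all three given -> 0b111, i.e. processors[0b111] (set_instance_rpc_qps_limit above).
    std::printf("%u\n", static_cast<unsigned>(level_of("100", "get_version", "instance_1"))); // 7
    // The pre-fix code (without '!') would have returned 0b100 and 0b000 respectively,
    // dispatching to the wrong slots.
    return 0;
}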

@@ -420,21 +420,21 @@ static HttpResponse process_adjust_rate_limit(MetaServiceImpl* service, brpc::Co
static HttpResponse process_query_rate_limit(MetaServiceImpl* service, brpc::Controller* cntl) {
auto rate_limiter = service->rate_limiter();
rapidjson::Document d;
+    d.SetObject();
auto get_qps_limit = [&d](std::string_view rpc_name,
std::shared_ptr<RpcRateLimiter> rpc_limiter) {
rapidjson::Document node;
+        node.SetObject();
rapidjson::Document sub;
+        sub.SetObject();
auto get_qps_token_limit = [&](std::string_view instance_id,
std::shared_ptr<RpcRateLimiter::QpsToken> qps_token) {
sub.AddMember(rapidjson::StringRef(instance_id.data(), instance_id.size()),
qps_token->max_qps_limit(), d.GetAllocator());
};
rpc_limiter->for_each_qps_token(std::move(get_qps_token_limit));

-        auto max_qps_limit = std::to_string(rpc_limiter->max_qps_limit());
-        node.AddMember("RPC qps limit",
-                       rapidjson::StringRef(max_qps_limit.data(), max_qps_limit.size()),
-                       d.GetAllocator());
+        node.AddMember("RPC qps limit", rpc_limiter->max_qps_limit(), d.GetAllocator());
node.AddMember("instance specific qps limit", sub, d.GetAllocator());
d.AddMember(rapidjson::StringRef(rpc_name.data(), rpc_name.size()), node, d.GetAllocator());
};
@@ -443,7 +443,7 @@ static HttpResponse process_query_rate_limit(MetaServiceImpl* service, brpc::Con
rapidjson::StringBuffer sb;
rapidjson::PrettyWriter<rapidjson::StringBuffer> writer(sb);
d.Accept(writer);
-    return http_json_reply(MetaServiceCode::OK, sb.GetString());
+    return http_json_reply(MetaServiceCode::OK, "", sb.GetString());
}

static HttpResponse process_decode_key(MetaServiceImpl*, brpc::Controller* ctrl) {
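The query path above was presumably the source of the coredump: in the old code d, node, and sub were used as JSON objects without first calling SetObject() (AddMember() asserts that the target is an object), and the RPC limit was stored via rapidjson::StringRef() pointing at a local std::string that dies before the document is serialized. A minimal standalone sketch of the safe patterns the patch moves to (illustrative keys and values, not the meta-service code):

#include <cstdio>
#include <string>

#include <rapidjson/document.h>
#include <rapidjson/prettywriter.h>
#include <rapidjson/stringbuffer.h>

int main() {
    rapidjson::Document d;
    d.SetObject(); // a default-constructed Document is Null; AddMember() on it asserts

    {
        std::string tmp = std::to_string(12345);
        // Dangerous: StringRef() does not copy, so the member would keep pointing into
        // tmp, which is destroyed at the end of this scope and read later by Accept():
        //   d.AddMember("qps_limit", rapidjson::StringRef(tmp.data(), tmp.size()), d.GetAllocator());

        // Safe: store the number itself, as the patch does for "RPC qps limit" ...
        d.AddMember("qps_limit", 12345, d.GetAllocator());

        // ... or copy the string into the document's allocator if a string is needed.
        rapidjson::Value copied(tmp.c_str(), static_cast<rapidjson::SizeType>(tmp.size()),
                                d.GetAllocator());
        d.AddMember("qps_limit_str", copied, d.GetAllocator());
    }

    rapidjson::StringBuffer sb;
    rapidjson::PrettyWriter<rapidjson::StringBuffer> writer(sb);
    d.Accept(writer);
    std::puts(sb.GetString());
    return 0;
}

The http_json_reply change in the last hunk looks related: the serialized JSON is now passed as a third argument, presumably the response body, instead of occupying the message slot.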
1 change: 1 addition & 0 deletions run-cloud-ut.sh
@@ -228,6 +228,7 @@ cd test
# FILTER: binary_name:gtest_filter
# FILTER: meta_service_test:DetachSchemaKVTest.*
# ./run_all_tests.sh --test "\"$(echo "${FILTER}" | awk -F: '{print $1}')\"" --filter "\"$(echo "${FILTER}" | awk -F: '{print $2}')\"" --fdb "\"${FDB}\""
+set -euo pipefail
if [[ "_${ENABLE_CLANG_COVERAGE}" == "_ON" ]]; then
bash -x ./run_all_tests.sh --coverage --test "$(echo "${FILTER}" | awk -F: '{print $1}')" --filter "$(echo "${FILTER}" | awk -F: '{print $2}')" --fdb "${FDB}"
else
