diff --git a/.gitignore b/.gitignore
index bddbc878..a33c0521 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,4 +1,5 @@
 # ignore IDE and any panda wheel/debian packages
+.vscode
 .idea
 *.deb
 *.whl
diff --git a/tools/fbi/src/CMakeLists.txt b/tools/fbi/src/CMakeLists.txt
index a1a93d47..50395253 100644
--- a/tools/fbi/src/CMakeLists.txt
+++ b/tools/fbi/src/CMakeLists.txt
@@ -3,7 +3,7 @@ project (FBI LANGUAGES CXX)
 
 # fbi target
 add_executable(fbi find_bug_inj.cpp)
-set_property(TARGET fbi PROPERTY CXX_STANDARD 14)
+set_property(TARGET fbi PROPERTY CXX_STANDARD 17)
 target_compile_options(fbi PRIVATE -D_GLIBCXX_USE_CXX11_ABI=0)
 
 if (${DEBUG})
diff --git a/tools/fbi/src/find_bug_inj.cpp b/tools/fbi/src/find_bug_inj.cpp
index 2cf289fe..6c2b5c93 100644
--- a/tools/fbi/src/find_bug_inj.cpp
+++ b/tools/fbi/src/find_bug_inj.cpp
@@ -241,7 +241,7 @@ void update_unique_taint_sets(Json::Value& tquls) {
         printf("\n");
     }
     // maintain mapping from ptr (uint64_t) to actual set of taint labels
-    Ptr p = strtoull(tquls["ptr"].asString(), c_str(), 0, 0);
+    Ptr p = std::strtoull(tquls["ptr"].asString().c_str(), 0, 0);
     auto it = ptr_to_labelset.lower_bound(p);
     int max_index = tquls["label"].size() - 1;
     if (it == ptr_to_labelset.end() || p < it->first) {
@@ -249,7 +249,7 @@ void update_unique_taint_sets(Json::Value& tquls) {
         std::vector<uint32_t> vec;
         // Populate contents of vector with that of "label"
         for (Json::Value& element : tquls["label"]) {
-            vec.push_back(strtoul(element.asString(), c_str(), 0, 0));
+            vec.push_back(std::strtoul(element.asString().c_str(), 0, 0));
         }
 
         const LabelSet *ls = create(LabelSet{0, p, inputfile, vec});
@@ -348,7 +348,7 @@ inline Range get_dua_dead_range(const Dua *dua, const std::vector &to_
             count_nonzero(viable_bytes));
     if (dua->lval->ast_name.find("nodua") != std::string::npos) {
         dprintf("Found nodua symbol, skipping");
-        dprintf(dua->lval->ast_name.c_str());
+        dprintf("%s", dua->lval->ast_name.c_str());
         dprintf("\n");
         Range empty{0, 0};
         return empty;
@@ -402,24 +402,22 @@ void record_injectable_bugs_at(const AttackPoint *atp, bool is_new_atp,
         std::initializer_list<const DuaBytes *> extra_duas);
 
 void taint_query_pri(Json::Value& ple) {
-    assert (ple != NULL);
     Json::Value tqh = ple["taintQueryPri"];
-    assert (tqh != NULL);
     // size of query in bytes & num tainted bytes found
     // bdg: don't try handle lvals that are bigger than our max lval
-    uint32_t len = std::min(strtoul(tqh["len"].asString(), c_str(), 0, 0), max_lval);
-    uint32_t num_tainted = strtoul(tqh["numTainted"].asString(), c_str(), 0, 0);
+    uint32_t len = std::min((uint32_t) std::strtoul(tqh["len"].asString().c_str(), 0, 0), max_lval);
+    uint32_t num_tainted = std::strtoul(tqh["numTainted"].asString().c_str(), 0, 0);
     // entry 1 is source info
     Json::Value si = tqh["srcInfo"];
     // ignore duas in header files
     if (is_header_file(std::string(si["filename"].asString()))) {
-        return;
+        return;
     }
-    assert (si != NULL);
+
     // entry 2 is callstack -- ignore
     Json::Value cs = tqh["callStack"];
-    assert (cs != NULL);
-    uint64_t instr = strtoull(ple["instr"].asString(), c_str(), 0, 0);
+
+    uint64_t instr = std::strtoull(ple["instr"].asString().c_str(), 0, 0);
     dprintf("TAINT QUERY HYPERCALL len=%d num_tainted=%d\n", len, num_tainted);
 
     // collects set (as sorted vec) of labels on all viable bytes
@@ -449,7 +447,7 @@ void taint_query_pri(Json::Value& ple) {
     std::vector<const LabelSet *> viable_byte(len, nullptr);
     std::vector<uint32_t> byte_tcn(len, 0);
 
-    dprintf("considering taint queries on %lu bytes\n", strtoull(tqh["num_tainted"].asString(), c_str(), 0, 0));
+    dprintf("considering taint queries on %llu bytes\n", std::strtoull(tqh["num_tainted"].asString().c_str(), 0, 0));
 
     bool is_dua = false;
     bool is_fake_dua = false;
@@ -457,19 +455,19 @@
     // optimization. don't need to check each byte if we don't have enough.
     if (num_tainted >= LAVA_MAGIC_VALUE_SIZE) {
         for (const auto& tq : tqh["taintQuery"]) {
-            uint32_t offset = strtoul(tq["offset"].asString(), c_str(), 0, 0);
+            uint32_t offset = std::strtoul(tq["offset"].asString().c_str(), 0, 0);
             if (offset >= len) {
-                continue;
-            }
-            dprintf("considering offset = %d\n", offset);
-            const LabelSet *ls = ptr_to_labelset.at(strtoull(tq["ptr"].asString(), c_str(), 0, 0));
-            byte_tcn[offset] = strtoul(tq["tcn"].asString(), c_str(), 0, 0);
+                continue;
+            }
+            dprintf("considering offset = %d\n", offset);
+            const LabelSet *ls = ptr_to_labelset.at(std::strtoull(tq["ptr"].asString().c_str(), 0, 0));
+            byte_tcn[offset] = std::strtoul(tq["tcn"].asString().c_str(), 0, 0);
 
             // flag for tracking *why* we discarded a byte
             // check tcn and cardinality of taint set first
             uint32_t current_byte_not_ok = 0;
-            current_byte_not_ok |= (strtoul(tq["tcn"].asString(), c_str(), 0, 0) > max_tcn) << CBNO_TCN_BIT;
-            current_byte_not_ok |= (ls->labels.size() > max_card) << CBNO_CRD_BIT
+            current_byte_not_ok |= (std::strtoul(tq["tcn"].asString().c_str(), 0, 0) > max_tcn) << CBNO_TCN_BIT;
+            current_byte_not_ok |= (ls->labels.size() > max_card) << CBNO_CRD_BIT;
             if (current_byte_not_ok && debug) {
                 // discard this byte
                 dprintf("discarding byte -- here's why: %x\n", current_byte_not_ok);
@@ -481,7 +479,7 @@
             dprintf("retaining byte\n");
             // this byte is ok to retain.
            // keep track of highest tcn, liveness, and card for any viable byte for this lval
-            c_max_tcn = std::max(strtoul(tq["tcn"].asString(), c_str(), 0, 0), c_max_tcn);
+            c_max_tcn = std::max((uint32_t) std::strtoul(tq["tcn"].asString().c_str(), 0, 0), c_max_tcn);
             c_max_card = std::max((uint32_t) ls->labels.size(), c_max_card);
 
             merge_into(ls->labels.begin(), ls->labels.end(), all_labels);
@@ -508,7 +506,7 @@
 
     // create a fake dua if we can
     if (chaff_bugs && !is_dua
-            && strtoul(tqh["len"].asString(), c_str(), 0, 0) - num_tainted >= LAVA_MAGIC_VALUE_SIZE) {
+            && std::strtoul(tqh["len"].asString().c_str(), 0, 0) - num_tainted >= LAVA_MAGIC_VALUE_SIZE) {
         dprintf("not enough taint -- what about non-taint?\n");
         dprintf("len=%d num_tainted=%d\n", len, num_tainted);
         viable_byte.assign(viable_byte.size(), nullptr);
@@ -519,7 +517,7 @@
             // Assume these are sorted by offset.
             // Keep two iterators, one in viable_byte, one in tqh->taint_query.
             // Iterate over both and fill gaps in tqh into viable_byte.
-            if (strtoul(tq["offset"].asString(), c_str(), 0, 0) > i) {
+            if (std::strtoul(tq["offset"].asString().c_str(), 0, 0) > i) {
                 // if untainted, we can guarantee that we can use the untainted
                 // bytes to produce a bug that definitely won't trigger.
                 // so we create a fake, empty labelset.
@@ -553,7 +551,7 @@
     const Dua *dua = create(Dua(lval,
             std::move(viable_byte), std::move(byte_tcn),
             std::move(all_labels), inputfile,
-            c_max_tcn, c_max_card, strtoull(ple["instr"].asString(), c_str(), 0, 0), is_fake_dua));
+            c_max_tcn, c_max_card, std::strtoull(ple["instr"].asString().c_str(), 0, 0), is_fake_dua));
 
     if (is_dua) {
         // Only track liveness for non-fake duas.
@@ -619,18 +617,17 @@ void taint_query_pri(Json::Value& ple) {
         if (is_dua) num_real_duas++;
         if (is_fake_dua) num_fake_duas++;
     } else {
-        dprintf("discarded %u viable bytes %lu labels %s:%u %s",
-                num_viable_bytes, all_labels.size(), si["filename"].asString(), strtoul(si["linenum"].asString(), c_str(), 0, 0)
-                si["astnodename"].asString());
+        dprintf("discarded %u viable bytes %lu labels %s:%lu %s",
+                num_viable_bytes, all_labels.size(), si["filename"].asString().c_str(),
+                std::strtoul(si["linenum"].asString().c_str(), 0, 0),
+                si["astnodename"].asString().c_str());
     }
     t.commit();
 }
 
 // update liveness measure for each of taint labels (file bytes) associated with a byte in lval that was queried
 void update_liveness(const Json::Value& ple) {
-    assert (ple != NULL);
     Json::Value tb = ple["taintedBranch"];
-    assert (tb != NULL);
     dprintf("TAINTED BRANCH\n");
 
     transaction t(db->begin());
@@ -646,7 +643,7 @@ void update_liveness(const Json::Value& ple) {
         // This should be O(mn) for m sets, n elems each.
         // though we should have n >> m in our worst case.
         const std::vector<uint32_t> &cur_labels =
-            ptr_to_labelset.at(strtoul(tq["ptr"].asString(), c_str(), 0, 0)) -> labels;
+            ptr_to_labelset.at(std::strtoul(tq["ptr"].asString().c_str(), 0, 0)) -> labels;
         merge_into(cur_labels.begin(), cur_labels.end(), all_labels);
     }
     t.commit();
@@ -860,12 +857,10 @@
 }
 
 void attack_point_lval_usage(Json::Value ple) {
-    assert (ple != NULL);
     Json::Value pleatp = ple["attackPoint"];
     if (pleatp["src_info"].isMember("astLocId")) {
-        dprintf ("attack point id = %d\n", pleatp["srcInfo"]["astLocId"]);
+        dprintf ("attack point id = %lu\n", std::strtoul(pleatp["srcInfo"]["astLocId"].asString().c_str(), 0, 0));
     }
-    assert (pleatp != NULL);
     Json::Value si = pleatp["srcInfo"];
     // ignore duas in header files
     if (is_header_file(si["filename"].asString())) {
@@ -886,11 +881,11 @@
     const AttackPoint *atp;
     bool is_new_atp;
     std::tie(atp, is_new_atp) = create_full(AttackPoint{0,
-        ast_loc, (AttackPoint::Type) strtoul(pleatp["info"].asString(), c_str(), 0, 0)});
+        ast_loc, (AttackPoint::Type) std::strtoul(pleatp["info"].asString().c_str(), 0, 0)});
     dprintf("@ATP: %s\n", std::string(*atp).c_str());
 
     // Don't decimate PTR_ADD bugs.
-    switch ((AttackPoint::Type) strtoul(pleatp["info"].asString(), c_str(), 0, 0)) {
+    switch ((AttackPoint::Type) std::strtoul(pleatp["info"].asString().c_str(), 0, 0)) {
     case AttackPoint::POINTER_WRITE:
         record_injectable_bugs_at(atp, is_new_atp, { });
         // fall through
@@ -964,7 +959,7 @@ int main (int argc, char **argv) {
     if (!project["max_liveness"].isUInt()) {
         throw std::runtime_error("Could not parse max_liveness");
     }
-    max_liveness = strtoul(project["max_liveness"].asString(), c_str(), 0, 0);
+    max_liveness = std::strtoul(project["max_liveness"].asString().c_str(), 0, 0);
     printf("maximum liveness score of %lu\n", max_liveness);
 
     if (!project.isMember("max_cardinality")) {
@@ -974,7 +969,7 @@ int main (int argc, char **argv) {
     if (!project["max_cardinality"].isUInt()) {
         throw std::runtime_error("Could not parse max_cardinality");
     }
-    max_card = strtoul(project["max_cardinality"].asString(), c_str(), 0, 0);
+    max_card = std::strtoul(project["max_cardinality"].asString().c_str(), 0, 0);
     printf("max card of taint set returned by query = %d\n", max_card);
 
     if (!project.isMember("max_tcn")) {
@@ -984,7 +979,7 @@ int main (int argc, char **argv) {
     if (!project["max_tcn"].isUInt()) {
         throw std::runtime_error("Could not parse max_tcn");
     }
-    max_tcn = strtoul(project["max_tcn"].asString(), c_str(), 0, 0);
+    max_tcn = std::strtoul(project["max_tcn"].asString().c_str(), 0, 0);
     printf("max tcn for addr = %d\n", max_tcn);
 
     if (!project.isMember("max_lval_size")) {
@@ -994,7 +989,7 @@ int main (int argc, char **argv) {
     if (!project["max_lval_size"].isUInt()) {
         throw std::runtime_error("Could not parse max_lval_size");
     }
-    max_lval = strtoul(project["max_lval_size"].asString(), c_str(), 0, 0);
+    max_lval = std::strtoul(project["max_lval_size"].asString().c_str(), 0, 0);
     printf("max lval size = %d\n", max_lval);
 
     /* Unsupported for now (why?)
@@ -1013,7 +1008,7 @@ int main (int argc, char **argv) {
             curtail = 0;
         }else{
             // null should never happen, if it does we'll violate an assert in the asUInt
-            curtail = strtoul(project.get("curtail_fbi", Json::Value::null).asString(), c_str(), 0, 0);
+            curtail = std::strtoul(project.get("curtail_fbi", Json::Value::null).asString().c_str(), 0, 0);
         }
     }
     printf("Curtail is %d\n", curtail);
diff --git a/tools/lavaODB/include/spit.hxx b/tools/lavaODB/include/spit.hxx
index 2f4340e9..4f9c4c55 100644
--- a/tools/lavaODB/include/spit.hxx
+++ b/tools/lavaODB/include/spit.hxx
@@ -7,34 +7,46 @@
 
 extern std::vector<std::string> ind2str;
 
-static void spit_tquls(const Panda__TaintQueryUniqueLabelSet *tquls) {
-    printf("tquls=[ptr=0x%" PRIx64 ",n_label=%d,label=[", tquls->ptr, (int) tquls->n_label);
-    for (uint32_t i=0; i<tquls->n_label; i++) {
-        printf("%d", tquls->label[i]);
-        if (i+1<tquls->n_label) printf(",");
-    }
+static void spit_tquls(Json::Value& tquls) {
+    uint32_t n_label = tquls["label"].size();
+    printf("tquls=[ptr=0x%" PRIx64 ",n_label=%u,label=[", std::strtoull(tquls["ptr"].asString().c_str(), 0, 0), n_label);
+
+    uint32_t i = 0;
+    for (Json::Value& element : tquls["label"]) {
+        printf("%lu", std::strtoul(element.asString().c_str(), 0, 0));
+        if (i + 1 < n_label) {
+            printf(",");
+        }
+        ++i;
+    }
     printf("]]");
 }
 
-static void spit_tq(Panda__TaintQuery *tq) {
-    printf("tq=[ptr=0x%" PRIx64 ",tcn=%d,offset=%d]", tq->ptr, tq->tcn, tq->offset);
+static void spit_tq(const Json::Value& tq) {
+    printf("tq=[ptr=0x%" PRIx64 ",tcn=%lu,offset=%lu]",
+            std::strtoull(tq["ptr"].asString().c_str(), 0, 0),
+            std::strtoul(tq["tcn"].asString().c_str(), 0, 0),
+            std::strtoul(tq["offset"].asString().c_str(), 0, 0));
 }
 
-static void spit_si(Panda__SrcInfo *si) {
-    printf("si=[filename='%s',line=%d,", (char*) ind2str[si->filename].c_str(), si->linenum);
-    printf("astnodename='%s',", (char *) ind2str[si->astnodename].c_str());
-    if (si->has_insertionpoint) {
-        printf("insertionpoint=%d", si->insertionpoint);
+static void spit_si(const Json::Value& si) {
+    printf("si=[filename='%s',line=%lu,", si["filename"].asString().c_str(), std::strtoul(si["linenum"].asString().c_str(), 0, 0));
+    printf("astnodename='%s',", si["astnodename"].asString().c_str());
+    if (si.isMember("insertionpoint")) {
+        printf("insertionpoint=%lu", std::strtoul(si["insertionpoint"].asString().c_str(), 0, 0));
     }
     printf("]");
 }
 
-static void spit_tqh(Panda__TaintQueryHypercall *tqh) {
-    printf("tqh=[buf=0x%" PRIx64 ",len=%d,num_tainted=%d]", tqh->buf, tqh->len, tqh->num_tainted);
+static void spit_tqh(const Json::Value& tqh) {
+    printf("tqh=[buf=0x%" PRIx64 ",len=%lu,num_tainted=%lu]",
+            std::strtoull(tqh["buf"].asString().c_str(), 0, 0),
+            std::strtoul(tqh["len"].asString().c_str(), 0, 0),
+            std::strtoul(tqh["num_tainted"].asString().c_str(), 0, 0));
 }
 
-static void spit_ap(Panda__AttackPoint *ap) {
-    printf("ap=[info=%d]", ap->info);
+static void spit_ap(const Json::Value& ap) {
+    printf("ap=[info=%lu]", std::strtoul(ap["info"].asString().c_str(), 0, 0));
 }
 
 #endif
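
Reviewer note, not part of the patch: a minimal, self-contained sketch of the parsing pattern this change standardizes on. PLog fields arrive as JSON strings, so numeric values are recovered with std::strtoul/std::strtoull using a null end pointer and base 0 (so both decimal and "0x..." text parse). The json_u64 helper and the sample field values below are illustrative assumptions, not code from this diff.

#include <cstdint>
#include <cstdio>
#include <cstdlib>
#include <json/json.h>

// Hypothetical helper mirroring the calls added in this diff: the field is
// serialized as a string, so parse it with strtoull (endptr = nullptr,
// base 0 accepts decimal and hex forms).
static uint64_t json_u64(const Json::Value &v, const char *key) {
    return std::strtoull(v[key].asString().c_str(), nullptr, 0);
}

int main() {
    Json::Value tq;                 // stand-in for one "taintQuery" entry
    tq["ptr"] = "0x7f00deadbeef";   // illustrative values only
    tq["tcn"] = "3";
    std::printf("ptr=0x%llx tcn=%llu\n",
                (unsigned long long) json_u64(tq, "ptr"),
                (unsigned long long) json_u64(tq, "tcn"));
    return 0;
}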