Skip to content

Commit

Permalink
Account for frozen datatypes when updating finding metadata
Browse files Browse the repository at this point in the history
  • Loading branch information
drdavella committed Nov 22, 2024
1 parent a77025c commit 6d4e9c0
Show file tree
Hide file tree
Showing 3 changed files with 87 additions and 7 deletions.
26 changes: 26 additions & 0 deletions src/codemodder/codetf.py
Original file line number Diff line number Diff line change
Expand Up @@ -76,6 +76,16 @@ def validate_description(self):
raise ValueError("description must not be empty")
return self

def with_findings(self, findings: list[Finding] | None) -> Change:
    """Return a new ``Change`` identical to this one but with *findings* replaced.

    A fresh instance is constructed rather than mutating in place —
    presumably because the model is frozen/immutable (see commit intent);
    all other fields are carried over unchanged.
    """
    replacement_fields = dict(
        lineNumber=self.lineNumber,
        description=self.description,
        diffSide=self.diffSide,
        properties=self.properties,
        packageActions=self.packageActions,
        findings=findings,
    )
    return Change(**replacement_fields)


class AIMetadata(BaseModel):
provider: Optional[str] = None
Expand All @@ -99,6 +109,16 @@ class ChangeSet(BaseModel):
strategy: Optional[Strategy] = None
provisional: Optional[bool] = False

def with_changes(self, changes: list[Change]) -> ChangeSet:
    """Return a new ``ChangeSet`` identical to this one but with *changes* replaced.

    Builds a fresh instance instead of assigning to ``self.changes`` —
    NOTE(review): this looks intended for frozen/immutable models; all
    remaining fields are copied through as-is.
    """
    replacement_fields = dict(
        path=self.path,
        diff=self.diff,
        changes=changes,
        ai=self.ai,
        strategy=self.strategy,
        provisional=self.provisional,
    )
    return ChangeSet(**replacement_fields)


class Reference(BaseModel):
url: str
Expand Down Expand Up @@ -141,6 +161,12 @@ def to_unfixed_finding(
reason=reason,
)

def with_rule(self, name: str, url: Optional[str]) -> Finding:
    """Return a new ``Finding`` whose rule carries the given *name* and *url*.

    The finding id and the rule id are preserved; only the rule's
    display metadata is replaced. A new instance is built rather than
    mutating ``self.rule`` in place.
    """
    updated_rule = Rule(id=self.rule.id, name=name, url=url)
    return Finding(id=self.id, rule=updated_rule)


class UnfixedFinding(Finding):
path: str
Expand Down
25 changes: 18 additions & 7 deletions src/codemodder/utils/update_finding_metadata.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
if typing.TYPE_CHECKING:
from codemodder.codemods.base_codemod import ToolRule

from codemodder.codetf import ChangeSet
from codemodder.codetf import Change, ChangeSet


def update_finding_metadata(
    tool_rules: list[ToolRule],
    changesets: list[ChangeSet],
) -> list[ChangeSet]:
    """Return changesets whose finding rules carry tool-provided name/url metadata.

    For every finding whose rule id appears in *tool_rules*, the rule's
    ``name`` and ``url`` are replaced; all other findings, changes, and
    changeset fields are preserved. New model instances are constructed
    throughout (via ``with_findings`` / ``with_changes`` / ``with_rule``)
    instead of mutating in place — presumably because the models are
    frozen (per the commit title); the input list is never modified.

    :param tool_rules: rules supplying the authoritative name/url per rule id
    :param changesets: changesets to rewrite
    :return: a new list of changesets (the input when there are no tool rules)
    """
    # Map rule id -> (name, url); with no tool rules there is nothing to
    # rewrite, so return the input unchanged.
    if not (tool_rule_map := {rule.id: (rule.name, rule.url) for rule in tool_rules}):
        return changesets

    new_changesets: list[ChangeSet] = []
    for changeset in changesets:
        new_changes: list[Change] = []
        for change in changeset.changes:
            new_changes.append(
                change.with_findings(
                    [
                        (
                            finding.with_rule(*tool_rule_map[finding.rule.id])
                            if finding.rule.id in tool_rule_map
                            else finding
                        )
                        for finding in change.findings or []
                    ]
                    # Normalize an empty findings list back to None so a
                    # change with no findings round-trips unchanged.
                    or None
                )
            )
        new_changesets.append(changeset.with_changes(new_changes))

    return new_changesets
43 changes: 43 additions & 0 deletions tests/test_update_finding_metadata.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,43 @@
from codemodder.codemods.base_codemod import ToolRule
from codemodder.codetf import Change, ChangeSet, Finding, Rule
from codemodder.utils.update_finding_metadata import update_finding_metadata


def test_update_finding_metdata():
    # NOTE(review): "metdata" typo kept in the name — renaming would change
    # the collected test id.
    tool_rule = ToolRule(id="rule_id", name="rule_name", url="rule_url")

    matched = Change(
        lineNumber=1,
        description="foo",
        findings=[Finding(id="rule_id", rule=Rule(id="rule_id", name="other_name"))],
    )
    unmatched = Change(
        lineNumber=2,
        description="bar",
        findings=[Finding(id="other_id", rule=Rule(id="other_id", name="other_name"))],
    )
    no_findings = Change(lineNumber=3, description="baz")
    changeset = ChangeSet(path="", diff="", changes=[matched, unmatched, no_findings])

    new_changesets = update_finding_metadata(
        tool_rules=[tool_rule], changesets=[changeset]
    )

    first, second, third = new_changesets[0].changes
    # Finding whose rule id matches a tool rule gets the tool's name/url.
    assert first.findings
    assert first.findings[0].rule.name == "rule_name"
    assert first.findings[0].rule.url == "rule_url"
    # Unmatched finding is passed through untouched.
    assert second.findings
    assert second.findings[0].rule.name == "other_name"
    assert second.findings[0].rule.url is None
    # A change with no findings round-trips unchanged.
    assert third == changeset.changes[2]

0 comments on commit 6d4e9c0

Please sign in to comment.