-
Notifications
You must be signed in to change notification settings - Fork 141
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Add sast-unicode-check task for testing
Resolves: https://issues.redhat.com/browse/OSH-739 Signed-off-by: Chuntao Han <[email protected]>
- Loading branch information
1 parent
7e708fe
commit aa7eecb
Showing
4 changed files
with
290 additions
and
1 deletion.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,30 @@ | ||
# sast-unicode-check task

## Description:

The sast-unicode-check task uses the [find-unicode-control](https://github.com/siddhesh/find-unicode-control.git) tool to perform Static Application Security Testing (SAST), looking for non-printable unicode characters in all text files in a source tree.

## Parameters:

| name                         | description                                                                                      |
|------------------------------|--------------------------------------------------------------------------------------------------|
| FIND_UNICODE_CONTROL_GIT_URL | URL of the find-unicode-control repository                                                       |
| KFP_GIT_URL                  | Link to the known-false-positives repository. If left blank, results won't be filtered           |
| PROJECT_NVR                  | Name-Version-Release (NVR) of the scanned project, used to find path exclusions (it is optional) |
| RECORD_EXCLUDED              | File to store all excluded findings to (it is optional)                                          |

## Results:

| name        | description              |
|-------------|--------------------------|
| TEST_OUTPUT | Tekton task test output. |

## Source repository for image:

https://github.com/konflux-ci/konflux-test

## Additional links:

* https://github.com/siddhesh/find-unicode-control.git
* https://gitlab.cee.redhat.com/osh/known-false-positives.git
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,246 @@ | ||
apiVersion: tekton.dev/v1
kind: Task
metadata:
  labels:
    app.kubernetes.io/version: "0.1"
  annotations:
    tekton.dev/pipelines.minVersion: "0.12.1"
    tekton.dev/tags: "konflux"
  name: sast-unicode-check
spec:
  description: >-
    Scans source code for non-printable unicode characters in all text files.
  results:
    - description: Tekton task test output.
      name: TEST_OUTPUT
  params:
    - description: Image URL.
      name: image-url
      type: string
      # In a future 0.4 version of the task, drop the default to make this required
      default: ""
    - description: Image digest to report findings for.
      name: image-digest
      type: string
      # In a future 0.4 version of the task, drop the default to make this required
      default: ""
    - name: FIND_UNICODE_CONTROL_GIT_URL
      type: string
      description: URL from repository to find unicode control
      default: "https://github.com/siddhesh/find-unicode-control.git"
    - name: KFP_GIT_URL
      type: string
      description: URL from repository to download known false positives files
      default: "https://gitlab.cee.redhat.com/osh/known-false-positives.git"
    - default: ""
      description: Name-Version-Release (NVR) of the scanned project, used to find path exclusions (it is optional)
      name: PROJECT_NVR
      type: string
    - default: ""
      description: File to store all excluded findings to (it is optional)
      name: RECORD_EXCLUDED
      type: string
  steps:
    - name: sast-unicode-check
      image: quay.io/redhat-appstudio/konflux-test:v1.4.6@sha256:5f298d8d990dfa82023e50029b71b08e19c3c9cedb181dfc4bc86c9ecad8700c
      # per https://kubernetes.io/docs/concepts/containers/images/#imagepullpolicy-defaulting
      # the cluster will set imagePullPolicy to IfNotPresent
      workingDir: $(workspaces.workspace.path)/hacbs/$(context.task.name)
      env:
        - name: KFP_GIT_URL
          value: $(params.KFP_GIT_URL)
        - name: FIND_UNICODE_CONTROL_GIT_URL
          value: $(params.FIND_UNICODE_CONTROL_GIT_URL)
        - name: PROJECT_NVR
          value: $(params.PROJECT_NVR)
        - name: RECORD_EXCLUDED
          value: $(params.RECORD_EXCLUDED)
        - name: SOURCE_CODE_DIR
          value: $(workspaces.workspace.path)
      script: |
        #!/usr/bin/env bash
        set -exuo pipefail
        . /utils.sh
        trap 'handle_error $(results.TEST_OUTPUT.path)' EXIT

        # SCAN_RESULT: path of the findings file fed to process_all_results;
        # SCAN_PROP:   scan property recorded in the csdiff output (tool URL + commit).
        SCAN_RESULT=""
        SCAN_PROP=""

        # Installation of Red Hat certificates for cloning Red Hat internal repositories
        curl -sS https://certs.corp.redhat.com/certs/2015-IT-Root-CA.pem > /etc/pki/ca-trust/source/anchors/2015-RH-IT-Root-CA.crt
        curl -sS https://certs.corp.redhat.com/certs/2022-IT-Root-CA.pem > /etc/pki/ca-trust/source/anchors/2022-IT-Root-CA.pem
        update-ca-trust

        # Clone the find-unicode-control tool, verify it runs, and record its
        # exact revision in SCAN_PROP for traceability of the scan results.
        clone_unicontrol()
        {
            # NOTE: under `set -e` the pattern `cmd; status=$?` never observes a
            # non-zero status (the shell exits first), so failures are tested
            # directly with `if !` to keep the diagnostic messages reachable.
            if ! git clone --depth=1 "${FIND_UNICODE_CONTROL_GIT_URL}" cloned >&2; then
                echo "Error: Failed to clone the repository: ${FIND_UNICODE_CONTROL_GIT_URL}" >&2
                return 1
            else
                echo "Message: Succeed to clone the repository: ${FIND_UNICODE_CONTROL_GIT_URL}" >&2
            fi

            # Check usage of find-unicode-control to confirm the tool can work
            if ! ./cloned/find_unicode_control.py --help >&2; then
                echo "Error: Failed to check usage of find-unicode-control" >&2
                return 1
            else
                echo "Message: Succeed to check usage of find-unicode-control" >&2
            fi

            # Get git url suffix
            git_url_suffix=$(git -C ./cloned/ rev-parse HEAD)
            SCAN_PROP="find-unicode-control-git-url:${FIND_UNICODE_CONTROL_GIT_URL}#${git_url_suffix}"
        }

        # Run the scanner over the checked-out source; exit code 1 means
        # "findings detected" and is not treated as a tool failure.
        unicontrol_scan()
        {
            FUC_EXIT_CODE=0
            LANG=en_US.utf8 ./cloned/find_unicode_control.py -p bidi -v -d -t "${SOURCE_CODE_DIR}/source" \
                >raw_sast_unicode_check_out.txt \
                2>raw_sast_unicode_check_out.log \
                || FUC_EXIT_CODE=$?
            if [ "${FUC_EXIT_CODE}" -ne 0 ] && [ "${FUC_EXIT_CODE}" -ne 1 ]; then
                echo "Error: failed to run find unicode control command" >&2
                return 1
            else
                echo "Message: Succeed to run find-unicode-control command" >&2
            fi

            # Translate the output format into "<file>:<line>: warning: ..." so
            # csgrep can parse it; capture the status with `|| status=$?` so the
            # check below is reachable despite `set -e`.
            status=0
            sed -i raw_sast_unicode_check_out.txt -E -e 's|(.*:[0-9]+)(.*)|\1: warning:\2|' -e 's|^|Error: UNICONTROL_WARNING:\n|' \
                || status=$?
            if [ "${status}" -ne 0 ] && [ "${status}" -ne 1 ]; then
                echo "Error: failed to translate the unicontrol output format" >&2
                return 1
            else
                echo "Message: Succeed to translate the unicontrol output format" >&2
                SCAN_RESULT="raw_sast_unicode_check_out.txt"
            fi
        }

        # Filter known false positives via csfilter-kfp; skipped (not fatal)
        # when no KFP repository URL was provided.
        filter_kfp()
        {
            # We check if the KFP_GIT_URL variable is set to apply the filters or not
            if [[ -z "${KFP_GIT_URL}" ]]; then
                echo "Error: KFP_GIT_URL variable not defined. False positives won't be filtered" >&2
                return 0
            else
                echo "Message: Filtering false positives in results files using csfilter-kfp..." >&2
                CMD=(
                    csfilter-kfp
                    --verbose
                    --kfp-git-url="${KFP_GIT_URL}"
                )
                if [[ -n "${PROJECT_NVR}" ]]; then
                    CMD+=(--project-nvr="${PROJECT_NVR}")
                fi
                if [[ -n "${RECORD_EXCLUDED}" ]]; then
                    CMD+=(--record-excluded="${RECORD_EXCLUDED}")
                fi
                # Test the command directly: under `set -e` a separate
                # `status=$?` check would be dead code on failure.
                if ! "${CMD[@]}" raw_sast_unicode_check_out.txt > filtered_sast_unicode_check_out.json; then
                    echo "Error: failed to filter known false positives" >&2
                    return 1
                else
                    echo "Message: Succeed to filter known false positives" >&2
                    SCAN_RESULT="filtered_sast_unicode_check_out.json"
                fi
            fi
        }

        # Normalize, deduplicate and convert the findings to SARIF, then report.
        process_all_results()
        {
            CSGERP_OPTS=(
                --mode=json
                --event='error|warning'
                --remove-duplicates
                --embed-context=3
                --set-scan-prop="${SCAN_PROP}"
            )

            # In order to generate csdiff/v1, we need to add the whole path of the source code as Snyk only provides an URI to embed the context
            csgrep --mode=json --prepend-path-prefix="${SOURCE_CODE_DIR}"/ "${SCAN_RESULT}" \
                | csgrep "${CSGERP_OPTS[@]}" \
                | csgrep --mode=json --strip-path-prefix="${SOURCE_CODE_DIR}"/source/ \
                > processed_sast_unicode_check_out.json
            csgrep --mode=evtstat processed_sast_unicode_check_out.json
            csgrep --mode=sarif processed_sast_unicode_check_out.json > sast_unicode_check_out.sarif
            show_output
        }

        # Emit the Tekton TEST_OUTPUT result: SUCCESS when nothing was found or
        # everything was filtered as a known false positive, ERROR otherwise.
        show_output()
        {
            if [[ "${FUC_EXIT_CODE}" -eq 0 ]]; then
                note="Task $(context.task.name) success: No finding was detected"
                ERROR_OUTPUT=$(make_result_json -r SUCCESS -t "$note")
            elif [[ "${FUC_EXIT_CODE}" -eq 1 ]] && [[ ! -s sast_unicode_check_out.sarif ]]; then
                note="Task $(context.task.name) success: Some findings were detected, but filtered by known false positive"
                ERROR_OUTPUT=$(make_result_json -r SUCCESS -t "$note")
            else
                echo "sast-unicode-check test failed because of the following issues:"
                cat processed_sast_unicode_check_out.json
                TEST_OUTPUT=
                parse_test_output $(context.task.name) sarif sast_unicode_check_out.sarif || true
                note="Task $(context.task.name) failed: For details, check Tekton task log."
                ERROR_OUTPUT=$(make_result_json -r ERROR -t "$note")
            fi
            echo "${TEST_OUTPUT:-${ERROR_OUTPUT}}" | tee $(results.TEST_OUTPUT.path)
        }

        main()
        {
            clone_unicontrol
            unicontrol_scan
            filter_kfp
            process_all_results
        }

        main
    - name: upload
      image: quay.io/konflux-ci/oras:latest@sha256:f4b891ee3038a5f13cd92ff4f473faad5601c2434d1c6b9bccdfc134d9d5f820
      workingDir: $(workspaces.workspace.path)/hacbs/$(context.task.name)
      env:
        - name: IMAGE_URL
          value: $(params.image-url)
        - name: IMAGE_DIGEST
          value: $(params.image-digest)
      script: |
        #!/usr/bin/env bash
        # Attach the SARIF report to the scanned image as an OCI artifact.
        # Skips (exit 0) when no image coordinates were provided or no report exists.
        UPLOAD_FILE=sast_unicode_check_out.sarif
        MEDIA_TYPE=application/sarif+json

        if [ -z "${IMAGE_URL}" ] || [ -z "${IMAGE_DIGEST}" ]; then
          echo 'No image-url or image-digest param provided. Skipping upload.'
          exit 0;
        fi
        if [ ! -f "${UPLOAD_FILE}" ]; then
          echo "No ${UPLOAD_FILE} exists. Skipping upload."
          exit 0;
        fi

        echo "Selecting auth"
        select-oci-auth "${IMAGE_URL}" > "${HOME}/auth.json"
        # Attach via both referrers mechanisms so consumers on either spec level can discover the report.
        echo "Attaching to ${IMAGE_URL} via the OCI 1.1 Referrers API"
        oras attach --no-tty --registry-config "$HOME/auth.json" --distribution-spec v1.1-referrers-api --artifact-type "${MEDIA_TYPE}" "${IMAGE_URL}" "${UPLOAD_FILE}:${MEDIA_TYPE}"
        echo "Attaching to ${IMAGE_URL} via the OCI 1.1 Referrers Tag"
        oras attach --no-tty --registry-config "$HOME/auth.json" --distribution-spec v1.1-referrers-tag --artifact-type "${MEDIA_TYPE}" "${IMAGE_URL}" "${UPLOAD_FILE}:${MEDIA_TYPE}"
  workspaces:
    - name: workspace
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,5 @@ | ||
# See the OWNERS docs: https://go.k8s.io/owners
approvers:
  - integration-team
reviewers:
  - integration-team