Add files using upload-large-folder tool
Browse files- local-test-commons-compress-full-01-vuln_5/afc-commons-compress/src/test/resources/COMPRESS-477/split_zip_created_by_winrar/file_to_compare_1 +1297 -0
- local-test-commons-compress-full-01-vuln_5/afc-commons-compress/src/test/resources/COMPRESS-477/split_zip_created_by_zip/file_to_compare_1 +38 -0
- local-test-commons-compress-full-01-vuln_5/afc-commons-compress/src/test/resources/COMPRESS-477/split_zip_created_by_zip/file_to_compare_2 +79 -0
- local-test-commons-compress-full-01-vuln_5/afc-commons-compress/src/test/resources/org/apache/commons/compress/COMPRESS-644/ARW05UP.ICO +0 -0
- local-test-commons-compress-full-01-vuln_5/afc-commons-compress/src/test/resources/org/apache/commons/compress/COMPRESS-661/testARofText.ar +5 -0
- local-test-commons-compress-full-01-vuln_5/afc-commons-compress/src/test/resources/org/apache/commons/compress/ar/number_parsing/bad_length-fail.ar +8 -0
- local-test-commons-compress-full-01-vuln_5/afc-commons-compress/src/test/resources/org/apache/commons/compress/ar/number_parsing/bad_long_namelen_gnu1-fail.ar +8 -0
- local-test-commons-compress-full-01-vuln_5/afc-commons-compress/src/test/resources/org/apache/commons/compress/ar/number_parsing/bad_long_namelen_gnu2-fail.ar +6 -0
- local-test-commons-compress-full-01-vuln_5/afc-commons-compress/src/test/resources/org/apache/commons/compress/ar/number_parsing/bad_long_namelen_gnu3-fail.ar +0 -0
- local-test-commons-compress-full-01-vuln_5/afc-commons-compress/src/test/resources/org/apache/commons/compress/ar/number_parsing/bad_modified-fail.ar +8 -0
- local-test-commons-compress-full-01-vuln_5/afc-commons-compress/src/test/resources/org/apache/commons/compress/ar/number_parsing/bad_user-fail.ar +8 -0
- local-test-commons-compress-full-01-vuln_5/afc-commons-compress/src/test/resources/org/apache/commons/compress/dump/invalid_compression_type-fail.dump +0 -0
- local-test-commons-compress-full-01-vuln_5/afc-commons-compress/src/test/resources/org/apache/commons/compress/fuzz/crash-f2efd9eaeb86cda597d07b5e3c3d81363633c2da +0 -0
- local-test-commons-compress-full-01-vuln_5/afc-commons-compress/src/test/resources/org/apache/commons/compress/ico/commons-daemon/procrunr.ico +0 -0
- local-test-commons-compress-full-01-vuln_5/afc-commons-compress/src/test/resources/org/apache/commons/compress/ico/commons-daemon/procruns.ico +0 -0
- local-test-commons-compress-full-01-vuln_5/afc-commons-compress/src/test/resources/org/apache/commons/compress/ico/commons-daemon/procrunw.ico +0 -0
- local-test-commons-compress-full-01-vuln_5/afc-commons-compress/src/test/resources/org/apache/commons/compress/ico/commons-imaging/OutOfMemory_epine.ico +0 -0
- local-test-commons-compress-full-01-vuln_5/afc-commons-compress/src/test/resources/org/apache/commons/compress/pack/bandint_oom.pack +0 -0
- local-test-commons-compress-full-01-vuln_5/afc-commons-compress/src/test/resources/org/apache/commons/compress/pack/references_oom.pack +0 -0
- local-test-commons-compress-full-01-vuln_5/fuzz-tooling/infra/.dockerignore +9 -0
- local-test-commons-compress-full-01-vuln_5/fuzz-tooling/infra/build_specified_commit.py +410 -0
- local-test-commons-compress-full-01-vuln_5/fuzz-tooling/infra/cifuzz/config_utils.py +283 -0
- local-test-commons-compress-full-01-vuln_5/fuzz-tooling/infra/cifuzz/filestore_utils_test.py +50 -0
- local-test-commons-compress-full-01-vuln_5/fuzz-tooling/infra/cifuzz/fuzz_target_test.py +298 -0
- local-test-commons-compress-full-01-vuln_5/fuzz-tooling/infra/cifuzz/get_coverage.py +208 -0
- local-test-commons-compress-full-01-vuln_5/fuzz-tooling/infra/cifuzz/http_utils.py +118 -0
- local-test-commons-compress-full-01-vuln_5/fuzz-tooling/infra/cifuzz/run_fuzzers.py +321 -0
- local-test-commons-compress-full-01-vuln_5/fuzz-tooling/infra/constants.py +49 -0
- local-test-commons-compress-full-01-vuln_5/fuzz-tooling/infra/experimental/SystemSan/Makefile +41 -0
- local-test-commons-compress-full-01-vuln_5/fuzz-tooling/infra/experimental/SystemSan/PoEs/node-shell-quote-v1.7.3/build.sh +22 -0
- local-test-commons-compress-full-01-vuln_5/fuzz-tooling/infra/experimental/SystemSan/PoEs/pytorch-lightning-1.5.10/Makefile +12 -0
- local-test-commons-compress-full-01-vuln_5/fuzz-tooling/infra/experimental/SystemSan/README.md +66 -0
- local-test-commons-compress-full-01-vuln_5/fuzz-tooling/infra/experimental/SystemSan/SystemSan.cpp +493 -0
- local-test-commons-compress-full-01-vuln_5/fuzz-tooling/infra/experimental/SystemSan/inspect_dns.cpp +236 -0
- local-test-commons-compress-full-01-vuln_5/fuzz-tooling/infra/experimental/SystemSan/inspect_utils.h +39 -0
- local-test-commons-compress-full-01-vuln_5/fuzz-tooling/infra/experimental/SystemSan/target.cpp +28 -0
- local-test-commons-compress-full-01-vuln_5/fuzz-tooling/infra/experimental/SystemSan/target_dns.cpp +39 -0
- local-test-commons-compress-full-01-vuln_5/fuzz-tooling/infra/experimental/SystemSan/vuln.dict +3 -0
- local-test-commons-compress-full-01-vuln_5/fuzz-tooling/infra/experimental/chronos/build_cache_local.sh +163 -0
- local-test-commons-compress-full-01-vuln_5/fuzz-tooling/infra/experimental/chronos/build_on_cloudbuild.sh +26 -0
- local-test-commons-compress-full-01-vuln_5/fuzz-tooling/infra/experimental/chronos/cloudbuild.yaml +58 -0
- local-test-commons-compress-full-01-vuln_5/fuzz-tooling/infra/experimental/chronos/cloudbuild_all.yaml +34 -0
- local-test-commons-compress-full-01-vuln_5/fuzz-tooling/infra/experimental/chronos/e2e-replay-build.sh +63 -0
- local-test-commons-compress-full-01-vuln_5/fuzz-tooling/infra/experimental/chronos/match_artifacts.sh +35 -0
- local-test-commons-compress-full-01-vuln_5/fuzz-tooling/infra/experimental/chronos/prepare-replay-rebuild +21 -0
- local-test-commons-compress-full-01-vuln_5/fuzz-tooling/infra/helper_test.py +239 -0
- local-test-commons-compress-full-01-vuln_5/fuzz-tooling/infra/presubmit.py +549 -0
- local-test-commons-compress-full-01-vuln_5/fuzz-tooling/infra/pytest.ini +3 -0
- local-test-commons-compress-full-01-vuln_5/fuzz-tooling/infra/repo_manager_test.py +201 -0
- local-test-commons-compress-full-01-vuln_5/fuzz-tooling/infra/utils_test.py +151 -0
local-test-commons-compress-full-01-vuln_5/afc-commons-compress/src/test/resources/COMPRESS-477/split_zip_created_by_winrar/file_to_compare_1
ADDED
|
@@ -0,0 +1,1297 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
/*
|
| 2 |
+
* Licensed to the Apache Software Foundation (ASF) under one
|
| 3 |
+
* or more contributor license agreements. See the NOTICE file
|
| 4 |
+
* distributed with this work for additional information
|
| 5 |
+
* regarding copyright ownership. The ASF licenses this file
|
| 6 |
+
* to you under the Apache License, Version 2.0 (the
|
| 7 |
+
* "License"); you may not use this file except in compliance
|
| 8 |
+
* with the License. You may obtain a copy of the License at
|
| 9 |
+
*
|
| 10 |
+
* http://www.apache.org/licenses/LICENSE-2.0
|
| 11 |
+
*
|
| 12 |
+
* Unless required by applicable law or agreed to in writing,
|
| 13 |
+
* software distributed under the License is distributed on an
|
| 14 |
+
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
| 15 |
+
* KIND, either express or implied. See the License for the
|
| 16 |
+
* specific language governing permissions and limitations
|
| 17 |
+
* under the License.
|
| 18 |
+
*/
|
| 19 |
+
package org.apache.commons.compress.archivers.zip;
|
| 20 |
+
|
| 21 |
+
import java.io.ByteArrayInputStream;
|
| 22 |
+
import java.io.ByteArrayOutputStream;
|
| 23 |
+
import java.io.EOFException;
|
| 24 |
+
import java.io.IOException;
|
| 25 |
+
import java.io.InputStream;
|
| 26 |
+
import java.io.PushbackInputStream;
|
| 27 |
+
import java.math.BigInteger;
|
| 28 |
+
import java.nio.ByteBuffer;
|
| 29 |
+
import java.util.Arrays;
|
| 30 |
+
import java.util.zip.CRC32;
|
| 31 |
+
import java.util.zip.DataFormatException;
|
| 32 |
+
import java.util.zip.Inflater;
|
| 33 |
+
import java.util.zip.ZipEntry;
|
| 34 |
+
import java.util.zip.ZipException;
|
| 35 |
+
|
| 36 |
+
import org.apache.commons.compress.archivers.ArchiveEntry;
|
| 37 |
+
import org.apache.commons.compress.archivers.ArchiveInputStream;
|
| 38 |
+
import org.apache.commons.compress.compressors.bzip2.BZip2CompressorInputStream;
|
| 39 |
+
import org.apache.commons.compress.compressors.deflate64.Deflate64CompressorInputStream;
|
| 40 |
+
import org.apache.commons.compress.utils.ArchiveUtils;
|
| 41 |
+
import org.apache.commons.compress.utils.IOUtils;
|
| 42 |
+
import org.apache.commons.compress.utils.InputStreamStatistics;
|
| 43 |
+
|
| 44 |
+
import static org.apache.commons.compress.archivers.zip.ZipConstants.DWORD;
|
| 45 |
+
import static org.apache.commons.compress.archivers.zip.ZipConstants.SHORT;
|
| 46 |
+
import static org.apache.commons.compress.archivers.zip.ZipConstants.WORD;
|
| 47 |
+
import static org.apache.commons.compress.archivers.zip.ZipConstants.ZIP64_MAGIC;
|
| 48 |
+
|
| 49 |
+
/**
|
| 50 |
+
* Implements an input stream that can read Zip archives.
|
| 51 |
+
*
|
| 52 |
+
* <p>As of Apache Commons Compress it transparently supports Zip64
|
| 53 |
+
* extensions and thus individual entries and archives larger than 4
|
| 54 |
+
* GB or with more than 65536 entries.</p>
|
| 55 |
+
*
|
| 56 |
+
* <p>The {@link ZipFile} class is preferred when reading from files
|
| 57 |
+
* as {@link ZipArchiveInputStream} is limited by not being able to
|
| 58 |
+
* read the central directory header before returning entries. In
|
| 59 |
+
* particular {@link ZipArchiveInputStream}</p>
|
| 60 |
+
*
|
| 61 |
+
* <ul>
|
| 62 |
+
*
|
| 63 |
+
* <li>may return entries that are not part of the central directory
|
| 64 |
+
* at all and shouldn't be considered part of the archive.</li>
|
| 65 |
+
*
|
| 66 |
+
* <li>may return several entries with the same name.</li>
|
| 67 |
+
*
|
| 68 |
+
* <li>will not return internal or external attributes.</li>
|
| 69 |
+
*
|
| 70 |
+
* <li>may return incomplete extra field data.</li>
|
| 71 |
+
*
|
| 72 |
+
* <li>may return unknown sizes and CRC values for entries until the
|
| 73 |
+
* next entry has been reached if the archive uses the data
|
| 74 |
+
* descriptor feature.</li>
|
| 75 |
+
*
|
| 76 |
+
* </ul>
|
| 77 |
+
*
|
| 78 |
+
* @see ZipFile
|
| 79 |
+
* @NotThreadSafe
|
| 80 |
+
*/
|
| 81 |
+
public class ZipArchiveInputStream extends ArchiveInputStream implements InputStreamStatistics {
|
| 82 |
+
|
| 83 |
+
/** The zip encoding to use for file names and the file comment. */
|
| 84 |
+
private final ZipEncoding zipEncoding;
|
| 85 |
+
|
| 86 |
+
// the provided encoding (for unit tests)
|
| 87 |
+
final String encoding;
|
| 88 |
+
|
| 89 |
+
/** Whether to look for and use Unicode extra fields. */
|
| 90 |
+
private final boolean useUnicodeExtraFields;
|
| 91 |
+
|
| 92 |
+
/** Wrapped stream, will always be a PushbackInputStream. */
|
| 93 |
+
private final InputStream in;
|
| 94 |
+
|
| 95 |
+
/** Inflater used for all deflated entries. */
|
| 96 |
+
private final Inflater inf = new Inflater(true);
|
| 97 |
+
|
| 98 |
+
/** Buffer used to read from the wrapped stream. */
|
| 99 |
+
private final ByteBuffer buf = ByteBuffer.allocate(ZipArchiveOutputStream.BUFFER_SIZE);
|
| 100 |
+
|
| 101 |
+
/** The entry that is currently being read. */
|
| 102 |
+
private CurrentEntry current = null;
|
| 103 |
+
|
| 104 |
+
/** Whether the stream has been closed. */
|
| 105 |
+
private boolean closed = false;
|
| 106 |
+
|
| 107 |
+
/** Whether the stream has reached the central directory - and thus found all entries. */
|
| 108 |
+
private boolean hitCentralDirectory = false;
|
| 109 |
+
|
| 110 |
+
/**
|
| 111 |
+
* When reading a stored entry that uses the data descriptor this
|
| 112 |
+
* stream has to read the full entry and caches it. This is the
|
| 113 |
+
* cache.
|
| 114 |
+
*/
|
| 115 |
+
private ByteArrayInputStream lastStoredEntry = null;
|
| 116 |
+
|
| 117 |
+
/** Whether the stream will try to read STORED entries that use a data descriptor. */
|
| 118 |
+
private boolean allowStoredEntriesWithDataDescriptor = false;
|
| 119 |
+
|
| 120 |
+
/** Count decompressed bytes for current entry */
|
| 121 |
+
private long uncompressedCount = 0;
|
| 122 |
+
|
| 123 |
+
private static final int LFH_LEN = 30;
|
| 124 |
+
/*
|
| 125 |
+
local file header signature WORD
|
| 126 |
+
version needed to extract SHORT
|
| 127 |
+
general purpose bit flag SHORT
|
| 128 |
+
compression method SHORT
|
| 129 |
+
last mod file time SHORT
|
| 130 |
+
last mod file date SHORT
|
| 131 |
+
crc-32 WORD
|
| 132 |
+
compressed size WORD
|
| 133 |
+
uncompressed size WORD
|
| 134 |
+
file name length SHORT
|
| 135 |
+
extra field length SHORT
|
| 136 |
+
*/
|
| 137 |
+
|
| 138 |
+
private static final int CFH_LEN = 46;
|
| 139 |
+
/*
|
| 140 |
+
central file header signature WORD
|
| 141 |
+
version made by SHORT
|
| 142 |
+
version needed to extract SHORT
|
| 143 |
+
general purpose bit flag SHORT
|
| 144 |
+
compression method SHORT
|
| 145 |
+
last mod file time SHORT
|
| 146 |
+
last mod file date SHORT
|
| 147 |
+
crc-32 WORD
|
| 148 |
+
compressed size WORD
|
| 149 |
+
uncompressed size WORD
|
| 150 |
+
file name length SHORT
|
| 151 |
+
extra field length SHORT
|
| 152 |
+
file comment length SHORT
|
| 153 |
+
disk number start SHORT
|
| 154 |
+
internal file attributes SHORT
|
| 155 |
+
external file attributes WORD
|
| 156 |
+
relative offset of local header WORD
|
| 157 |
+
*/
|
| 158 |
+
|
| 159 |
+
private static final long TWO_EXP_32 = ZIP64_MAGIC + 1;
|
| 160 |
+
|
| 161 |
+
// cached buffers - must only be used locally in the class (COMPRESS-172 - reduce garbage collection)
|
| 162 |
+
private final byte[] lfhBuf = new byte[LFH_LEN];
|
| 163 |
+
private final byte[] skipBuf = new byte[1024];
|
| 164 |
+
private final byte[] shortBuf = new byte[SHORT];
|
| 165 |
+
private final byte[] wordBuf = new byte[WORD];
|
| 166 |
+
private final byte[] twoDwordBuf = new byte[2 * DWORD];
|
| 167 |
+
|
| 168 |
+
private int entriesRead = 0;
|
| 169 |
+
|
| 170 |
+
/**
|
| 171 |
+
* Create an instance using UTF-8 encoding
|
| 172 |
+
* @param inputStream the stream to wrap
|
| 173 |
+
*/
|
| 174 |
+
public ZipArchiveInputStream(final InputStream inputStream) {
|
| 175 |
+
this(inputStream, ZipEncodingHelper.UTF8);
|
| 176 |
+
}
|
| 177 |
+
|
| 178 |
+
/**
|
| 179 |
+
* Create an instance using the specified encoding
|
| 180 |
+
* @param inputStream the stream to wrap
|
| 181 |
+
* @param encoding the encoding to use for file names, use null
|
| 182 |
+
* for the platform's default encoding
|
| 183 |
+
* @since 1.5
|
| 184 |
+
*/
|
| 185 |
+
public ZipArchiveInputStream(final InputStream inputStream, final String encoding) {
|
| 186 |
+
this(inputStream, encoding, true);
|
| 187 |
+
}
|
| 188 |
+
|
| 189 |
+
/**
|
| 190 |
+
* Create an instance using the specified encoding
|
| 191 |
+
* @param inputStream the stream to wrap
|
| 192 |
+
* @param encoding the encoding to use for file names, use null
|
| 193 |
+
* for the platform's default encoding
|
| 194 |
+
* @param useUnicodeExtraFields whether to use InfoZIP Unicode
|
| 195 |
+
* Extra Fields (if present) to set the file names.
|
| 196 |
+
*/
|
| 197 |
+
public ZipArchiveInputStream(final InputStream inputStream, final String encoding, final boolean useUnicodeExtraFields) {
|
| 198 |
+
this(inputStream, encoding, useUnicodeExtraFields, false);
|
| 199 |
+
}
|
| 200 |
+
|
| 201 |
+
/**
|
| 202 |
+
* Create an instance using the specified encoding
|
| 203 |
+
* @param inputStream the stream to wrap
|
| 204 |
+
* @param encoding the encoding to use for file names, use null
|
| 205 |
+
* for the platform's default encoding
|
| 206 |
+
* @param useUnicodeExtraFields whether to use InfoZIP Unicode
|
| 207 |
+
* Extra Fields (if present) to set the file names.
|
| 208 |
+
* @param allowStoredEntriesWithDataDescriptor whether the stream
|
| 209 |
+
* will try to read STORED entries that use a data descriptor
|
| 210 |
+
* @since 1.1
|
| 211 |
+
*/
|
| 212 |
+
public ZipArchiveInputStream(final InputStream inputStream,
|
| 213 |
+
final String encoding,
|
| 214 |
+
final boolean useUnicodeExtraFields,
|
| 215 |
+
final boolean allowStoredEntriesWithDataDescriptor) {
|
| 216 |
+
this.encoding = encoding;
|
| 217 |
+
zipEncoding = ZipEncodingHelper.getZipEncoding(encoding);
|
| 218 |
+
this.useUnicodeExtraFields = useUnicodeExtraFields;
|
| 219 |
+
in = new PushbackInputStream(inputStream, buf.capacity());
|
| 220 |
+
this.allowStoredEntriesWithDataDescriptor =
|
| 221 |
+
allowStoredEntriesWithDataDescriptor;
|
| 222 |
+
// haven't read anything so far
|
| 223 |
+
buf.limit(0);
|
| 224 |
+
}
|
| 225 |
+
|
| 226 |
+
public ZipArchiveEntry getNextZipEntry() throws IOException {
|
| 227 |
+
uncompressedCount = 0;
|
| 228 |
+
|
| 229 |
+
boolean firstEntry = true;
|
| 230 |
+
if (closed || hitCentralDirectory) {
|
| 231 |
+
return null;
|
| 232 |
+
}
|
| 233 |
+
if (current != null) {
|
| 234 |
+
closeEntry();
|
| 235 |
+
firstEntry = false;
|
| 236 |
+
}
|
| 237 |
+
|
| 238 |
+
long currentHeaderOffset = getBytesRead();
|
| 239 |
+
try {
|
| 240 |
+
if (firstEntry) {
|
| 241 |
+
// split archives have a special signature before the
|
| 242 |
+
// first local file header - look for it and fail with
|
| 243 |
+
// the appropriate error message if this is a split
|
| 244 |
+
// archive.
|
| 245 |
+
readFirstLocalFileHeader(lfhBuf);
|
| 246 |
+
} else {
|
| 247 |
+
readFully(lfhBuf);
|
| 248 |
+
}
|
| 249 |
+
} catch (final EOFException e) { //NOSONAR
|
| 250 |
+
return null;
|
| 251 |
+
}
|
| 252 |
+
|
| 253 |
+
final ZipLong sig = new ZipLong(lfhBuf);
|
| 254 |
+
if (!sig.equals(ZipLong.LFH_SIG)) {
|
| 255 |
+
if (sig.equals(ZipLong.CFH_SIG) || sig.equals(ZipLong.AED_SIG) || isApkSigningBlock(lfhBuf)) {
|
| 256 |
+
hitCentralDirectory = true;
|
| 257 |
+
skipRemainderOfArchive();
|
| 258 |
+
return null;
|
| 259 |
+
}
|
| 260 |
+
throw new ZipException(String.format("Unexpected record signature: 0X%X", sig.getValue()));
|
| 261 |
+
}
|
| 262 |
+
|
| 263 |
+
int off = WORD;
|
| 264 |
+
current = new CurrentEntry();
|
| 265 |
+
|
| 266 |
+
final int versionMadeBy = ZipShort.getValue(lfhBuf, off);
|
| 267 |
+
off += SHORT;
|
| 268 |
+
current.entry.setPlatform((versionMadeBy >> ZipFile.BYTE_SHIFT) & ZipFile.NIBLET_MASK);
|
| 269 |
+
|
| 270 |
+
final GeneralPurposeBit gpFlag = GeneralPurposeBit.parse(lfhBuf, off);
|
| 271 |
+
final boolean hasUTF8Flag = gpFlag.usesUTF8ForNames();
|
| 272 |
+
final ZipEncoding entryEncoding = hasUTF8Flag ? ZipEncodingHelper.UTF8_ZIP_ENCODING : zipEncoding;
|
| 273 |
+
current.hasDataDescriptor = gpFlag.usesDataDescriptor();
|
| 274 |
+
current.entry.setGeneralPurposeBit(gpFlag);
|
| 275 |
+
|
| 276 |
+
off += SHORT;
|
| 277 |
+
|
| 278 |
+
current.entry.setMethod(ZipShort.getValue(lfhBuf, off));
|
| 279 |
+
off += SHORT;
|
| 280 |
+
|
| 281 |
+
final long time = ZipUtil.dosToJavaTime(ZipLong.getValue(lfhBuf, off));
|
| 282 |
+
current.entry.setTime(time);
|
| 283 |
+
off += WORD;
|
| 284 |
+
|
| 285 |
+
ZipLong size = null, cSize = null;
|
| 286 |
+
if (!current.hasDataDescriptor) {
|
| 287 |
+
current.entry.setCrc(ZipLong.getValue(lfhBuf, off));
|
| 288 |
+
off += WORD;
|
| 289 |
+
|
| 290 |
+
cSize = new ZipLong(lfhBuf, off);
|
| 291 |
+
off += WORD;
|
| 292 |
+
|
| 293 |
+
size = new ZipLong(lfhBuf, off);
|
| 294 |
+
off += WORD;
|
| 295 |
+
} else {
|
| 296 |
+
off += 3 * WORD;
|
| 297 |
+
}
|
| 298 |
+
|
| 299 |
+
final int fileNameLen = ZipShort.getValue(lfhBuf, off);
|
| 300 |
+
|
| 301 |
+
off += SHORT;
|
| 302 |
+
|
| 303 |
+
final int extraLen = ZipShort.getValue(lfhBuf, off);
|
| 304 |
+
off += SHORT; // NOSONAR - assignment as documentation
|
| 305 |
+
|
| 306 |
+
final byte[] fileName = new byte[fileNameLen];
|
| 307 |
+
readFully(fileName);
|
| 308 |
+
current.entry.setName(entryEncoding.decode(fileName), fileName);
|
| 309 |
+
if (hasUTF8Flag) {
|
| 310 |
+
current.entry.setNameSource(ZipArchiveEntry.NameSource.NAME_WITH_EFS_FLAG);
|
| 311 |
+
}
|
| 312 |
+
|
| 313 |
+
final byte[] extraData = new byte[extraLen];
|
| 314 |
+
readFully(extraData);
|
| 315 |
+
current.entry.setExtra(extraData);
|
| 316 |
+
|
| 317 |
+
if (!hasUTF8Flag && useUnicodeExtraFields) {
|
| 318 |
+
ZipUtil.setNameAndCommentFromExtraFields(current.entry, fileName, null);
|
| 319 |
+
}
|
| 320 |
+
|
| 321 |
+
processZip64Extra(size, cSize);
|
| 322 |
+
|
| 323 |
+
current.entry.setLocalHeaderOffset(currentHeaderOffset);
|
| 324 |
+
current.entry.setDataOffset(getBytesRead());
|
| 325 |
+
current.entry.setStreamContiguous(true);
|
| 326 |
+
|
| 327 |
+
ZipMethod m = ZipMethod.getMethodByCode(current.entry.getMethod());
|
| 328 |
+
if (current.entry.getCompressedSize() != ArchiveEntry.SIZE_UNKNOWN) {
|
| 329 |
+
if (ZipUtil.canHandleEntryData(current.entry) && m != ZipMethod.STORED && m != ZipMethod.DEFLATED) {
|
| 330 |
+
InputStream bis = new BoundedInputStream(in, current.entry.getCompressedSize());
|
| 331 |
+
switch (m) {
|
| 332 |
+
case UNSHRINKING:
|
| 333 |
+
current.in = new UnshrinkingInputStream(bis);
|
| 334 |
+
break;
|
| 335 |
+
case IMPLODING:
|
| 336 |
+
current.in = new ExplodingInputStream(
|
| 337 |
+
current.entry.getGeneralPurposeBit().getSlidingDictionarySize(),
|
| 338 |
+
current.entry.getGeneralPurposeBit().getNumberOfShannonFanoTrees(),
|
| 339 |
+
bis);
|
| 340 |
+
break;
|
| 341 |
+
case BZIP2:
|
| 342 |
+
current.in = new BZip2CompressorInputStream(bis);
|
| 343 |
+
break;
|
| 344 |
+
case ENHANCED_DEFLATED:
|
| 345 |
+
current.in = new Deflate64CompressorInputStream(bis);
|
| 346 |
+
break;
|
| 347 |
+
default:
|
| 348 |
+
// we should never get here as all supported methods have been covered
|
| 349 |
+
// will cause an error when read is invoked, don't throw an exception here so people can
|
| 350 |
+
// skip unsupported entries
|
| 351 |
+
break;
|
| 352 |
+
}
|
| 353 |
+
}
|
| 354 |
+
} else if (m == ZipMethod.ENHANCED_DEFLATED) {
|
| 355 |
+
current.in = new Deflate64CompressorInputStream(in);
|
| 356 |
+
}
|
| 357 |
+
|
| 358 |
+
entriesRead++;
|
| 359 |
+
return current.entry;
|
| 360 |
+
}
|
| 361 |
+
|
| 362 |
+
/**
|
| 363 |
+
* Fills the given array with the first local file header and
|
| 364 |
+
* deals with splitting/spanning markers that may prefix the first
|
| 365 |
+
* LFH.
|
| 366 |
+
*/
|
| 367 |
+
private void readFirstLocalFileHeader(final byte[] lfh) throws IOException {
|
| 368 |
+
readFully(lfh);
|
| 369 |
+
final ZipLong sig = new ZipLong(lfh);
|
| 370 |
+
if (sig.equals(ZipLong.DD_SIG)) {
|
| 371 |
+
throw new UnsupportedZipFeatureException(UnsupportedZipFeatureException.Feature.SPLITTING);
|
| 372 |
+
}
|
| 373 |
+
|
| 374 |
+
if (sig.equals(ZipLong.SINGLE_SEGMENT_SPLIT_MARKER)) {
|
| 375 |
+
// The archive is not really split as only one segment was
|
| 376 |
+
// needed in the end. Just skip over the marker.
|
| 377 |
+
final byte[] missedLfhBytes = new byte[4];
|
| 378 |
+
readFully(missedLfhBytes);
|
| 379 |
+
System.arraycopy(lfh, 4, lfh, 0, LFH_LEN - 4);
|
| 380 |
+
System.arraycopy(missedLfhBytes, 0, lfh, LFH_LEN - 4, 4);
|
| 381 |
+
}
|
| 382 |
+
}
|
| 383 |
+
|
| 384 |
+
/**
|
| 385 |
+
* Records whether a Zip64 extra is present and sets the size
|
| 386 |
+
* information from it if sizes are 0xFFFFFFFF and the entry
|
| 387 |
+
* doesn't use a data descriptor.
|
| 388 |
+
*/
|
| 389 |
+
private void processZip64Extra(final ZipLong size, final ZipLong cSize) {
|
| 390 |
+
final Zip64ExtendedInformationExtraField z64 =
|
| 391 |
+
(Zip64ExtendedInformationExtraField)
|
| 392 |
+
current.entry.getExtraField(Zip64ExtendedInformationExtraField.HEADER_ID);
|
| 393 |
+
current.usesZip64 = z64 != null;
|
| 394 |
+
if (!current.hasDataDescriptor) {
|
| 395 |
+
if (z64 != null // same as current.usesZip64 but avoids NPE warning
|
| 396 |
+
&& (ZipLong.ZIP64_MAGIC.equals(cSize) || ZipLong.ZIP64_MAGIC.equals(size)) ) {
|
| 397 |
+
current.entry.setCompressedSize(z64.getCompressedSize().getLongValue());
|
| 398 |
+
current.entry.setSize(z64.getSize().getLongValue());
|
| 399 |
+
} else if (cSize != null && size != null) {
|
| 400 |
+
current.entry.setCompressedSize(cSize.getValue());
|
| 401 |
+
current.entry.setSize(size.getValue());
|
| 402 |
+
}
|
| 403 |
+
}
|
| 404 |
+
}
|
| 405 |
+
|
| 406 |
+
@Override
public ArchiveEntry getNextEntry() throws IOException {
    // Delegate to the ZIP-specific accessor.
    return getNextZipEntry();
}
|
| 410 |
+
|
| 411 |
+
/**
|
| 412 |
+
* Whether this class is able to read the given entry.
|
| 413 |
+
*
|
| 414 |
+
* <p>May return false if it is set up to use encryption or a
|
| 415 |
+
* compression method that hasn't been implemented yet.</p>
|
| 416 |
+
* @since 1.1
|
| 417 |
+
*/
|
| 418 |
+
@Override
public boolean canReadEntryData(final ArchiveEntry ae) {
    if (ae instanceof ZipArchiveEntry) {
        final ZipArchiveEntry ze = (ZipArchiveEntry) ae;
        // Readable only if the method/encryption is supported AND any data
        // descriptor / unknown-size situation can be handled by this stream.
        return ZipUtil.canHandleEntryData(ze)
            && supportsDataDescriptorFor(ze)
            && supportsCompressedSizeFor(ze);
    }
    // Non-ZIP entries can never be read by this stream.
    return false;
}
|
| 428 |
+
|
| 429 |
+
@Override
public int read(final byte[] buffer, final int offset, final int length) throws IOException {
    if (length == 0) {
        return 0;
    }
    if (closed) {
        throw new IOException("The stream is closed");
    }

    // No current entry means no data to deliver.
    if (current == null) {
        return -1;
    }

    // avoid int overflow, check null buffer
    if (offset > buffer.length || length < 0 || offset < 0 || buffer.length - offset < length) {
        throw new ArrayIndexOutOfBoundsException();
    }

    ZipUtil.checkRequestedFeatures(current.entry);
    if (!supportsDataDescriptorFor(current.entry)) {
        throw new UnsupportedZipFeatureException(UnsupportedZipFeatureException.Feature.DATA_DESCRIPTOR,
                current.entry);
    }
    if (!supportsCompressedSizeFor(current.entry)) {
        throw new UnsupportedZipFeatureException(UnsupportedZipFeatureException.Feature.UNKNOWN_COMPRESSED_SIZE,
                current.entry);
    }

    // Dispatch on the entry's compression method.
    int read;
    if (current.entry.getMethod() == ZipArchiveOutputStream.STORED) {
        read = readStored(buffer, offset, length);
    } else if (current.entry.getMethod() == ZipArchiveOutputStream.DEFLATED) {
        read = readDeflated(buffer, offset, length);
    } else if (current.entry.getMethod() == ZipMethod.UNSHRINKING.getCode()
            || current.entry.getMethod() == ZipMethod.IMPLODING.getCode()
            || current.entry.getMethod() == ZipMethod.ENHANCED_DEFLATED.getCode()
            || current.entry.getMethod() == ZipMethod.BZIP2.getCode()) {
        // These methods wrap the raw stream in a dedicated decompressor.
        read = current.in.read(buffer, offset, length);
    } else {
        throw new UnsupportedZipFeatureException(ZipMethod.getMethodByCode(current.entry.getMethod()),
                current.entry);
    }

    if (read >= 0) {
        // Track CRC and total uncompressed bytes for verification/statistics.
        current.crc.update(buffer, offset, read);
        uncompressedCount += read;
    }

    return read;
}
|
| 479 |
+
|
| 480 |
+
/**
|
| 481 |
+
* @since 1.17
|
| 482 |
+
*/
|
| 483 |
+
@Override
public long getCompressedCount() {
    // Each compression method keeps its own notion of consumed bytes.
    if (current.entry.getMethod() == ZipArchiveOutputStream.STORED) {
        return current.bytesRead;
    } else if (current.entry.getMethod() == ZipArchiveOutputStream.DEFLATED) {
        return getBytesInflated();
    } else if (current.entry.getMethod() == ZipMethod.UNSHRINKING.getCode()) {
        return ((UnshrinkingInputStream) current.in).getCompressedCount();
    } else if (current.entry.getMethod() == ZipMethod.IMPLODING.getCode()) {
        return ((ExplodingInputStream) current.in).getCompressedCount();
    } else if (current.entry.getMethod() == ZipMethod.ENHANCED_DEFLATED.getCode()) {
        return ((Deflate64CompressorInputStream) current.in).getCompressedCount();
    } else if (current.entry.getMethod() == ZipMethod.BZIP2.getCode()) {
        return ((BZip2CompressorInputStream) current.in).getCompressedCount();
    } else {
        // Unknown method: no reliable count available.
        return -1;
    }
}
|
| 501 |
+
|
| 502 |
+
/**
|
| 503 |
+
* @since 1.17
|
| 504 |
+
*/
|
| 505 |
+
@Override
public long getUncompressedCount() {
    // Maintained incrementally by read().
    return uncompressedCount;
}
|
| 509 |
+
|
| 510 |
+
/**
|
| 511 |
+
* Implementation of read for STORED entries.
|
| 512 |
+
*/
|
| 513 |
+
private int readStored(final byte[] buffer, final int offset, final int length) throws IOException {

    if (current.hasDataDescriptor) {
        // Sizes are only known after the data; the whole entry must be
        // buffered first (readStoredEntry) and then served from memory.
        if (lastStoredEntry == null) {
            readStoredEntry();
        }
        return lastStoredEntry.read(buffer, offset, length);
    }

    // For STORED entries compressed size == uncompressed size.
    final long csize = current.entry.getSize();
    if (current.bytesRead >= csize) {
        return -1;
    }

    if (buf.position() >= buf.limit()) {
        buf.position(0);
        final int l = in.read(buf.array());
        if (l == -1) {
            buf.limit(0);
            throw new IOException("Truncated ZIP file");
        }
        buf.limit(l);

        count(l);
        current.bytesReadFromStream += l;
    }

    int toRead = Math.min(buf.remaining(), length);
    if ((csize - current.bytesRead) < toRead) {
        // if it is smaller than toRead then it fits into an int
        toRead = (int) (csize - current.bytesRead);
    }
    buf.get(buffer, offset, toRead);
    current.bytesRead += toRead;
    return toRead;
}
|
| 549 |
+
|
| 550 |
+
/**
|
| 551 |
+
* Implementation of read for DEFLATED entries.
|
| 552 |
+
*/
|
| 553 |
+
private int readDeflated(final byte[] buffer, final int offset, final int length) throws IOException {
    final int read = readFromInflater(buffer, offset, length);
    if (read <= 0) {
        if (inf.finished()) {
            // Normal end of the deflated entry.
            return -1;
        } else if (inf.needsDictionary()) {
            throw new ZipException("This archive needs a preset dictionary"
                + " which is not supported by Commons"
                + " Compress.");
        } else if (read == -1) {
            // Underlying stream ended while the inflater still wanted input.
            throw new IOException("Truncated ZIP file");
        }
    }
    return read;
}
|
| 568 |
+
|
| 569 |
+
/**
|
| 570 |
+
* Potentially reads more bytes to fill the inflater's buffer and
|
| 571 |
+
* reads from it.
|
| 572 |
+
*/
|
| 573 |
+
private int readFromInflater(final byte[] buffer, final int offset, final int length) throws IOException {
    int read = 0;
    do {
        if (inf.needsInput()) {
            final int l = fill();
            if (l > 0) {
                // Note: counts the whole buffer that was handed to the
                // inflater, not only what it has consumed so far.
                current.bytesReadFromStream += buf.limit();
            } else if (l == -1) {
                return -1;
            } else {
                break;
            }
        }
        try {
            read = inf.inflate(buffer, offset, length);
        } catch (final DataFormatException e) {
            throw (IOException) new ZipException(e.getMessage()).initCause(e);
        }
    } while (read == 0 && inf.needsInput());
    return read;
}
|
| 594 |
+
|
| 595 |
+
@Override
public void close() throws IOException {
    if (!closed) {
        closed = true;
        try {
            in.close();
        } finally {
            // Always release the inflater's native resources.
            inf.end();
        }
    }
}
|
| 606 |
+
|
| 607 |
+
/**
|
| 608 |
+
* Skips over and discards value bytes of data from this input
|
| 609 |
+
* stream.
|
| 610 |
+
*
|
| 611 |
+
* <p>This implementation may end up skipping over some smaller
|
| 612 |
+
* number of bytes, possibly 0, if and only if it reaches the end
|
| 613 |
+
* of the underlying stream.</p>
|
| 614 |
+
*
|
| 615 |
+
* <p>The actual number of bytes skipped is returned.</p>
|
| 616 |
+
*
|
| 617 |
+
* @param value the number of bytes to be skipped.
|
| 618 |
+
* @return the actual number of bytes skipped.
|
| 619 |
+
* @throws IOException - if an I/O error occurs.
|
| 620 |
+
* @throws IllegalArgumentException - if value is negative.
|
| 621 |
+
*/
|
| 622 |
+
@Override
public long skip(final long value) throws IOException {
    if (value >= 0) {
        long skipped = 0;
        while (skipped < value) {
            final long rem = value - skipped;
            // Skip by reading through the (possibly inflating) entry stream
            // so counters and CRC stay consistent.
            final int x = read(skipBuf, 0, (int) (skipBuf.length > rem ? rem : skipBuf.length));
            if (x == -1) {
                return skipped;
            }
            skipped += x;
        }
        return skipped;
    }
    throw new IllegalArgumentException();
}
|
| 638 |
+
|
| 639 |
+
/**
|
| 640 |
+
* Checks if the signature matches what is expected for a zip file.
|
| 641 |
+
* Does not currently handle self-extracting zips which may have arbitrary
|
| 642 |
+
* leading content.
|
| 643 |
+
*
|
| 644 |
+
* @param signature the bytes to check
|
| 645 |
+
* @param length the number of bytes to check
|
| 646 |
+
* @return true, if this stream is a zip archive stream, false otherwise
|
| 647 |
+
*/
|
| 648 |
+
public static boolean matches(final byte[] signature, final int length) {
    if (length < ZipArchiveOutputStream.LFH_SIG.length) {
        return false;
    }

    // Accept any of the signatures a readable archive may start with.
    return checksig(signature, ZipArchiveOutputStream.LFH_SIG) // normal file
        || checksig(signature, ZipArchiveOutputStream.EOCD_SIG) // empty zip
        || checksig(signature, ZipArchiveOutputStream.DD_SIG) // split zip
        || checksig(signature, ZipLong.SINGLE_SEGMENT_SPLIT_MARKER.getBytes());
}
|
| 658 |
+
|
| 659 |
+
private static boolean checksig(final byte[] signature, final byte[] expected) {
|
| 660 |
+
for (int i = 0; i < expected.length; i++) {
|
| 661 |
+
if (signature[i] != expected[i]) {
|
| 662 |
+
return false;
|
| 663 |
+
}
|
| 664 |
+
}
|
| 665 |
+
return true;
|
| 666 |
+
}
|
| 667 |
+
|
| 668 |
+
/**
|
| 669 |
+
* Closes the current ZIP archive entry and positions the underlying
|
| 670 |
+
* stream to the beginning of the next entry. All per-entry variables
|
| 671 |
+
* and data structures are cleared.
|
| 672 |
+
* <p>
|
| 673 |
+
* If the compressed size of this entry is included in the entry header,
|
| 674 |
+
* then any outstanding bytes are simply skipped from the underlying
|
| 675 |
+
* stream without uncompressing them. This allows an entry to be safely
|
| 676 |
+
* closed even if the compression method is unsupported.
|
| 677 |
+
* <p>
|
| 678 |
+
* In case we don't know the compressed size of this entry or have
|
| 679 |
+
* already buffered too much data from the underlying stream to support
|
| 680 |
+
* uncompression, then the uncompression process is completed and the
|
| 681 |
+
* end position of the stream is adjusted based on the result of that
|
| 682 |
+
* process.
|
| 683 |
+
*
|
| 684 |
+
* @throws IOException if an error occurs
|
| 685 |
+
*/
|
| 686 |
+
private void closeEntry() throws IOException {
    if (closed) {
        throw new IOException("The stream is closed");
    }
    if (current == null) {
        return;
    }

    // Ensure all entry bytes are read
    if (currentEntryHasOutstandingBytes()) {
        drainCurrentEntryData();
    } else {
        // this is guaranteed to exhaust the stream
        skip(Long.MAX_VALUE); //NOSONAR

        final long inB = current.entry.getMethod() == ZipArchiveOutputStream.DEFLATED
            ? getBytesInflated() : current.bytesRead;

        // this is at most a single read() operation and can't
        // exceed the range of int
        final int diff = (int) (current.bytesReadFromStream - inB);

        // Pushback any required bytes
        if (diff > 0) {
            pushback(buf.array(), buf.limit() - diff, diff);
            current.bytesReadFromStream -= diff;
        }

        // Drain remainder of entry if not all data bytes were required
        if (currentEntryHasOutstandingBytes()) {
            drainCurrentEntryData();
        }
    }

    if (lastStoredEntry == null && current.hasDataDescriptor) {
        readDataDescriptor();
    }

    // Reset per-entry state so the next entry starts clean.
    inf.reset();
    buf.clear().flip();
    current = null;
    lastStoredEntry = null;
}
|
| 729 |
+
|
| 730 |
+
/**
|
| 731 |
+
* If the compressed size of the current entry is included in the entry header
|
| 732 |
+
* and there are any outstanding bytes in the underlying stream, then
|
| 733 |
+
* this returns true.
|
| 734 |
+
*
|
| 735 |
+
* @return true, if current entry is determined to have outstanding bytes, false otherwise
|
| 736 |
+
*/
|
| 737 |
+
private boolean currentEntryHasOutstandingBytes() {
    // Only meaningful when the compressed size was known up front
    // (no data descriptor).
    return current.bytesReadFromStream <= current.entry.getCompressedSize()
        && !current.hasDataDescriptor;
}
|
| 741 |
+
|
| 742 |
+
/**
|
| 743 |
+
* Read all data of the current entry from the underlying stream
|
| 744 |
+
* that hasn't been read, yet.
|
| 745 |
+
*/
|
| 746 |
+
private void drainCurrentEntryData() throws IOException {
    long remaining = current.entry.getCompressedSize() - current.bytesReadFromStream;
    while (remaining > 0) {
        // Discard into buf; only the byte count matters here.
        final long n = in.read(buf.array(), 0, (int) Math.min(buf.capacity(), remaining));
        if (n < 0) {
            throw new EOFException("Truncated ZIP entry: "
                + ArchiveUtils.sanitize(current.entry.getName()));
        }
        count(n);
        remaining -= n;
    }
}
|
| 758 |
+
|
| 759 |
+
/**
|
| 760 |
+
* Get the number of bytes Inflater has actually processed.
|
| 761 |
+
*
|
| 762 |
+
* <p>for Java < Java7 the getBytes* methods in
|
| 763 |
+
* Inflater/Deflater seem to return unsigned ints rather than
|
| 764 |
+
* longs that start over with 0 at 2^32.</p>
|
| 765 |
+
*
|
| 766 |
+
* <p>The stream knows how many bytes it has read, but not how
|
| 767 |
+
* many the Inflater actually consumed - it should be between the
|
| 768 |
+
* total number of bytes read for the entry and the total number
|
| 769 |
+
* minus the last read operation. Here we just try to make the
|
| 770 |
+
* value close enough to the bytes we've read by assuming the
|
| 771 |
+
* number of bytes consumed must be smaller than (or equal to) the
|
| 772 |
+
* number of bytes read but not smaller by more than 2^32.</p>
|
| 773 |
+
*/
|
| 774 |
+
private long getBytesInflated() {
    long inB = inf.getBytesRead();
    if (current.bytesReadFromStream >= TWO_EXP_32) {
        // Compensate for pre-Java7 Inflater counters that wrap at 2^32.
        while (inB + TWO_EXP_32 <= current.bytesReadFromStream) {
            inB += TWO_EXP_32;
        }
    }
    return inB;
}
|
| 783 |
+
|
| 784 |
+
private int fill() throws IOException {
    if (closed) {
        throw new IOException("The stream is closed");
    }
    final int length = in.read(buf.array());
    if (length > 0) {
        buf.limit(length);
        count(buf.limit());
        // Hand the freshly read bytes to the inflater.
        inf.setInput(buf.array(), 0, buf.limit());
    }
    return length;
}
|
| 796 |
+
|
| 797 |
+
private void readFully(final byte[] b) throws IOException {
    // Fill the whole array starting at offset 0.
    readFully(b, 0);
}
|
| 800 |
+
|
| 801 |
+
private void readFully(final byte[] b, final int off) throws IOException {
    final int len = b.length - off;
    final int count = IOUtils.readFully(in, b, off, len);
    count(count);
    // A short read means the stream ended prematurely.
    if (count < len) {
        throw new EOFException();
    }
}
|
| 809 |
+
|
| 810 |
+
private void readDataDescriptor() throws IOException {
    readFully(wordBuf);
    ZipLong val = new ZipLong(wordBuf);
    if (ZipLong.DD_SIG.equals(val)) {
        // data descriptor with signature, skip sig
        readFully(wordBuf);
        val = new ZipLong(wordBuf);
    }
    current.entry.setCrc(val.getValue());

    // if there is a ZIP64 extra field, sizes are eight bytes
    // each, otherwise four bytes each. Unfortunately some
    // implementations - namely Java7 - use eight bytes without
    // using a ZIP64 extra field -
    // https://bugs.sun.com/bugdatabase/view_bug.do?bug_id=7073588

    // just read 16 bytes and check whether bytes nine to twelve
    // look like one of the signatures of what could follow a data
    // descriptor (ignoring archive decryption headers for now).
    // If so, push back eight bytes and assume sizes are four
    // bytes, otherwise sizes are eight bytes each.
    readFully(twoDwordBuf);
    final ZipLong potentialSig = new ZipLong(twoDwordBuf, DWORD);
    if (potentialSig.equals(ZipLong.CFH_SIG) || potentialSig.equals(ZipLong.LFH_SIG)) {
        pushback(twoDwordBuf, DWORD, DWORD);
        current.entry.setCompressedSize(ZipLong.getValue(twoDwordBuf));
        current.entry.setSize(ZipLong.getValue(twoDwordBuf, WORD));
    } else {
        current.entry.setCompressedSize(ZipEightByteInteger.getLongValue(twoDwordBuf));
        current.entry.setSize(ZipEightByteInteger.getLongValue(twoDwordBuf, DWORD));
    }
}
|
| 842 |
+
|
| 843 |
+
/**
|
| 844 |
+
* Whether this entry requires a data descriptor this library can work with.
|
| 845 |
+
*
|
| 846 |
+
* @return true if allowStoredEntriesWithDataDescriptor is true,
|
| 847 |
+
* the entry doesn't require any data descriptor or the method is
|
| 848 |
+
* DEFLATED or ENHANCED_DEFLATED.
|
| 849 |
+
*/
|
| 850 |
+
private boolean supportsDataDescriptorFor(final ZipArchiveEntry entry) {
    // Readable if no descriptor is used, or the method lets us find the
    // entry's end anyway (DEFLATED/ENHANCED_DEFLATED), or the caller
    // explicitly opted in for STORED entries.
    return !entry.getGeneralPurposeBit().usesDataDescriptor()
        || (allowStoredEntriesWithDataDescriptor && entry.getMethod() == ZipEntry.STORED)
        || entry.getMethod() == ZipEntry.DEFLATED
        || entry.getMethod() == ZipMethod.ENHANCED_DEFLATED.getCode();
}
|
| 857 |
+
|
| 858 |
+
/**
|
| 859 |
+
* Whether the compressed size for the entry is either known or
|
| 860 |
+
* not required by the compression method being used.
|
| 861 |
+
*/
|
| 862 |
+
private boolean supportsCompressedSizeFor(final ZipArchiveEntry entry) {
    // Size may be unknown only for self-terminating methods, or for STORED
    // entries with a data descriptor when explicitly allowed.
    return entry.getCompressedSize() != ArchiveEntry.SIZE_UNKNOWN
        || entry.getMethod() == ZipEntry.DEFLATED
        || entry.getMethod() == ZipMethod.ENHANCED_DEFLATED.getCode()
        || (entry.getGeneralPurposeBit().usesDataDescriptor()
            && allowStoredEntriesWithDataDescriptor
            && entry.getMethod() == ZipEntry.STORED);
}
|
| 870 |
+
|
| 871 |
+
// Appended to exception messages when a streamed stored entry cannot be
// handled; points users to the random-access ZipFile alternative.
private static final String USE_ZIPFILE_INSTEAD_OF_STREAM_DISCLAIMER =
    " while reading a stored entry using data descriptor. Either the archive is broken"
    + " or it can not be read using ZipArchiveInputStream and you must use ZipFile."
    + " A common cause for this is a ZIP archive containing a ZIP archive."
    + " See http://commons.apache.org/proper/commons-compress/zip.html#ZipArchiveInputStream_vs_ZipFile";
|
| 876 |
+
|
| 877 |
+
/**
|
| 878 |
+
* Caches a stored entry that uses the data descriptor.
|
| 879 |
+
*
|
| 880 |
+
* <ul>
|
| 881 |
+
* <li>Reads a stored entry until the signature of a local file
|
| 882 |
+
* header, central directory header or data descriptor has been
|
| 883 |
+
* found.</li>
|
| 884 |
+
* <li>Stores all entry data in lastStoredEntry.</p>
|
| 885 |
+
* <li>Rewinds the stream to position at the data
|
| 886 |
+
* descriptor.</li>
|
| 887 |
+
* <li>reads the data descriptor</li>
|
| 888 |
+
* </ul>
|
| 889 |
+
*
|
| 890 |
+
* <p>After calling this method the entry should know its size,
|
| 891 |
+
* the entry's data is cached and the stream is positioned at the
|
| 892 |
+
* next local file or central directory header.</p>
|
| 893 |
+
*/
|
| 894 |
+
private void readStoredEntry() throws IOException {
    final ByteArrayOutputStream bos = new ByteArrayOutputStream();
    int off = 0;
    boolean done = false;

    // length of DD without signature
    final int ddLen = current.usesZip64 ? WORD + 2 * DWORD : 3 * WORD;

    while (!done) {
        final int r = in.read(buf.array(), off, ZipArchiveOutputStream.BUFFER_SIZE - off);
        if (r <= 0) {
            // read the whole archive without ever finding a
            // central directory
            throw new IOException("Truncated ZIP file");
        }
        if (r + off < 4) {
            // buffer too small to check for a signature, loop
            off += r;
            continue;
        }

        done = bufferContainsSignature(bos, off, r, ddLen);
        if (!done) {
            off = cacheBytesRead(bos, off, r, ddLen);
        }
    }
    // Sanity checks against the sizes recovered from the data descriptor.
    if (current.entry.getCompressedSize() != current.entry.getSize()) {
        throw new ZipException("compressed and uncompressed size don't match"
            + USE_ZIPFILE_INSTEAD_OF_STREAM_DISCLAIMER);
    }
    final byte[] b = bos.toByteArray();
    if (b.length != current.entry.getSize()) {
        throw new ZipException("actual and claimed size don't match"
            + USE_ZIPFILE_INSTEAD_OF_STREAM_DISCLAIMER);
    }
    lastStoredEntry = new ByteArrayInputStream(b);
}
|
| 931 |
+
|
| 932 |
+
// Cached signature bytes used when scanning for entry boundaries.
private static final byte[] LFH = ZipLong.LFH_SIG.getBytes();
private static final byte[] CFH = ZipLong.CFH_SIG.getBytes();
private static final byte[] DD = ZipLong.DD_SIG.getBytes();
|
| 935 |
+
|
| 936 |
+
/**
|
| 937 |
+
* Checks whether the current buffer contains the signature of a
|
| 938 |
+
* "data descriptor", "local file header" or
|
| 939 |
+
* "central directory entry".
|
| 940 |
+
*
|
| 941 |
+
* <p>If it contains such a signature, reads the data descriptor
|
| 942 |
+
* and positions the stream right after the data descriptor.</p>
|
| 943 |
+
*/
|
| 944 |
+
private boolean bufferContainsSignature(final ByteArrayOutputStream bos, final int offset, final int lastRead, final int expectedDDLen)
|
| 945 |
+
throws IOException {
|
| 946 |
+
|
| 947 |
+
boolean done = false;
|
| 948 |
+
for (int i = 0; !done && i < offset + lastRead - 4; i++) {
|
| 949 |
+
if (buf.array()[i] == LFH[0] && buf.array()[i + 1] == LFH[1]) {
|
| 950 |
+
int expectDDPos = i;
|
| 951 |
+
if (i >= expectedDDLen &&
|
| 952 |
+
(buf.array()[i + 2] == LFH[2] && buf.array()[i + 3] == LFH[3])
|
| 953 |
+
|| (buf.array()[i] == CFH[2] && buf.array()[i + 3] == CFH[3])) {
|
| 954 |
+
// found a LFH or CFH:
|
| 955 |
+
expectDDPos = i - expectedDDLen;
|
| 956 |
+
done = true;
|
| 957 |
+
}
|
| 958 |
+
else if (buf.array()[i + 2] == DD[2] && buf.array()[i + 3] == DD[3]) {
|
| 959 |
+
// found DD:
|
| 960 |
+
done = true;
|
| 961 |
+
}
|
| 962 |
+
if (done) {
|
| 963 |
+
// * push back bytes read in excess as well as the data
|
| 964 |
+
// descriptor
|
| 965 |
+
// * copy the remaining bytes to cache
|
| 966 |
+
// * read data descriptor
|
| 967 |
+
pushback(buf.array(), expectDDPos, offset + lastRead - expectDDPos);
|
| 968 |
+
bos.write(buf.array(), 0, expectDDPos);
|
| 969 |
+
readDataDescriptor();
|
| 970 |
+
}
|
| 971 |
+
}
|
| 972 |
+
}
|
| 973 |
+
return done;
|
| 974 |
+
}
|
| 975 |
+
|
| 976 |
+
/**
|
| 977 |
+
* If the last read bytes could hold a data descriptor and an
|
| 978 |
+
* incomplete signature then save the last bytes to the front of
|
| 979 |
+
* the buffer and cache everything in front of the potential data
|
| 980 |
+
* descriptor into the given ByteArrayOutputStream.
|
| 981 |
+
*
|
| 982 |
+
* <p>Data descriptor plus incomplete signature (3 bytes in the
|
| 983 |
+
* worst case) can be 20 bytes max.</p>
|
| 984 |
+
*/
|
| 985 |
+
private int cacheBytesRead(final ByteArrayOutputStream bos, int offset, final int lastRead, final int expecteDDLen) {
    // Keep the last (DD length + 3) bytes in buf — they may hold a data
    // descriptor plus a partial signature — and flush the rest to the cache.
    final int cacheable = offset + lastRead - expecteDDLen - 3;
    if (cacheable > 0) {
        bos.write(buf.array(), 0, cacheable);
        System.arraycopy(buf.array(), cacheable, buf.array(), 0, expecteDDLen + 3);
        offset = expecteDDLen + 3;
    } else {
        offset += lastRead;
    }
    return offset;
}
|
| 996 |
+
|
| 997 |
+
private void pushback(final byte[] buf, final int offset, final int length) throws IOException {
    // in is a PushbackInputStream here; unread so the bytes are seen again.
    ((PushbackInputStream) in).unread(buf, offset, length);
    pushedBackBytes(length);
}
|
| 1001 |
+
|
| 1002 |
+
// End of Central Directory Record
|
| 1003 |
+
// end of central dir signature WORD
|
| 1004 |
+
// number of this disk SHORT
|
| 1005 |
+
// number of the disk with the
|
| 1006 |
+
// start of the central directory SHORT
|
| 1007 |
+
// total number of entries in the
|
| 1008 |
+
// central directory on this disk SHORT
|
| 1009 |
+
// total number of entries in
|
| 1010 |
+
// the central directory SHORT
|
| 1011 |
+
// size of the central directory WORD
|
| 1012 |
+
// offset of start of central
|
| 1013 |
+
// directory with respect to
|
| 1014 |
+
// the starting disk number WORD
|
| 1015 |
+
// .ZIP file comment length SHORT
|
| 1016 |
+
// .ZIP file comment up to 64KB
|
| 1017 |
+
//
|
| 1018 |
+
|
| 1019 |
+
/**
|
| 1020 |
+
* Reads the stream until it find the "End of central directory
|
| 1021 |
+
* record" and consumes it as well.
|
| 1022 |
+
*/
|
| 1023 |
+
private void skipRemainderOfArchive() throws IOException {
    // skip over central directory. One LFH has been read too much
    // already. The calculation discounts file names and extra
    // data so it will be too short.
    realSkip((long) entriesRead * CFH_LEN - LFH_LEN);
    findEocdRecord();
    realSkip((long) ZipFile.MIN_EOCD_SIZE - WORD /* signature */ - SHORT /* comment len */);
    readFully(shortBuf);
    // file comment
    realSkip(ZipShort.getValue(shortBuf));
}
|
| 1034 |
+
|
| 1035 |
+
/**
|
| 1036 |
+
* Reads forward until the signature of the "End of central
|
| 1037 |
+
* directory" record is found.
|
| 1038 |
+
*/
|
| 1039 |
+
private void findEocdRecord() throws IOException {
    int currentByte = -1;
    // skipReadCall is set when a mismatched byte might itself be the start
    // of a new EOCD signature, so the next loop iteration re-uses it.
    boolean skipReadCall = false;
    while (skipReadCall || (currentByte = readOneByte()) > -1) {
        skipReadCall = false;
        if (!isFirstByteOfEocdSig(currentByte)) {
            continue;
        }
        currentByte = readOneByte();
        if (currentByte != ZipArchiveOutputStream.EOCD_SIG[1]) {
            if (currentByte == -1) {
                break;
            }
            skipReadCall = isFirstByteOfEocdSig(currentByte);
            continue;
        }
        currentByte = readOneByte();
        if (currentByte != ZipArchiveOutputStream.EOCD_SIG[2]) {
            if (currentByte == -1) {
                break;
            }
            skipReadCall = isFirstByteOfEocdSig(currentByte);
            continue;
        }
        currentByte = readOneByte();
        if (currentByte == -1
            || currentByte == ZipArchiveOutputStream.EOCD_SIG[3]) {
            // Either EOF or the full EOCD signature has been matched.
            break;
        }
        skipReadCall = isFirstByteOfEocdSig(currentByte);
    }
}
|
| 1071 |
+
|
| 1072 |
+
/**
|
| 1073 |
+
* Skips bytes by reading from the underlying stream rather than
|
| 1074 |
+
* the (potentially inflating) archive stream - which {@link
|
| 1075 |
+
* #skip} would do.
|
| 1076 |
+
*
|
| 1077 |
+
* Also updates bytes-read counter.
|
| 1078 |
+
*/
|
| 1079 |
+
private void realSkip(final long value) throws IOException {
    if (value >= 0) {
        long skipped = 0;
        while (skipped < value) {
            final long rem = value - skipped;
            // Read straight from the raw stream, bypassing entry decoding.
            final int x = in.read(skipBuf, 0, (int) (skipBuf.length > rem ? rem : skipBuf.length));
            if (x == -1) {
                return;
            }
            count(x);
            skipped += x;
        }
        return;
    }
    throw new IllegalArgumentException();
}
|
| 1095 |
+
|
| 1096 |
+
/**
|
| 1097 |
+
* Reads bytes by reading from the underlying stream rather than
|
| 1098 |
+
* the (potentially inflating) archive stream - which {@link #read} would do.
|
| 1099 |
+
*
|
| 1100 |
+
* Also updates bytes-read counter.
|
| 1101 |
+
*/
|
| 1102 |
+
private int readOneByte() throws IOException {
|
| 1103 |
+
final int b = in.read();
|
| 1104 |
+
if (b != -1) {
|
| 1105 |
+
count(1);
|
| 1106 |
+
}
|
| 1107 |
+
return b;
|
| 1108 |
+
}
|
| 1109 |
+
|
| 1110 |
+
private boolean isFirstByteOfEocdSig(final int b) {
    // First byte of the "end of central directory" signature.
    return b == ZipArchiveOutputStream.EOCD_SIG[0];
}
|
| 1113 |
+
|
| 1114 |
+
// Magic string that terminates an APK Signing Block (v2 APK signature scheme).
private static final byte[] APK_SIGNING_BLOCK_MAGIC = new byte[] {
    'A', 'P', 'K', ' ', 'S', 'i', 'g', ' ', 'B', 'l', 'o', 'c', 'k', ' ', '4', '2',
};
private static final BigInteger LONG_MAX = BigInteger.valueOf(Long.MAX_VALUE);
|
| 1118 |
+
|
| 1119 |
+
/**
|
| 1120 |
+
* Checks whether this might be an APK Signing Block.
|
| 1121 |
+
*
|
| 1122 |
+
* <p>Unfortunately the APK signing block does not start with some kind of signature, it rather ends with one. It
|
| 1123 |
+
* starts with a length, so what we do is parse the suspect length, skip ahead far enough, look for the signature
|
| 1124 |
+
* and if we've found it, return true.</p>
|
| 1125 |
+
*
|
| 1126 |
+
* @param suspectLocalFileHeader the bytes read from the underlying stream in the expectation that they would hold
|
| 1127 |
+
* the local file header of the next entry.
|
| 1128 |
+
*
|
| 1129 |
+
* @return true if this looks like a APK signing block
|
| 1130 |
+
*
|
| 1131 |
+
* @see <a href="https://source.android.com/security/apksigning/v2">https://source.android.com/security/apksigning/v2</a>
|
| 1132 |
+
*/
|
| 1133 |
+
private boolean isApkSigningBlock(byte[] suspectLocalFileHeader) throws IOException {
    // length of block excluding the size field itself
    BigInteger len = ZipEightByteInteger.getValue(suspectLocalFileHeader);
    // LFH has already been read and all but the first eight bytes contain (part of) the APK signing block,
    // also subtract 16 bytes in order to position us at the magic string
    BigInteger toSkip = len.add(BigInteger.valueOf(DWORD - suspectLocalFileHeader.length
        - (long) APK_SIGNING_BLOCK_MAGIC.length));
    byte[] magic = new byte[APK_SIGNING_BLOCK_MAGIC.length];

    try {
        if (toSkip.signum() < 0) {
            // suspectLocalFileHeader contains the start of suspect magic string
            int off = suspectLocalFileHeader.length + toSkip.intValue();
            // length was shorter than magic length
            if (off < DWORD) {
                return false;
            }
            int bytesInBuffer = Math.abs(toSkip.intValue());
            System.arraycopy(suspectLocalFileHeader, off, magic, 0, Math.min(bytesInBuffer, magic.length));
            if (bytesInBuffer < magic.length) {
                readFully(magic, bytesInBuffer);
            }
        } else {
            // Skip ahead (in Long.MAX_VALUE chunks if necessary) to where the
            // magic string should be, then read it.
            while (toSkip.compareTo(LONG_MAX) > 0) {
                realSkip(Long.MAX_VALUE);
                toSkip = toSkip.add(LONG_MAX.negate());
            }
            realSkip(toSkip.longValue());
            readFully(magic);
        }
    } catch (EOFException ex) { //NOSONAR
        // length was invalid
        return false;
    }
    return Arrays.equals(magic, APK_SIGNING_BLOCK_MAGIC);
}
|
| 1169 |
+
|
| 1170 |
+
/**
|
| 1171 |
+
* Structure collecting information for the entry that is
|
| 1172 |
+
* currently being read.
|
| 1173 |
+
*/
|
| 1174 |
+
    private static final class CurrentEntry {

        /**
         * Current ZIP entry.
         */
        private final ZipArchiveEntry entry = new ZipArchiveEntry();

        /**
         * Does the entry use a data descriptor?
         */
        private boolean hasDataDescriptor;

        /**
         * Does the entry have a ZIP64 extended information extra field.
         */
        private boolean usesZip64;

        /**
         * Number of bytes of entry content read by the client if the
         * entry is STORED.
         */
        private long bytesRead;

        /**
         * Number of bytes of entry content read from the stream.
         *
         * <p>This may be more than the actual entry's length as some
         * stuff gets buffered up and needs to be pushed back when the
         * end of the entry has been reached.</p>
         */
        private long bytesReadFromStream;

        /**
         * The checksum calculated as the current entry is read.
         */
        private final CRC32 crc = new CRC32();

        /**
         * The input stream decompressing the data for shrunk and imploded entries.
         */
        private InputStream in;
    }
|
| 1216 |
+
|
| 1217 |
+
/**
|
| 1218 |
+
* Bounded input stream adapted from commons-io
|
| 1219 |
+
*/
|
| 1220 |
+
private class BoundedInputStream extends InputStream {
|
| 1221 |
+
|
| 1222 |
+
/** the wrapped input stream */
|
| 1223 |
+
private final InputStream in;
|
| 1224 |
+
|
| 1225 |
+
/** the max length to provide */
|
| 1226 |
+
private final long max;
|
| 1227 |
+
|
| 1228 |
+
/** the number of bytes already returned */
|
| 1229 |
+
private long pos = 0;
|
| 1230 |
+
|
| 1231 |
+
/**
|
| 1232 |
+
* Creates a new <code>BoundedInputStream</code> that wraps the given input
|
| 1233 |
+
* stream and limits it to a certain size.
|
| 1234 |
+
*
|
| 1235 |
+
* @param in The wrapped input stream
|
| 1236 |
+
* @param size The maximum number of bytes to return
|
| 1237 |
+
*/
|
| 1238 |
+
public BoundedInputStream(final InputStream in, final long size) {
|
| 1239 |
+
this.max = size;
|
| 1240 |
+
this.in = in;
|
| 1241 |
+
}
|
| 1242 |
+
|
| 1243 |
+
@Override
|
| 1244 |
+
public int read() throws IOException {
|
| 1245 |
+
if (max >= 0 && pos >= max) {
|
| 1246 |
+
return -1;
|
| 1247 |
+
}
|
| 1248 |
+
final int result = in.read();
|
| 1249 |
+
pos++;
|
| 1250 |
+
count(1);
|
| 1251 |
+
current.bytesReadFromStream++;
|
| 1252 |
+
return result;
|
| 1253 |
+
}
|
| 1254 |
+
|
| 1255 |
+
@Override
|
| 1256 |
+
public int read(final byte[] b) throws IOException {
|
| 1257 |
+
return this.read(b, 0, b.length);
|
| 1258 |
+
}
|
| 1259 |
+
|
| 1260 |
+
@Override
|
| 1261 |
+
public int read(final byte[] b, final int off, final int len) throws IOException {
|
| 1262 |
+
if (len == 0) {
|
| 1263 |
+
return 0;
|
| 1264 |
+
}
|
| 1265 |
+
if (max >= 0 && pos >= max) {
|
| 1266 |
+
return -1;
|
| 1267 |
+
}
|
| 1268 |
+
final long maxRead = max >= 0 ? Math.min(len, max - pos) : len;
|
| 1269 |
+
final int bytesRead = in.read(b, off, (int) maxRead);
|
| 1270 |
+
|
| 1271 |
+
if (bytesRead == -1) {
|
| 1272 |
+
return -1;
|
| 1273 |
+
}
|
| 1274 |
+
|
| 1275 |
+
pos += bytesRead;
|
| 1276 |
+
count(bytesRead);
|
| 1277 |
+
current.bytesReadFromStream += bytesRead;
|
| 1278 |
+
return bytesRead;
|
| 1279 |
+
}
|
| 1280 |
+
|
| 1281 |
+
@Override
|
| 1282 |
+
public long skip(final long n) throws IOException {
|
| 1283 |
+
final long toSkip = max >= 0 ? Math.min(n, max - pos) : n;
|
| 1284 |
+
final long skippedBytes = IOUtils.skip(in, toSkip);
|
| 1285 |
+
pos += skippedBytes;
|
| 1286 |
+
return skippedBytes;
|
| 1287 |
+
}
|
| 1288 |
+
|
| 1289 |
+
@Override
|
| 1290 |
+
public int available() throws IOException {
|
| 1291 |
+
if (max >= 0 && pos >= max) {
|
| 1292 |
+
return 0;
|
| 1293 |
+
}
|
| 1294 |
+
return in.available();
|
| 1295 |
+
}
|
| 1296 |
+
}
|
| 1297 |
+
}
|
local-test-commons-compress-full-01-vuln_5/afc-commons-compress/src/test/resources/COMPRESS-477/split_zip_created_by_zip/file_to_compare_1
ADDED
|
@@ -0,0 +1,38 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
/*
|
| 2 |
+
* Licensed to the Apache Software Foundation (ASF) under one
|
| 3 |
+
* or more contributor license agreements. See the NOTICE file
|
| 4 |
+
* distributed with this work for additional information
|
| 5 |
+
* regarding copyright ownership. The ASF licenses this file
|
| 6 |
+
* to you under the Apache License, Version 2.0 (the
|
| 7 |
+
* "License"); you may not use this file except in compliance
|
| 8 |
+
* with the License. You may obtain a copy of the License at
|
| 9 |
+
*
|
| 10 |
+
* http://www.apache.org/licenses/LICENSE-2.0
|
| 11 |
+
*
|
| 12 |
+
* Unless required by applicable law or agreed to in writing,
|
| 13 |
+
* software distributed under the License is distributed on an
|
| 14 |
+
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
| 15 |
+
* KIND, either express or implied. See the License for the
|
| 16 |
+
* specific language governing permissions and limitations
|
| 17 |
+
* under the License.
|
| 18 |
+
*/
|
| 19 |
+
package org.apache.commons.compress.archivers.dump;
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
/**
|
| 23 |
+
* Unsupported compression algorithm. The dump archive uses an unsupported
|
| 24 |
+
* compression algorithm (BZLIB2 or LZO).
|
| 25 |
+
*/
|
| 26 |
+
public class UnsupportedCompressionAlgorithmException
|
| 27 |
+
extends DumpArchiveException {
|
| 28 |
+
private static final long serialVersionUID = 1L;
|
| 29 |
+
|
| 30 |
+
public UnsupportedCompressionAlgorithmException() {
|
| 31 |
+
super("this file uses an unsupported compression algorithm.");
|
| 32 |
+
}
|
| 33 |
+
|
| 34 |
+
public UnsupportedCompressionAlgorithmException(final String alg) {
|
| 35 |
+
super("this file uses an unsupported compression algorithm: " + alg +
|
| 36 |
+
".");
|
| 37 |
+
}
|
| 38 |
+
}
|
local-test-commons-compress-full-01-vuln_5/afc-commons-compress/src/test/resources/COMPRESS-477/split_zip_created_by_zip/file_to_compare_2
ADDED
|
@@ -0,0 +1,79 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
/*
|
| 2 |
+
* Licensed to the Apache Software Foundation (ASF) under one
|
| 3 |
+
* or more contributor license agreements. See the NOTICE file
|
| 4 |
+
* distributed with this work for additional information
|
| 5 |
+
* regarding copyright ownership. The ASF licenses this file
|
| 6 |
+
* to you under the Apache License, Version 2.0 (the
|
| 7 |
+
* "License"); you may not use this file except in compliance
|
| 8 |
+
* with the License. You may obtain a copy of the License at
|
| 9 |
+
*
|
| 10 |
+
* http://www.apache.org/licenses/LICENSE-2.0
|
| 11 |
+
*
|
| 12 |
+
* Unless required by applicable law or agreed to in writing,
|
| 13 |
+
* software distributed under the License is distributed on an
|
| 14 |
+
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
| 15 |
+
* KIND, either express or implied. See the License for the
|
| 16 |
+
* specific language governing permissions and limitations
|
| 17 |
+
* under the License.
|
| 18 |
+
*/
|
| 19 |
+
|
| 20 |
+
package org.apache.commons.compress.compressors.deflate;
|
| 21 |
+
|
| 22 |
+
import java.util.zip.Deflater;
|
| 23 |
+
|
| 24 |
+
/**
|
| 25 |
+
* Parameters for the Deflate compressor.
|
| 26 |
+
* @since 1.9
|
| 27 |
+
*/
|
| 28 |
+
public class DeflateParameters {

    // Emit (compress) / expect (decompress) the zlib wrapper by default.
    private boolean zlibHeader = true;

    // Level handed to java.util.zip.Deflater; -1 is the library default.
    private int compressionLevel = Deflater.DEFAULT_COMPRESSION;

    /**
     * Whether or not the zlib header shall be written (when
     * compressing) or expected (when decompressing).
     *
     * @return true if zlib header shall be written
     */
    public boolean withZlibHeader() {
        return zlibHeader;
    }

    /**
     * Sets the zlib header presence parameter.
     *
     * <p>This affects whether or not the zlib header will be written
     * (when compressing) or expected (when decompressing).</p>
     *
     * @param zlibHeader true if zlib header shall be written
     */
    public void setWithZlibHeader(final boolean zlibHeader) {
        this.zlibHeader = zlibHeader;
    }

    /**
     * The compression level.
     *
     * @see #setCompressionLevel
     * @return the compression level
     */
    public int getCompressionLevel() {
        return compressionLevel;
    }

    /**
     * Sets the compression level.
     *
     * @param compressionLevel the compression level (between 0 and 9)
     * @see Deflater#NO_COMPRESSION
     * @see Deflater#BEST_SPEED
     * @see Deflater#DEFAULT_COMPRESSION
     * @see Deflater#BEST_COMPRESSION
     */
    public void setCompressionLevel(final int compressionLevel) {
        if (compressionLevel > 9 || compressionLevel < -1) {
            throw new IllegalArgumentException("Invalid Deflate compression level: " + compressionLevel);
        }
        this.compressionLevel = compressionLevel;
    }
}
|
local-test-commons-compress-full-01-vuln_5/afc-commons-compress/src/test/resources/org/apache/commons/compress/COMPRESS-644/ARW05UP.ICO
ADDED
|
|
local-test-commons-compress-full-01-vuln_5/afc-commons-compress/src/test/resources/org/apache/commons/compress/COMPRESS-661/testARofText.ar
ADDED
|
@@ -0,0 +1,5 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
!<arch>
|
| 2 |
+
testTXT.txt/ 1262968202 500 500 100644 47 `
|
| 3 |
+
Test d'indexation de Txt
|
| 4 |
+
http://www.apache.org
|
| 5 |
+
|
local-test-commons-compress-full-01-vuln_5/afc-commons-compress/src/test/resources/org/apache/commons/compress/ar/number_parsing/bad_length-fail.ar
ADDED
|
@@ -0,0 +1,8 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
!<arch>
|
| 2 |
+
// 68 `
|
| 3 |
+
this_is_a_long_file_name.txt/
|
| 4 |
+
this_is_a_long_file_name_as_well.txt/
|
| 5 |
+
/0 1454693980 1000 1000 100664 1.23 `
|
| 6 |
+
Hello, world!
|
| 7 |
+
/30 1454694016 1000 1000 100664 4 `
|
| 8 |
+
Bye
|
local-test-commons-compress-full-01-vuln_5/afc-commons-compress/src/test/resources/org/apache/commons/compress/ar/number_parsing/bad_long_namelen_gnu1-fail.ar
ADDED
|
@@ -0,0 +1,8 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
!<arch>
|
| 2 |
+
// 68 `
|
| 3 |
+
this_is_a_long_file_name.txt/
|
| 4 |
+
this_is_a_long_file_name_as_well.txt/
|
| 5 |
+
/9999999999 1454693980 1000 1000 100664 14 `
|
| 6 |
+
Hello, world!
|
| 7 |
+
/30 1454694016 1000 1000 100664 4 `
|
| 8 |
+
Bye
|
local-test-commons-compress-full-01-vuln_5/afc-commons-compress/src/test/resources/org/apache/commons/compress/ar/number_parsing/bad_long_namelen_gnu2-fail.ar
ADDED
|
@@ -0,0 +1,6 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
!<arch>
|
| 2 |
+
// 68 `
|
| 3 |
+
this_is_a_long_file_name.txt/
|
| 4 |
+
this_is_a_long_file_name_as_well.txt/
|
| 5 |
+
/29 1454694016 1000 1000 100664 4 `
|
| 6 |
+
Bye
|
local-test-commons-compress-full-01-vuln_5/afc-commons-compress/src/test/resources/org/apache/commons/compress/ar/number_parsing/bad_long_namelen_gnu3-fail.ar
ADDED
|
Binary file (274 Bytes). View file
|
|
|
local-test-commons-compress-full-01-vuln_5/afc-commons-compress/src/test/resources/org/apache/commons/compress/ar/number_parsing/bad_modified-fail.ar
ADDED
|
@@ -0,0 +1,8 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
!<arch>
|
| 2 |
+
// 68 `
|
| 3 |
+
this_is_a_long_file_name.txt/
|
| 4 |
+
this_is_a_long_file_name_as_well.txt/
|
| 5 |
+
/0 9e99999999 1000 1000 100664 14 `
|
| 6 |
+
Hello, world!
|
| 7 |
+
/30 1454694016 1000 1000 100664 4 `
|
| 8 |
+
Bye
|
local-test-commons-compress-full-01-vuln_5/afc-commons-compress/src/test/resources/org/apache/commons/compress/ar/number_parsing/bad_user-fail.ar
ADDED
|
@@ -0,0 +1,8 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
!<arch>
|
| 2 |
+
// 68 `
|
| 3 |
+
this_is_a_long_file_name.txt/
|
| 4 |
+
this_is_a_long_file_name_as_well.txt/
|
| 5 |
+
/0 1454693980 9e99 1000 100664 14 `
|
| 6 |
+
Hello, world!
|
| 7 |
+
/30 1454694016 1000 1000 100664 4 `
|
| 8 |
+
Bye
|
local-test-commons-compress-full-01-vuln_5/afc-commons-compress/src/test/resources/org/apache/commons/compress/dump/invalid_compression_type-fail.dump
ADDED
|
Binary file (11.9 kB). View file
|
|
|
local-test-commons-compress-full-01-vuln_5/afc-commons-compress/src/test/resources/org/apache/commons/compress/fuzz/crash-f2efd9eaeb86cda597d07b5e3c3d81363633c2da
ADDED
|
Binary file (8.94 kB). View file
|
|
|
local-test-commons-compress-full-01-vuln_5/afc-commons-compress/src/test/resources/org/apache/commons/compress/ico/commons-daemon/procrunr.ico
ADDED
|
|
local-test-commons-compress-full-01-vuln_5/afc-commons-compress/src/test/resources/org/apache/commons/compress/ico/commons-daemon/procruns.ico
ADDED
|
|
local-test-commons-compress-full-01-vuln_5/afc-commons-compress/src/test/resources/org/apache/commons/compress/ico/commons-daemon/procrunw.ico
ADDED
|
|
local-test-commons-compress-full-01-vuln_5/afc-commons-compress/src/test/resources/org/apache/commons/compress/ico/commons-imaging/OutOfMemory_epine.ico
ADDED
|
|
local-test-commons-compress-full-01-vuln_5/afc-commons-compress/src/test/resources/org/apache/commons/compress/pack/bandint_oom.pack
ADDED
|
Binary file (88 Bytes). View file
|
|
|
local-test-commons-compress-full-01-vuln_5/afc-commons-compress/src/test/resources/org/apache/commons/compress/pack/references_oom.pack
ADDED
|
Binary file (66 Bytes). View file
|
|
|
local-test-commons-compress-full-01-vuln_5/fuzz-tooling/infra/.dockerignore
ADDED
|
@@ -0,0 +1,9 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
cifuzz/test_data/*
|
| 2 |
+
|
| 3 |
+
# Copied from .gitignore.
|
| 4 |
+
.vscode/
|
| 5 |
+
*.pyc
|
| 6 |
+
build
|
| 7 |
+
*~
|
| 8 |
+
.DS_Store
|
| 9 |
+
*.swp
|
local-test-commons-compress-full-01-vuln_5/fuzz-tooling/infra/build_specified_commit.py
ADDED
|
@@ -0,0 +1,410 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2019 Google LLC
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
"""Module to build a image from a specific commit, branch or pull request.
|
| 15 |
+
|
| 16 |
+
This module is allows each of the OSS Fuzz projects fuzzers to be built
|
| 17 |
+
from a specific point in time. This feature can be used for implementations
|
| 18 |
+
like continuious integration fuzzing and bisection to find errors
|
| 19 |
+
"""
|
| 20 |
+
import argparse
|
| 21 |
+
import bisect
|
| 22 |
+
import datetime
|
| 23 |
+
import os
|
| 24 |
+
import collections
|
| 25 |
+
import json
|
| 26 |
+
import logging
|
| 27 |
+
import re
|
| 28 |
+
import shutil
|
| 29 |
+
import tempfile
|
| 30 |
+
|
| 31 |
+
import helper
|
| 32 |
+
import repo_manager
|
| 33 |
+
import retry
|
| 34 |
+
import utils
|
| 35 |
+
|
| 36 |
+
BuildData = collections.namedtuple(
|
| 37 |
+
'BuildData', ['project_name', 'engine', 'sanitizer', 'architecture'])
|
| 38 |
+
|
| 39 |
+
_GIT_DIR_MARKER = 'gitdir: '
|
| 40 |
+
_IMAGE_BUILD_TRIES = 3
|
| 41 |
+
|
| 42 |
+
|
| 43 |
+
class BaseBuilderRepo:
  """Chronological index of base-builder image digests."""

  def __init__(self):
    self.timestamps = []
    self.digests = []

  def add_digest(self, timestamp, digest):
    """Record |digest| as pushed at |timestamp| (must be added in order)."""
    self.timestamps.append(timestamp)
    self.digests.append(digest)

  def find_digest(self, timestamp):
    """Return the digest of the newest image at or before |timestamp|."""
    index = bisect.bisect_right(self.timestamps, timestamp) - 1
    if index < 0:
      logging.error('Failed to find suitable base-builder.')
      return None
    return self.digests[index]
|
| 63 |
+
|
| 64 |
+
|
| 65 |
+
def _replace_gitdir(src_dir, file_path):
  """Rewrite an absolute `gitdir:` pointer in |file_path| to a relative one.

  Submodule `.git` files written on the host contain absolute paths; once the
  checkout is mounted at /src inside the container those paths are wrong, so
  they are rebased relative to /src. Returns early (without rewriting) if the
  pointer is already relative.
  """
  with open(file_path) as handle:
    lines = handle.readlines()

  new_lines = []
  for line in lines:
    if line.startswith(_GIT_DIR_MARKER):
      absolute_path = line[len(_GIT_DIR_MARKER):].strip()
      if not os.path.isabs(absolute_path):
        # Already relative.
        return

      current_dir = os.path.dirname(file_path)
      # Rebase to /src rather than the host src dir.
      base_dir = current_dir.replace(src_dir, '/src')
      relative_path = os.path.relpath(absolute_path, base_dir)
      logging.info('Replacing absolute submodule gitdir from %s to %s',
                   absolute_path, relative_path)

      # Fix: re-append the newline that strip() removed. Without it the
      # rewritten line would be fused with the following line of the file.
      line = _GIT_DIR_MARKER + relative_path + '\n'

    new_lines.append(line)

  with open(file_path, 'w') as handle:
    handle.write(''.join(new_lines))
|
| 91 |
+
|
| 92 |
+
|
| 93 |
+
def _make_gitdirs_relative(src_dir):
  """Walk |src_dir| and relativize every submodule `.git` gitdir pointer."""
  for root_dir, _, files in os.walk(src_dir):
    for filename in files:
      if filename == '.git':
        _replace_gitdir(src_dir, os.path.join(root_dir, filename))
|
| 102 |
+
|
| 103 |
+
|
| 104 |
+
def _replace_base_builder_digest(dockerfile_path, digest):
|
| 105 |
+
"""Replace the base-builder digest in a Dockerfile."""
|
| 106 |
+
with open(dockerfile_path) as handle:
|
| 107 |
+
lines = handle.readlines()
|
| 108 |
+
|
| 109 |
+
new_lines = []
|
| 110 |
+
for line in lines:
|
| 111 |
+
if line.strip().startswith('FROM'):
|
| 112 |
+
line = 'FROM ghcr.io/aixcc-finals/base-builder@' + digest + '\n'
|
| 113 |
+
|
| 114 |
+
new_lines.append(line)
|
| 115 |
+
|
| 116 |
+
with open(dockerfile_path, 'w') as handle:
|
| 117 |
+
handle.write(''.join(new_lines))
|
| 118 |
+
|
| 119 |
+
|
| 120 |
+
def copy_src_from_docker(project_name, host_dir):
  """Copy the project image's /src tree into |host_dir| and return its path."""
  image_name = 'gcr.io/oss-fuzz/' + project_name
  src_dir = os.path.join(host_dir, 'src')
  # Start from a clean slate so files from a previous copy can't leak in.
  if os.path.exists(src_dir):
    shutil.rmtree(src_dir, ignore_errors=True)

  helper.docker_run([
      '-v',
      host_dir + ':/out',
      image_name,
      'cp',
      '-r',
      '-p',
      '/src',
      '/out',
  ])

  # Submodules can have gitdir entries which point to absolute paths. Make them
  # relative, as otherwise we can't do operations on the checkout on the host.
  _make_gitdirs_relative(src_dir)
  return src_dir
|
| 144 |
+
|
| 145 |
+
|
| 146 |
+
@retry.wrap(_IMAGE_BUILD_TRIES, 2)
def _build_image_with_retries(project_name):
  """Build the |project_name| docker image, retrying up to _IMAGE_BUILD_TRIES
  times (with a 2 second delay) via the retry decorator."""
  return helper.build_image_impl(helper.Project(project_name))
|
| 150 |
+
|
| 151 |
+
|
| 152 |
+
def get_required_post_checkout_steps(dockerfile_path):
  """Get required post checkout steps (best effort).

  Scans the Dockerfile for RUN commands touching build.sh under $SRC//src
  that appear after the last checkout (git/svn/hg) command; those must be
  re-run after changing the source revision.
  """
  checkout_pattern = re.compile(r'\s*RUN\s*(git|svn|hg)')
  post_run_pattern = re.compile(r'\s*RUN\s*(.*build\.sh.*(\$SRC|/src).*)')

  with open(dockerfile_path) as handle:
    lines = handle.readlines()

  steps = []
  for i, line in enumerate(lines):
    if checkout_pattern.match(line):
      # A later checkout invalidates any build.sh steps gathered so far.
      steps = []
    else:
      match = post_run_pattern.match(line)
      if match:
        steps.append((helper.workdir_from_lines(lines[:i]), match.group(1)))

  return steps
|
| 177 |
+
|
| 178 |
+
|
| 179 |
+
# pylint: disable=too-many-locals
|
| 180 |
+
def build_fuzzers_from_commit(commit,
                              build_repo_manager,
                              host_src_path,
                              build_data,
                              base_builder_repo=None):
  """Builds a OSS-Fuzz fuzzer at a specific commit SHA.

  Args:
    commit: The commit SHA to build the fuzzers at.
    build_repo_manager: The OSS-Fuzz project's repo manager to be built at.
    host_src_path: Host-side path of the copied /src checkout.
    build_data: A struct containing project build information.
    base_builder_repo: A BaseBuilderRepo.
  Returns:
    The result of helper.build_fuzzers_impl (truthy on success), or False on
    unrecoverable failure.
  """
  oss_fuzz_repo_manager = repo_manager.RepoManager(helper.OSS_FUZZ_DIR)
  num_retry = 1

  def cleanup():
    # Re-copy /src for a clean checkout every time.
    copy_src_from_docker(build_data.project_name,
                         os.path.dirname(host_src_path))
    build_repo_manager.fetch_all_remotes()

  projects_dir = os.path.join('projects', build_data.project_name)
  dockerfile_path = os.path.join(projects_dir, 'Dockerfile')

  # First attempt builds at HEAD's project config; on failure, retry once with
  # an OSS-Fuzz project config (and base-builder) closer to the commit date.
  for i in range(num_retry + 1):
    build_repo_manager.checkout_commit(commit, clean=False)

    # build.sh copies (etc.) performed after checkout in the Dockerfile must
    # be replayed, since the checkout above changed the source tree.
    post_checkout_steps = get_required_post_checkout_steps(dockerfile_path)
    for workdir, post_checkout_step in post_checkout_steps:
      logging.info('Running post-checkout step `%s` in %s.', post_checkout_step,
                   workdir)
      helper.docker_run([
          '-w',
          workdir,
          '-v',
          host_src_path + ':' + '/src',
          'gcr.io/oss-fuzz/' + build_data.project_name,
          '/bin/bash',
          '-c',
          post_checkout_step,
      ])

    project = helper.Project(build_data.project_name)
    result = helper.build_fuzzers_impl(project=project,
                                       clean=True,
                                       engine=build_data.engine,
                                       sanitizer=build_data.sanitizer,
                                       architecture=build_data.architecture,
                                       env_to_add=None,
                                       source_path=host_src_path,
                                       mount_path='/src')
    if result or i == num_retry:
      break

    # Retry with an OSS-Fuzz builder container that's closer to the project
    # commit date.
    commit_date = build_repo_manager.commit_date(commit)

    # Find first change in the projects/<PROJECT> directory before the project
    # commit date.
    oss_fuzz_commit, _, _ = oss_fuzz_repo_manager.git([
        'log', '--before=' + commit_date.isoformat(), '-n1', '--format=%H',
        projects_dir
    ],
                                                      check_result=True)
    oss_fuzz_commit = oss_fuzz_commit.strip()
    if not oss_fuzz_commit:
      logging.info(
          'Could not find first OSS-Fuzz commit prior to upstream commit. '
          'Falling back to oldest integration commit.')

      # Find the oldest commit.
      oss_fuzz_commit, _, _ = oss_fuzz_repo_manager.git(
          ['log', '--reverse', '--format=%H', projects_dir], check_result=True)

      oss_fuzz_commit = oss_fuzz_commit.splitlines()[0].strip()

    if not oss_fuzz_commit:
      logging.error('Failed to get oldest integration commit.')
      break

    logging.info('Build failed. Retrying on earlier OSS-Fuzz commit %s.',
                 oss_fuzz_commit)

    # Check out projects/<PROJECT> dir to the commit that was found.
    oss_fuzz_repo_manager.git(['checkout', oss_fuzz_commit, projects_dir],
                              check_result=True)

    # Also use the closest base-builder we can find.
    if base_builder_repo:
      base_builder_digest = base_builder_repo.find_digest(commit_date)
      if not base_builder_digest:
        return False

      logging.info('Using base-builder with digest %s.', base_builder_digest)
      _replace_base_builder_digest(dockerfile_path, base_builder_digest)

    # Rebuild image and re-copy src dir since things in /src could have changed.
    if not _build_image_with_retries(build_data.project_name):
      logging.error('Failed to rebuild image.')
      return False

    cleanup()

  cleanup()
  return result
|
| 289 |
+
|
| 290 |
+
|
| 291 |
+
def detect_main_repo(project_name, repo_name=None, commit=None):
  """Checks a docker image for the main repo of an OSS-Fuzz project.

  Note: The default is to use the repo name to detect the main repo.

  Args:
    project_name: The name of the oss-fuzz project.
    repo_name: The name of the main repo in an OSS-Fuzz project.
    commit: A commit SHA that is associated with the main repo.

  Returns:
    A tuple containing (the repo's origin, the repo's path), or (None, None)
    on failure.
  """

  if not repo_name and not commit:
    logging.error(
        'Error: can not detect main repo without a repo_name or a commit.')
    return None, None
  if repo_name and commit:
    logging.info(
        'Both repo name and commit specific. Using repo name for detection.')

  # Change to oss-fuzz main directory so helper.py runs correctly.
  utils.chdir_to_root()
  if not _build_image_with_retries(project_name):
    logging.error('Error: building %s image failed.', project_name)
    return None, None
  docker_image_name = 'gcr.io/oss-fuzz/' + project_name
  # detect_repo.py runs inside the image and prints the detected repo.
  command_to_run = [
      'docker', 'run', '--rm', '-t', docker_image_name, 'python3',
      os.path.join('/opt', 'cifuzz', 'detect_repo.py')
  ]
  if repo_name:
    command_to_run.extend(['--repo_name', repo_name])
  else:
    command_to_run.extend(['--example_commit', commit])
  out, _, _ = utils.execute(command_to_run)
  # Parse "Detected repo: <origin> <path>" from the container's output.
  match = re.search(r'\bDetected repo: ([^ ]+) ([^ ]+)', out.rstrip())
  if match and match.group(1) and match.group(2):
    return match.group(1), match.group(2)

  logging.error('Failed to detect repo:\n%s', out)
  return None, None
|
| 334 |
+
|
| 335 |
+
|
| 336 |
+
def load_base_builder_repo():
  """Build a BaseBuilderRepo from the registry tag listing, or None if the
  gcloud CLI is unavailable."""
  gcloud_path = shutil.which('gcloud')
  if not gcloud_path:
    logging.warning('gcloud not found in PATH.')
    return None

  raw_listing, _, _ = utils.execute([
      gcloud_path,
      'container',
      'images',
      'list-tags',
      'ghcr.io/aixcc-finals/base-builder',
      '--format=json',
      '--sort-by=timestamp',
  ],
                                    check_result=True)

  repo = BaseBuilderRepo()
  for image in json.loads(raw_listing):
    # Normalize the registry's timestamp to UTC before indexing.
    pushed_at = datetime.datetime.fromisoformat(
        image['timestamp']['datetime']).astimezone(datetime.timezone.utc)
    repo.add_digest(pushed_at, image['digest'])
  return repo
|
| 362 |
+
|
| 363 |
+
|
| 364 |
+
def main():
|
| 365 |
+
"""Main function."""
|
| 366 |
+
logging.getLogger().setLevel(logging.INFO)
|
| 367 |
+
|
| 368 |
+
parser = argparse.ArgumentParser(
|
| 369 |
+
description='Build fuzzers at a specific commit')
|
| 370 |
+
parser.add_argument('--project_name',
|
| 371 |
+
help='The name of the project where the bug occurred.',
|
| 372 |
+
required=True)
|
| 373 |
+
parser.add_argument('--commit',
|
| 374 |
+
help='The newest commit SHA to be bisected.',
|
| 375 |
+
required=True)
|
| 376 |
+
parser.add_argument('--engine',
|
| 377 |
+
help='The default is "libfuzzer".',
|
| 378 |
+
default='libfuzzer')
|
| 379 |
+
parser.add_argument('--sanitizer',
|
| 380 |
+
default='address',
|
| 381 |
+
help='The default is "address".')
|
| 382 |
+
parser.add_argument('--architecture', default='x86_64')
|
| 383 |
+
|
| 384 |
+
args = parser.parse_args()
|
| 385 |
+
|
| 386 |
+
repo_url, repo_path = detect_main_repo(args.project_name, commit=args.commit)
|
| 387 |
+
|
| 388 |
+
if not repo_url or not repo_path:
|
| 389 |
+
raise ValueError('Main git repo can not be determined.')
|
| 390 |
+
|
| 391 |
+
with tempfile.TemporaryDirectory() as tmp_dir:
|
| 392 |
+
host_src_dir = copy_src_from_docker(args.project_name, tmp_dir)
|
| 393 |
+
build_repo_manager = repo_manager.RepoManager(
|
| 394 |
+
os.path.join(host_src_dir, os.path.basename(repo_path)))
|
| 395 |
+
base_builder_repo = load_base_builder_repo()
|
| 396 |
+
|
| 397 |
+
build_data = BuildData(project_name=args.project_name,
|
| 398 |
+
engine=args.engine,
|
| 399 |
+
sanitizer=args.sanitizer,
|
| 400 |
+
architecture=args.architecture)
|
| 401 |
+
if not build_fuzzers_from_commit(args.commit,
|
| 402 |
+
build_repo_manager,
|
| 403 |
+
host_src_dir,
|
| 404 |
+
build_data,
|
| 405 |
+
base_builder_repo=base_builder_repo):
|
| 406 |
+
raise RuntimeError('Failed to build.')
|
| 407 |
+
|
| 408 |
+
|
| 409 |
+
if __name__ == '__main__':
|
| 410 |
+
main()
|
local-test-commons-compress-full-01-vuln_5/fuzz-tooling/infra/cifuzz/config_utils.py
ADDED
|
@@ -0,0 +1,283 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2021 Google LLC
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
"""Module for getting the configuration CIFuzz needs to run."""
|
| 15 |
+
|
| 16 |
+
import enum
|
| 17 |
+
import importlib
|
| 18 |
+
import logging
|
| 19 |
+
import os
|
| 20 |
+
import sys
|
| 21 |
+
|
| 22 |
+
import environment
|
| 23 |
+
|
| 24 |
+
# pylint: disable=wrong-import-position,import-error
|
| 25 |
+
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
| 26 |
+
|
| 27 |
+
import platform_config
|
| 28 |
+
import constants
|
| 29 |
+
|
| 30 |
+
SANITIZERS = ['address', 'memory', 'undefined', 'coverage']
|
| 31 |
+
|
| 32 |
+
# TODO(metzman): Set these on config objects so there's one source of truth.
|
| 33 |
+
DEFAULT_ENGINE = 'libfuzzer'
|
| 34 |
+
|
| 35 |
+
# This module deals a lot with env variables. Many of these will be set by users
|
| 36 |
+
# and others beyond CIFuzz's control. Thus, you should be careful about using
|
| 37 |
+
# the environment.py helpers for getting env vars, since it can cause values
|
| 38 |
+
# that should be interpreted as strings to be returned as other types (bools or
|
| 39 |
+
# ints for example). The environment.py helpers should not be used for values
|
| 40 |
+
# that are supposed to be strings.
|
| 41 |
+
|
| 42 |
+
|
| 43 |
+
def _get_sanitizer():
|
| 44 |
+
return os.getenv('SANITIZER', constants.DEFAULT_SANITIZER).lower()
|
| 45 |
+
|
| 46 |
+
|
| 47 |
+
def _get_architecture():
|
| 48 |
+
return os.getenv('ARCHITECTURE', constants.DEFAULT_ARCHITECTURE).lower()
|
| 49 |
+
|
| 50 |
+
|
| 51 |
+
def _is_dry_run():
|
| 52 |
+
"""Returns True if configured to do a dry run."""
|
| 53 |
+
return environment.get_bool('DRY_RUN', False)
|
| 54 |
+
|
| 55 |
+
|
| 56 |
+
def _get_language():
|
| 57 |
+
"""Returns the project language."""
|
| 58 |
+
# Get language from environment. We took this approach because the convenience
|
| 59 |
+
# given to OSS-Fuzz users by not making them specify the language again (and
|
| 60 |
+
# getting it from the project.yaml) is outweighed by the complexity in
|
| 61 |
+
# implementing this. A lot of the complexity comes from our unittests not
|
| 62 |
+
# setting a proper projet at this point.
|
| 63 |
+
return os.getenv('LANGUAGE', constants.DEFAULT_LANGUAGE)
|
| 64 |
+
|
| 65 |
+
|
| 66 |
+
def _get_extra_environment_variables():
|
| 67 |
+
"""Gets extra environment variables specified by the user with
|
| 68 |
+
CFL_EXTRA_$NAME=$VALUE."""
|
| 69 |
+
return [key for key in os.environ if key.startswith('CFL_EXTRA_')]
|
| 70 |
+
|
| 71 |
+
|
| 72 |
+
# pylint: disable=too-many-instance-attributes
|
| 73 |
+
|
| 74 |
+
|
| 75 |
+
class ConfigError(Exception):
|
| 76 |
+
"""Error for invalid configuration."""
|
| 77 |
+
|
| 78 |
+
|
| 79 |
+
class BaseConfig:
|
| 80 |
+
"""Object containing constant configuration for CIFuzz."""
|
| 81 |
+
|
| 82 |
+
class Platform(enum.Enum):
|
| 83 |
+
"""Enum representing the different platforms CIFuzz runs on."""
|
| 84 |
+
EXTERNAL_GITHUB = 0 # Non-OSS-Fuzz on GitHub actions.
|
| 85 |
+
INTERNAL_GITHUB = 1 # OSS-Fuzz on GitHub actions.
|
| 86 |
+
INTERNAL_GENERIC_CI = 2 # OSS-Fuzz on any CI.
|
| 87 |
+
EXTERNAL_GENERIC_CI = 3 # Non-OSS-Fuzz on any CI.
|
| 88 |
+
|
| 89 |
+
@property
|
| 90 |
+
def is_github(self):
|
| 91 |
+
"""Returns True if running on GitHub."""
|
| 92 |
+
return self.cfl_platform == 'github'
|
| 93 |
+
|
| 94 |
+
def __init__(self):
|
| 95 |
+
# Need to set these before calling self.platform.
|
| 96 |
+
self.oss_fuzz_project_name = os.getenv('OSS_FUZZ_PROJECT_NAME')
|
| 97 |
+
self.cfl_platform = os.getenv('CFL_PLATFORM')
|
| 98 |
+
logging.debug('Is github: %s.', self.is_github)
|
| 99 |
+
|
| 100 |
+
self.platform_conf = _get_platform_config(self.cfl_platform)
|
| 101 |
+
self.base_commit = self.platform_conf.base_commit
|
| 102 |
+
self.base_ref = self.platform_conf.base_ref
|
| 103 |
+
self.pr_ref = self.platform_conf.pr_ref
|
| 104 |
+
self.workspace = self.platform_conf.workspace
|
| 105 |
+
self.project_src_path = self.platform_conf.project_src_path
|
| 106 |
+
self.actor = self.platform_conf.actor
|
| 107 |
+
self.token = self.platform_conf.token
|
| 108 |
+
self.project_repo_owner = self.platform_conf.project_repo_owner
|
| 109 |
+
self.project_repo_name = self.platform_conf.project_repo_name
|
| 110 |
+
self.filestore = self.platform_conf.filestore
|
| 111 |
+
|
| 112 |
+
# This determines if builds are done using docker in docker
|
| 113 |
+
# rather than the normal method which is sibling containers.
|
| 114 |
+
self.docker_in_docker = self.platform_conf.docker_in_docker
|
| 115 |
+
|
| 116 |
+
self.dry_run = _is_dry_run() # Check if failures should not be reported.
|
| 117 |
+
self.sanitizer = _get_sanitizer()
|
| 118 |
+
self.architecture = _get_architecture()
|
| 119 |
+
self.language = _get_language()
|
| 120 |
+
self.low_disk_space = environment.get_bool('LOW_DISK_SPACE', False)
|
| 121 |
+
|
| 122 |
+
self.git_store_repo = os.environ.get('GIT_STORE_REPO')
|
| 123 |
+
self.git_store_branch = os.environ.get('GIT_STORE_BRANCH')
|
| 124 |
+
self.git_store_branch_coverage = os.environ.get('GIT_STORE_BRANCH_COVERAGE',
|
| 125 |
+
self.git_store_branch)
|
| 126 |
+
self.cloud_bucket = os.environ.get('CLOUD_BUCKET')
|
| 127 |
+
self.no_clusterfuzz_deployment = environment.get_bool(
|
| 128 |
+
'NO_CLUSTERFUZZ_DEPLOYMENT', False)
|
| 129 |
+
self.build_integration_path = (
|
| 130 |
+
constants.DEFAULT_EXTERNAL_BUILD_INTEGRATION_PATH)
|
| 131 |
+
|
| 132 |
+
self.parallel_fuzzing = environment.get_bool('PARALLEL_FUZZING', False)
|
| 133 |
+
self.extra_environment_variables = _get_extra_environment_variables()
|
| 134 |
+
self.output_sarif = environment.get_bool('OUTPUT_SARIF', False)
|
| 135 |
+
|
| 136 |
+
# TODO(metzman): Fix tests to create valid configurations and get rid of
|
| 137 |
+
# CIFUZZ_TEST here and in presubmit.py.
|
| 138 |
+
if not os.getenv('CIFUZZ_TEST') and not self.validate():
|
| 139 |
+
raise ConfigError('Invalid Configuration.')
|
| 140 |
+
|
| 141 |
+
def validate(self):
|
| 142 |
+
"""Returns False if the configuration is invalid."""
|
| 143 |
+
# Do validation here so that unittests don't need to make a fully-valid
|
| 144 |
+
# config.
|
| 145 |
+
# pylint: disable=too-many-return-statements
|
| 146 |
+
if not self.workspace:
|
| 147 |
+
logging.error('Must set WORKSPACE.')
|
| 148 |
+
return False
|
| 149 |
+
|
| 150 |
+
if self.sanitizer not in SANITIZERS:
|
| 151 |
+
logging.error('Invalid SANITIZER: %s. Must be one of: %s.',
|
| 152 |
+
self.sanitizer, SANITIZERS)
|
| 153 |
+
return False
|
| 154 |
+
|
| 155 |
+
if self.architecture not in constants.ARCHITECTURES:
|
| 156 |
+
logging.error('Invalid ARCHITECTURE: %s. Must be one of: %s.',
|
| 157 |
+
self.architecture, constants.ARCHITECTURES)
|
| 158 |
+
return False
|
| 159 |
+
|
| 160 |
+
if self.architecture == 'i386' and self.sanitizer != 'address':
|
| 161 |
+
logging.error(
|
| 162 |
+
'ARCHITECTURE=i386 can be used with SANITIZER=address only.')
|
| 163 |
+
return False
|
| 164 |
+
|
| 165 |
+
if self.language not in constants.LANGUAGES:
|
| 166 |
+
logging.error('Invalid LANGUAGE: %s. Must be one of: %s.', self.language,
|
| 167 |
+
constants.LANGUAGES)
|
| 168 |
+
return False
|
| 169 |
+
|
| 170 |
+
if not self.project_repo_name:
|
| 171 |
+
logging.error('Must set REPOSITORY.')
|
| 172 |
+
return False
|
| 173 |
+
|
| 174 |
+
return True
|
| 175 |
+
|
| 176 |
+
@property
|
| 177 |
+
def is_internal(self):
|
| 178 |
+
"""Returns True if this is an OSS-Fuzz project."""
|
| 179 |
+
return bool(self.oss_fuzz_project_name)
|
| 180 |
+
|
| 181 |
+
@property
|
| 182 |
+
def platform(self):
|
| 183 |
+
"""Returns the platform CIFuzz is runnning on."""
|
| 184 |
+
if not self.is_internal:
|
| 185 |
+
if not self.is_github:
|
| 186 |
+
return self.Platform.EXTERNAL_GENERIC_CI
|
| 187 |
+
return self.Platform.EXTERNAL_GITHUB
|
| 188 |
+
|
| 189 |
+
if self.is_github:
|
| 190 |
+
return self.Platform.INTERNAL_GITHUB
|
| 191 |
+
return self.Platform.INTERNAL_GENERIC_CI
|
| 192 |
+
|
| 193 |
+
@property
|
| 194 |
+
def is_coverage(self):
|
| 195 |
+
"""Returns True if this CIFuzz run (building fuzzers and running them) for
|
| 196 |
+
generating a coverage report."""
|
| 197 |
+
return self.sanitizer == 'coverage'
|
| 198 |
+
|
| 199 |
+
|
| 200 |
+
def _get_platform_config(cfl_platform):
|
| 201 |
+
"""Returns the CI environment object for |cfl_platform|."""
|
| 202 |
+
module_name = f'platform_config.{cfl_platform}'
|
| 203 |
+
try:
|
| 204 |
+
cls = importlib.import_module(module_name).PlatformConfig
|
| 205 |
+
except ImportError:
|
| 206 |
+
cls = platform_config.BasePlatformConfig
|
| 207 |
+
return cls()
|
| 208 |
+
|
| 209 |
+
|
| 210 |
+
class RunFuzzersConfig(BaseConfig):
|
| 211 |
+
"""Class containing constant configuration for running fuzzers in CIFuzz."""
|
| 212 |
+
|
| 213 |
+
MODES = ['batch', 'code-change', 'coverage', 'prune']
|
| 214 |
+
|
| 215 |
+
def __init__(self):
|
| 216 |
+
super().__init__()
|
| 217 |
+
# TODO(metzman): Pick a better default for pruning.
|
| 218 |
+
self.fuzz_seconds = int(os.environ.get('FUZZ_SECONDS', 600))
|
| 219 |
+
self.mode = os.environ.get('MODE', 'code-change').lower()
|
| 220 |
+
if self.is_coverage:
|
| 221 |
+
self.mode = 'coverage'
|
| 222 |
+
|
| 223 |
+
self.report_unreproducible_crashes = environment.get_bool(
|
| 224 |
+
'REPORT_UNREPRODUCIBLE_CRASHES', False)
|
| 225 |
+
|
| 226 |
+
self.minimize_crashes = environment.get_bool('MINIMIZE_CRASHES', False)
|
| 227 |
+
if self.mode == 'batch':
|
| 228 |
+
logging.warning(
|
| 229 |
+
'Minimizing crashes reduces fuzzing time in batch fuzzing.')
|
| 230 |
+
self.report_timeouts = environment.get_bool('REPORT_TIMEOUTS', False)
|
| 231 |
+
self.report_ooms = environment.get_bool('REPORT_OOMS', True)
|
| 232 |
+
self.upload_all_crashes = environment.get_bool('UPLOAD_ALL_CRASHES', False)
|
| 233 |
+
|
| 234 |
+
# TODO(metzman): Fix tests to create valid configurations and get rid of
|
| 235 |
+
# CIFUZZ_TEST here and in presubmit.py.
|
| 236 |
+
if not os.getenv('CIFUZZ_TEST') and not self._run_config_validate():
|
| 237 |
+
raise ConfigError('Invalid Run Configuration.')
|
| 238 |
+
|
| 239 |
+
def _run_config_validate(self):
|
| 240 |
+
"""Do extra validation on RunFuzzersConfig.__init__(). Do not name this
|
| 241 |
+
validate or else it will be called when using the parent's __init__ and will
|
| 242 |
+
fail. Returns True if valid."""
|
| 243 |
+
if self.mode not in self.MODES:
|
| 244 |
+
logging.error('Invalid MODE: %s. Must be one of %s.', self.mode,
|
| 245 |
+
self.MODES)
|
| 246 |
+
return False
|
| 247 |
+
|
| 248 |
+
return True
|
| 249 |
+
|
| 250 |
+
|
| 251 |
+
class BuildFuzzersConfig(BaseConfig):
|
| 252 |
+
"""Class containing constant configuration for building fuzzers in CIFuzz."""
|
| 253 |
+
|
| 254 |
+
def __init__(self):
|
| 255 |
+
"""Get the configuration from CIFuzz from the environment. These variables
|
| 256 |
+
are set by GitHub or the user."""
|
| 257 |
+
super().__init__()
|
| 258 |
+
self.git_sha = self.platform_conf.git_sha
|
| 259 |
+
self.git_url = self.platform_conf.git_url
|
| 260 |
+
|
| 261 |
+
self.allowed_broken_targets_percentage = os.getenv(
|
| 262 |
+
'ALLOWED_BROKEN_TARGETS_PERCENTAGE')
|
| 263 |
+
self.bad_build_check = environment.get_bool('BAD_BUILD_CHECK', True)
|
| 264 |
+
|
| 265 |
+
self.keep_unaffected_fuzz_targets = environment.get_bool(
|
| 266 |
+
'KEEP_UNAFFECTED_FUZZ_TARGETS')
|
| 267 |
+
|
| 268 |
+
self.upload_build = environment.get_bool('UPLOAD_BUILD', False)
|
| 269 |
+
if not self.keep_unaffected_fuzz_targets:
|
| 270 |
+
has_base_for_diff = (self.base_ref or self.base_commit)
|
| 271 |
+
if not has_base_for_diff:
|
| 272 |
+
logging.info(
|
| 273 |
+
'Keeping all fuzzers because there is nothing to diff against.')
|
| 274 |
+
self.keep_unaffected_fuzz_targets = True
|
| 275 |
+
elif self.upload_build:
|
| 276 |
+
logging.info('Keeping all fuzzers because we are uploading build.')
|
| 277 |
+
self.keep_unaffected_fuzz_targets = True
|
| 278 |
+
elif self.sanitizer == 'coverage':
|
| 279 |
+
logging.info('Keeping all fuzzers because we are doing coverage.')
|
| 280 |
+
self.keep_unaffected_fuzz_targets = True
|
| 281 |
+
|
| 282 |
+
if self.sanitizer == 'coverage':
|
| 283 |
+
self.bad_build_check = False
|
local-test-commons-compress-full-01-vuln_5/fuzz-tooling/infra/cifuzz/filestore_utils_test.py
ADDED
|
@@ -0,0 +1,50 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2021 Google LLC
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
"""Tests for filestore_utils."""
|
| 15 |
+
import unittest
|
| 16 |
+
from unittest import mock
|
| 17 |
+
|
| 18 |
+
import parameterized
|
| 19 |
+
|
| 20 |
+
import platform_config
|
| 21 |
+
import filestore
|
| 22 |
+
from filestore import github_actions
|
| 23 |
+
import filestore_utils
|
| 24 |
+
import test_helpers
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
class GetFilestoreTest(unittest.TestCase):
|
| 28 |
+
"""Tests for get_filestore."""
|
| 29 |
+
|
| 30 |
+
@parameterized.parameterized.expand([
|
| 31 |
+
({
|
| 32 |
+
'cfl_platform': 'github',
|
| 33 |
+
}, github_actions.GithubActionsFilestore),
|
| 34 |
+
])
|
| 35 |
+
def test_get_filestore(self, config_kwargs, filestore_cls):
|
| 36 |
+
"""Tests that get_filestore returns the right filestore given a certain
|
| 37 |
+
platform."""
|
| 38 |
+
run_config = test_helpers.create_run_config(**config_kwargs)
|
| 39 |
+
filestore_impl = filestore_utils.get_filestore(run_config)
|
| 40 |
+
self.assertIsInstance(filestore_impl, filestore_cls)
|
| 41 |
+
|
| 42 |
+
@mock.patch('config_utils.BaseConfig.platform', return_value='other')
|
| 43 |
+
@mock.patch('config_utils._get_platform_config',
|
| 44 |
+
return_value=platform_config.BasePlatformConfig())
|
| 45 |
+
def test_get_filestore_unsupported_platform(self, _, __):
|
| 46 |
+
"""Tests that get_filestore exceptions given a platform it doesn't
|
| 47 |
+
support."""
|
| 48 |
+
run_config = test_helpers.create_run_config()
|
| 49 |
+
with self.assertRaises(filestore.FilestoreError):
|
| 50 |
+
filestore_utils.get_filestore(run_config)
|
local-test-commons-compress-full-01-vuln_5/fuzz-tooling/infra/cifuzz/fuzz_target_test.py
ADDED
|
@@ -0,0 +1,298 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2020 Google LLC
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
"""Tests the functionality of the fuzz_target module."""
|
| 15 |
+
|
| 16 |
+
import os
|
| 17 |
+
import shutil
|
| 18 |
+
import tempfile
|
| 19 |
+
import unittest
|
| 20 |
+
from unittest import mock
|
| 21 |
+
|
| 22 |
+
import certifi
|
| 23 |
+
# Importing this later causes import failures with pytest for some reason.
|
| 24 |
+
# TODO(ochang): Figure out why.
|
| 25 |
+
import parameterized
|
| 26 |
+
import google.cloud.ndb # pylint: disable=unused-import
|
| 27 |
+
from pyfakefs import fake_filesystem_unittest
|
| 28 |
+
from clusterfuzz.fuzz import engine
|
| 29 |
+
|
| 30 |
+
import clusterfuzz_deployment
|
| 31 |
+
import fuzz_target
|
| 32 |
+
import test_helpers
|
| 33 |
+
import workspace_utils
|
| 34 |
+
|
| 35 |
+
# NOTE: This integration test relies on
|
| 36 |
+
# https://github.com/google/oss-fuzz/tree/master/projects/example project.
|
| 37 |
+
EXAMPLE_PROJECT = 'example'
|
| 38 |
+
|
| 39 |
+
# An example fuzzer that triggers an error.
|
| 40 |
+
EXAMPLE_FUZZER = 'example_crash_fuzzer'
|
| 41 |
+
|
| 42 |
+
# Mock return values for engine_impl.reproduce.
|
| 43 |
+
EXECUTE_SUCCESS_RESULT = engine.ReproduceResult([], 0, 0, '')
|
| 44 |
+
EXECUTE_FAILURE_RESULT = engine.ReproduceResult([], 1, 0, '')
|
| 45 |
+
|
| 46 |
+
TEST_DATA_PATH = os.path.join(os.path.dirname(__file__), 'test_data')
|
| 47 |
+
|
| 48 |
+
|
| 49 |
+
def _create_config(**kwargs):
|
| 50 |
+
"""Creates a config object and then sets every attribute that is a key in
|
| 51 |
+
|kwargs| to the corresponding value. Asserts that each key in |kwargs| is an
|
| 52 |
+
attribute of Config."""
|
| 53 |
+
defaults = {
|
| 54 |
+
'cfl_platform': 'github',
|
| 55 |
+
'oss_fuzz_project_name': EXAMPLE_PROJECT,
|
| 56 |
+
'workspace': '/workspace'
|
| 57 |
+
}
|
| 58 |
+
for default_key, default_value in defaults.items():
|
| 59 |
+
if default_key not in kwargs:
|
| 60 |
+
kwargs[default_key] = default_value
|
| 61 |
+
|
| 62 |
+
return test_helpers.create_run_config(**kwargs)
|
| 63 |
+
|
| 64 |
+
|
| 65 |
+
def _create_deployment(**kwargs):
|
| 66 |
+
config = _create_config(**kwargs)
|
| 67 |
+
workspace = workspace_utils.Workspace(config)
|
| 68 |
+
return clusterfuzz_deployment.get_clusterfuzz_deployment(config, workspace)
|
| 69 |
+
|
| 70 |
+
|
| 71 |
+
@mock.patch('utils.get_container_name', return_value='container')
|
| 72 |
+
class IsReproducibleTest(fake_filesystem_unittest.TestCase):
|
| 73 |
+
"""Tests the is_reproducible method in the fuzz_target.FuzzTarget class."""
|
| 74 |
+
|
| 75 |
+
def setUp(self):
|
| 76 |
+
"""Sets up example fuzz target to test is_reproducible method."""
|
| 77 |
+
self.fuzz_target_name = 'fuzz-target'
|
| 78 |
+
deployment = _create_deployment()
|
| 79 |
+
self.config = deployment.config
|
| 80 |
+
self.workspace = deployment.workspace
|
| 81 |
+
self.fuzz_target_path = os.path.join(self.workspace.out,
|
| 82 |
+
self.fuzz_target_name)
|
| 83 |
+
self.setUpPyfakefs()
|
| 84 |
+
self.fs.create_file(self.fuzz_target_path)
|
| 85 |
+
self.testcase_path = '/testcase'
|
| 86 |
+
self.fs.create_file(self.testcase_path)
|
| 87 |
+
|
| 88 |
+
self.target = fuzz_target.FuzzTarget(self.fuzz_target_path,
|
| 89 |
+
fuzz_target.REPRODUCE_ATTEMPTS,
|
| 90 |
+
self.workspace, deployment,
|
| 91 |
+
deployment.config)
|
| 92 |
+
|
| 93 |
+
# ClusterFuzz requires ROOT_DIR.
|
| 94 |
+
root_dir = os.environ['ROOT_DIR']
|
| 95 |
+
test_helpers.patch_environ(self, empty=True)
|
| 96 |
+
os.environ['ROOT_DIR'] = root_dir
|
| 97 |
+
|
| 98 |
+
# There's an extremely bad issue that happens if this test is run: Other tests
|
| 99 |
+
# in this file fail in CI with stacktraces using referencing fakefs even if
|
| 100 |
+
# the tests do not use fakefs.
|
| 101 |
+
# TODO(metzman): Stop using fakefs.
|
| 102 |
+
@mock.patch('os.chmod')
|
| 103 |
+
@unittest.skip('Skip because of weird failures.')
|
| 104 |
+
def test_repro_timed_out(self, mock_chmod, mock_get_container_name):
|
| 105 |
+
"""Tests that is_reproducible behaves correctly when reproduction times
|
| 106 |
+
out."""
|
| 107 |
+
del mock_get_container_name
|
| 108 |
+
del mock_chmod
|
| 109 |
+
|
| 110 |
+
with mock.patch(
|
| 111 |
+
'clusterfuzz._internal.bot.fuzzers.libFuzzer.engine.LibFuzzerEngine.'
|
| 112 |
+
'reproduce',
|
| 113 |
+
side_effect=TimeoutError):
|
| 114 |
+
self.assertFalse(
|
| 115 |
+
self.target.is_reproducible('/testcase', self.target.target_path, []))
|
| 116 |
+
|
| 117 |
+
def test_reproducible(self, _):
|
| 118 |
+
"""Tests that is_reproducible returns True if crash is detected and that
|
| 119 |
+
is_reproducible uses the correct command to reproduce a crash."""
|
| 120 |
+
all_repro = [EXECUTE_FAILURE_RESULT] * fuzz_target.REPRODUCE_ATTEMPTS
|
| 121 |
+
with mock.patch('clusterfuzz.fuzz.get_engine') as mock_get_engine:
|
| 122 |
+
mock_get_engine().reproduce.side_effect = all_repro
|
| 123 |
+
|
| 124 |
+
result = self.target.is_reproducible(self.testcase_path,
|
| 125 |
+
self.fuzz_target_path, [])
|
| 126 |
+
mock_get_engine().reproduce.assert_called_once_with(
|
| 127 |
+
'/workspace/build-out/fuzz-target',
|
| 128 |
+
'/testcase',
|
| 129 |
+
arguments=[],
|
| 130 |
+
max_time=30)
|
| 131 |
+
self.assertTrue(result)
|
| 132 |
+
self.assertEqual(1, mock_get_engine().reproduce.call_count)
|
| 133 |
+
|
| 134 |
+
def test_flaky(self, _):
|
| 135 |
+
"""Tests that is_reproducible returns True if crash is detected on the last
|
| 136 |
+
attempt."""
|
| 137 |
+
last_time_repro = [EXECUTE_SUCCESS_RESULT] * 9 + [EXECUTE_FAILURE_RESULT]
|
| 138 |
+
with mock.patch('clusterfuzz.fuzz.get_engine') as mock_get_engine:
|
| 139 |
+
mock_get_engine().reproduce.side_effect = last_time_repro
|
| 140 |
+
self.assertTrue(
|
| 141 |
+
self.target.is_reproducible(self.testcase_path, self.fuzz_target_path,
|
| 142 |
+
[]))
|
| 143 |
+
self.assertEqual(fuzz_target.REPRODUCE_ATTEMPTS,
|
| 144 |
+
mock_get_engine().reproduce.call_count)
|
| 145 |
+
|
| 146 |
+
def test_nonexistent_fuzzer(self, _):
|
| 147 |
+
"""Tests that is_reproducible raises an error if it could not attempt
|
| 148 |
+
reproduction because the fuzzer doesn't exist."""
|
| 149 |
+
with self.assertRaises(fuzz_target.ReproduceError):
|
| 150 |
+
self.target.is_reproducible(self.testcase_path, '/non-existent-path', [])
|
| 151 |
+
|
| 152 |
+
def test_unreproducible(self, _):
|
| 153 |
+
"""Tests that is_reproducible returns False for a crash that did not
|
| 154 |
+
reproduce."""
|
| 155 |
+
all_unrepro = [EXECUTE_SUCCESS_RESULT] * fuzz_target.REPRODUCE_ATTEMPTS
|
| 156 |
+
with mock.patch('clusterfuzz.fuzz.get_engine') as mock_get_engine:
|
| 157 |
+
mock_get_engine().reproduce.side_effect = all_unrepro
|
| 158 |
+
result = self.target.is_reproducible(self.testcase_path,
|
| 159 |
+
self.fuzz_target_path, [])
|
| 160 |
+
self.assertFalse(result)
|
| 161 |
+
|
| 162 |
+
|
| 163 |
+
class IsCrashReportableTest(fake_filesystem_unittest.TestCase):
|
| 164 |
+
"""Tests the is_crash_reportable method of FuzzTarget."""
|
| 165 |
+
|
| 166 |
+
def setUp(self):
|
| 167 |
+
"""Sets up example fuzz target to test is_crash_reportable method."""
|
| 168 |
+
self.setUpPyfakefs()
|
| 169 |
+
self.fuzz_target_path = '/example/do_stuff_fuzzer'
|
| 170 |
+
deployment = _create_deployment()
|
| 171 |
+
self.target = fuzz_target.FuzzTarget(self.fuzz_target_path, 100,
|
| 172 |
+
deployment.workspace, deployment,
|
| 173 |
+
deployment.config)
|
| 174 |
+
self.oss_fuzz_build_path = '/oss-fuzz-build'
|
| 175 |
+
self.fs.create_file(self.fuzz_target_path)
|
| 176 |
+
self.oss_fuzz_target_path = os.path.join(
|
| 177 |
+
self.oss_fuzz_build_path, os.path.basename(self.fuzz_target_path))
|
| 178 |
+
self.fs.create_file(self.oss_fuzz_target_path)
|
| 179 |
+
self.testcase_path = '/testcase'
|
| 180 |
+
self.fs.create_file(self.testcase_path, contents='')
|
| 181 |
+
|
| 182 |
+
# Do this to prevent pyfakefs from messing with requests.
|
| 183 |
+
self.fs.add_real_directory(os.path.dirname(certifi.__file__))
|
| 184 |
+
|
| 185 |
+
@mock.patch('fuzz_target.FuzzTarget.is_reproducible',
|
| 186 |
+
side_effect=[True, False])
|
| 187 |
+
@mock.patch('logging.info')
|
| 188 |
+
def test_new_reproducible_crash(self, mock_info, _):
|
| 189 |
+
"""Tests that a new reproducible crash returns True."""
|
| 190 |
+
with tempfile.TemporaryDirectory() as tmp_dir:
|
| 191 |
+
self.target.out_dir = tmp_dir
|
| 192 |
+
self.assertTrue(self.target.is_crash_reportable(self.testcase_path, []))
|
| 193 |
+
mock_info.assert_called_with(
|
| 194 |
+
'The crash is not reproducible on previous build. '
|
| 195 |
+
'Code change (pr/commit) introduced crash.')
|
| 196 |
+
|
| 197 |
+
# yapf: disable
|
| 198 |
+
@parameterized.parameterized.expand([
|
| 199 |
+
# Reproducible on PR build, but also reproducible on OSS-Fuzz.
|
| 200 |
+
([True, True],),
|
| 201 |
+
|
| 202 |
+
# Not reproducible on PR build, but somehow reproducible on OSS-Fuzz.
|
| 203 |
+
# Unlikely to happen in real world except if test is flaky.
|
| 204 |
+
([False, True],),
|
| 205 |
+
|
| 206 |
+
# Not reproducible on PR build, and not reproducible on OSS-Fuzz.
|
| 207 |
+
([False, False],),
|
| 208 |
+
])
|
| 209 |
+
# yapf: enable
|
| 210 |
+
def test_invalid_crash(self, is_reproducible_retvals):
|
| 211 |
+
"""Tests that a nonreportable crash causes the method to return False."""
|
| 212 |
+
with mock.patch('fuzz_target.FuzzTarget.is_reproducible',
|
| 213 |
+
side_effect=is_reproducible_retvals):
|
| 214 |
+
with mock.patch('clusterfuzz_deployment.OSSFuzz.download_latest_build',
|
| 215 |
+
return_value=self.oss_fuzz_build_path):
|
| 216 |
+
self.assertFalse(self.target.is_crash_reportable(
|
| 217 |
+
self.testcase_path, []))
|
| 218 |
+
|
| 219 |
+
  @mock.patch('logging.info')
  @mock.patch('fuzz_target.FuzzTarget.is_reproducible', return_value=[True])
  def test_reproducible_no_oss_fuzz_target(self, _, mock_info):
    """Tests that is_crash_reportable returns True when a crash reproduces on
    the PR build but the target is not in the OSS-Fuzz build (usually because it
    is new)."""
    # Simulate the target being absent from the previous OSS-Fuzz build.
    os.remove(self.oss_fuzz_target_path)

    def is_reproducible_side_effect(testcase, target_path, reproduce_arguments):
      # Reproduces on the PR build; raises for the OSS-Fuzz build, which is
      # what FuzzTarget does when the old target binary cannot be run.
      del testcase
      del reproduce_arguments
      if os.path.dirname(target_path) == self.oss_fuzz_build_path:
        raise fuzz_target.ReproduceError()
      return True

    with mock.patch(
        'fuzz_target.FuzzTarget.is_reproducible',
        side_effect=is_reproducible_side_effect) as mock_is_reproducible:
      with mock.patch('clusterfuzz_deployment.OSSFuzz.download_latest_build',
                      return_value=self.oss_fuzz_build_path):
        self.assertTrue(self.target.is_crash_reportable(self.testcase_path, []))
    # The OSS-Fuzz build must still have been attempted.
    mock_is_reproducible.assert_any_call(self.testcase_path,
                                         self.oss_fuzz_target_path, [])
    mock_info.assert_called_with(
        'Could not run previous build of target to determine if this code '
        'change (pr/commit) introduced crash. Assuming crash was newly '
        'introduced.')
|
| 246 |
+
|
| 247 |
+
|
| 248 |
+
class FuzzTest(fake_filesystem_unittest.TestCase):
  """Fuzz test."""

  def setUp(self):
    """Sets up example fuzz target."""
    # pyfakefs replaces the real filesystem for the duration of the test.
    self.setUpPyfakefs()
    deployment = _create_deployment()
    config = deployment.config
    workspace = deployment.workspace
    # 10 is the fuzzing duration in seconds for this example target.
    self.fuzz_target = fuzz_target.FuzzTarget('/path/fuzz-target', 10,
                                              workspace, deployment, config)

  def test_get_fuzz_target_artifact(self):
    """Tests that get_fuzz_target_artifact works as intended."""
    # pylint: disable=protected-access
    fuzz_target_artifact = self.fuzz_target._target_artifact_path()
    # Artifacts are grouped by target name and sanitizer (address here).
    self.assertEqual('/workspace/out/artifacts/fuzz-target/address',
                     fuzz_target_artifact)
|
| 266 |
+
|
| 267 |
+
|
| 268 |
+
class TimeoutIntegrationTest(unittest.TestCase):
  """Tests handling of fuzzer timeout (timeout crashes reported by
  libFuzzer)."""
  # Name of the test fixture binary that deliberately times out.
  TIMEOUT_FUZZER_NAME = 'timeout_fuzzer'

  # (report_timeouts, expect_crash): a timeout is only treated as a crash
  # when timeout reporting is enabled.
  @parameterized.parameterized.expand([(True, True), (False, False)])
  def test_timeout_reported(self, report_timeouts, expect_crash):
    """Tests that timeouts are not reported."""
    with test_helpers.temp_dir_copy(TEST_DATA_PATH) as temp_dir:
      fuzz_target_path = os.path.join(temp_dir, 'build-out',
                                      self.TIMEOUT_FUZZER_NAME)
      shutil.copy(os.path.join(temp_dir, self.TIMEOUT_FUZZER_NAME),
                  fuzz_target_path)
      deployment = _create_deployment(workspace=temp_dir,
                                      report_timeouts=report_timeouts)
      config = deployment.config
      fuzz_target_obj = fuzz_target.FuzzTarget(fuzz_target_path,
                                               fuzz_target.REPRODUCE_ATTEMPTS,
                                               deployment.workspace, deployment,
                                               config)
      # Prevent ClusterFuzz helpers from rewriting libFuzzer arguments; pin
      # the arguments so the fuzzer hits its -timeout=1 quickly.
      with mock.patch('clusterfuzz._internal.bot.fuzzers.libfuzzer.'
                      'fix_timeout_argument_for_reproduction') as _:
        with mock.patch(
            'clusterfuzz._internal.bot.fuzzers.libFuzzer.fuzzer.get_arguments',
            return_value=['-timeout=1', '-rss_limit_mb=2560']):
          fuzz_result = fuzz_target_obj.fuzz()
      # A testcase is only produced when the timeout counts as a crash.
      self.assertEqual(bool(fuzz_result.testcase), expect_crash)
|
| 295 |
+
|
| 296 |
+
|
| 297 |
+
# Allow running this test module directly (outside a test runner).
if __name__ == '__main__':
  unittest.main()
|
local-test-commons-compress-full-01-vuln_5/fuzz-tooling/infra/cifuzz/get_coverage.py
ADDED
|
@@ -0,0 +1,208 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2021 Google LLC
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
"""Module for determining coverage of fuzz targets."""
|
| 15 |
+
import json
|
| 16 |
+
import logging
|
| 17 |
+
import os
|
| 18 |
+
import sys
|
| 19 |
+
|
| 20 |
+
import http_utils
|
| 21 |
+
|
| 22 |
+
# pylint: disable=wrong-import-position,import-error
|
| 23 |
+
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
| 24 |
+
import utils
|
| 25 |
+
|
| 26 |
+
# The GCS path used to fetch an OSS-Fuzz project's latest coverage report info JSON.
|
| 27 |
+
OSS_FUZZ_LATEST_COVERAGE_INFO_PATH = 'oss-fuzz-coverage/latest_report_info/'
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
# Raised when required coverage data cannot be obtained (see OSSFuzzCoverage).
# pylint: disable=too-few-public-methods
class CoverageError(Exception):
  """Exceptions for project coverage."""
|
| 33 |
+
|
| 34 |
+
|
| 35 |
+
class BaseCoverage:
  """Gets coverage data for a project."""

  def __init__(self, repo_path):
    # Normalize once so startswith() prefix checks against report paths work
    # for both '/src/curl' and '/src/curl/'.
    self.repo_path = _normalize_repo_path(repo_path)

  def get_files_covered_by_target(self, target):
    """Returns a list of source files covered by the specific fuzz target.

    Args:
      target: The name of the fuzz target whose coverage is requested.

    Returns:
      A list of files that the fuzz target covers or None.
    """
    target_cov = self.get_target_coverage(target)
    if not target_cov:
      logging.info('No coverage available for %s.', target)
      return None

    per_file_coverage = get_coverage_per_file(target_cov)
    if not per_file_coverage:
      logging.info('No files found in coverage report.')
      return None

    covered_files = []
    for file_cov in per_file_coverage:
      normalized_path = os.path.normpath(file_cov['filename'])
      if not normalized_path.startswith(self.repo_path):
        # Exclude files outside of the main repo.
        continue
      if not is_file_covered(file_cov):
        # Don't consider a file affected if code in it is never executed.
        continue
      # TODO(metzman): It's weird to me that we access file_cov['filename']
      # again and not the normalized path, figure out if this makes sense.
      covered_files.append(
          utils.remove_prefix(file_cov['filename'], self.repo_path))

    return covered_files

  def get_target_coverage(self, target):
    """Get the coverage report for a specific fuzz target.

    Args:
      target: The name of the fuzz target whose coverage is requested.

    Returns:
      The target's coverage json dict or None on failure.
    """
    raise NotImplementedError('Child class must implement method.')
|
| 88 |
+
|
| 89 |
+
|
| 90 |
+
class OSSFuzzCoverage(BaseCoverage):
  """Gets coverage data for a project from OSS-Fuzz."""

  def __init__(self, repo_path, oss_fuzz_project_name):
    """Constructor for OSSFuzzCoverage."""
    super().__init__(repo_path)
    self.oss_fuzz_project_name = oss_fuzz_project_name
    # Resolve the GCS fuzzer-stats directory up front; without it no per-target
    # coverage can ever be fetched, so fail construction loudly.
    self.fuzzer_stats_url = _get_oss_fuzz_fuzzer_stats_dir_url(
        self.oss_fuzz_project_name)
    if self.fuzzer_stats_url is None:
      raise CoverageError('Could not get latest coverage.')

  def get_target_coverage(self, target):
    """Get the coverage report for a specific fuzz target.

    Args:
      target: The name of the fuzz target whose coverage is requested.

    Returns:
      The target's coverage json dict or None on failure.
    """
    if not self.fuzzer_stats_url:
      return None
    stats_json_url = utils.url_join(self.fuzzer_stats_url, target + '.json')
    return http_utils.get_json_from_url(stats_json_url)
|
| 116 |
+
|
| 117 |
+
|
| 118 |
+
def _get_oss_fuzz_latest_cov_report_info(oss_fuzz_project_name):
  """Gets and returns a dictionary containing the latest coverage report info
  for |oss_fuzz_project_name|, or None if it could not be fetched."""
  report_info_url = utils.url_join(utils.GCS_BASE_URL,
                                   OSS_FUZZ_LATEST_COVERAGE_INFO_PATH,
                                   oss_fuzz_project_name + '.json')
  report_info = http_utils.get_json_from_url(report_info_url)
  if report_info is not None:
    return report_info
  logging.error('Could not get the coverage report json from url: %s.',
                report_info_url)
  return None
|
| 130 |
+
|
| 131 |
+
|
| 132 |
+
def _get_oss_fuzz_fuzzer_stats_dir_url(oss_fuzz_project_name):
  """Gets latest coverage report info for a specific OSS-Fuzz project from
  GCS.

  Args:
    oss_fuzz_project_name: The name of the project.

  Returns:
    The HTTPS URL of the project's fuzzer-stats directory, or None on failure.
  """
  latest_cov_info = _get_oss_fuzz_latest_cov_report_info(oss_fuzz_project_name)
  if not latest_cov_info:
    return None

  if 'fuzzer_stats_dir' not in latest_cov_info:
    logging.error('fuzzer_stats_dir not in latest coverage info.')
    return None

  # The report info stores a gs:// URL; convert it to a fetchable HTTPS URL.
  return utils.gs_url_to_https(latest_cov_info['fuzzer_stats_dir'])
|
| 154 |
+
|
| 155 |
+
|
| 156 |
+
class FilesystemCoverage(BaseCoverage):
  """Class that gets a project's coverage from the filesystem."""

  def __init__(self, repo_path, project_coverage_dir):
    super().__init__(repo_path)
    # Directory laid out like an OSS-Fuzz coverage report: per-target JSON
    # files live under a 'fuzzer_stats' subdirectory.
    self.project_coverage_dir = project_coverage_dir

  def get_target_coverage(self, target):
    """Get the coverage report for a specific fuzz target.

    Args:
      target: The name of the fuzz target whose coverage is requested.

    Returns:
      The target's coverage json dict or None on failure.
    """
    logging.info('Getting coverage for %s from filesystem.', target)
    stats_json_path = os.path.join(self.project_coverage_dir, 'fuzzer_stats',
                                   target + '.json')
    if not os.path.exists(stats_json_path):
      logging.warning('%s does not exist.', stats_json_path)
      return None

    try:
      with open(stats_json_path) as stats_json_file:
        return json.load(stats_json_file)
    except json.decoder.JSONDecodeError as err:
      logging.error('Could not decode: %s. Error: %s.', stats_json_path, err)
      return None
|
| 186 |
+
|
| 187 |
+
|
| 188 |
+
def is_file_covered(file_cov):
  """Returns whether the file is covered (truthy region-covered count)."""
  region_summary = file_cov['summary']['regions']
  return region_summary['covered']
|
| 191 |
+
|
| 192 |
+
|
| 193 |
+
def get_coverage_per_file(target_cov):
  """Returns the coverage per file within |target_cov|, or None if the report
  does not have the expected llvm-cov export structure."""
  try:
    per_file = target_cov['data'][0]['files']
  except (IndexError, TypeError, KeyError):
    logging.error('target_cov: %s is malformed.', target_cov)
    return None
  else:
    return per_file
|
| 200 |
+
|
| 201 |
+
|
| 202 |
+
def _normalize_repo_path(repo_path):
|
| 203 |
+
"""Normalizes and returns |repo_path| to make sure cases like /src/curl and
|
| 204 |
+
/src/curl/ are both handled."""
|
| 205 |
+
repo_path = os.path.normpath(repo_path)
|
| 206 |
+
if not repo_path.endswith('/'):
|
| 207 |
+
repo_path += '/'
|
| 208 |
+
return repo_path
|
local-test-commons-compress-full-01-vuln_5/fuzz-tooling/infra/cifuzz/http_utils.py
ADDED
|
@@ -0,0 +1,118 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2021 Google LLC
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
"""Utility module for HTTP."""
|
| 15 |
+
import json
|
| 16 |
+
import logging
|
| 17 |
+
import os
|
| 18 |
+
import sys
|
| 19 |
+
import tempfile
|
| 20 |
+
import zipfile
|
| 21 |
+
|
| 22 |
+
import requests
|
| 23 |
+
|
| 24 |
+
# pylint: disable=wrong-import-position,import-error
|
| 25 |
+
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
| 26 |
+
import retry
|
| 27 |
+
|
| 28 |
+
_DOWNLOAD_URL_RETRIES = 3
|
| 29 |
+
_DOWNLOAD_URL_BACKOFF = 1
|
| 30 |
+
_HTTP_REQUEST_TIMEOUT = 10
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
def download_and_unpack_zip(url, extract_directory, headers=None):
  """Downloads and unpacks a zip file from an HTTP URL.

  Args:
    url: A url to the zip file to be downloaded and unpacked.
    extract_directory: The path where the zip file should be extracted to.
    headers: (Optional) HTTP headers to send with the download request.

  Returns:
    True on success.
  """
  if headers is None:
    headers = {}

  if not os.path.exists(extract_directory):
    logging.error('Extract directory: %s does not exist.', extract_directory)
    return False

  # NamedTemporaryFile gives the downloaded zip a unique path, so concurrent
  # calls to download_and_unpack_zip do not clobber each other.
  with tempfile.NamedTemporaryFile(suffix='.zip') as temp_zip:
    if not download_url(url, temp_zip.name, headers=headers):
      return False
    try:
      with zipfile.ZipFile(temp_zip.name, 'r') as archive:
        archive.extractall(extract_directory)
    except zipfile.BadZipFile:
      logging.error('Error unpacking zip from %s. Bad Zipfile.', url)
      return False

  return True
|
| 65 |
+
|
| 66 |
+
|
| 67 |
+
def download_url(*args, **kwargs):
  """Wrapper around _download_url that returns False instead of raising when
  _download_url fails with any exception."""
  try:
    result = _download_url(*args, **kwargs)
  except Exception:  # pylint: disable=broad-except
    # Any failure (network error, DNS, filesystem) is reported as False so
    # callers can retry or bail without their own exception handling.
    return False
  return result
|
| 74 |
+
|
| 75 |
+
|
| 76 |
+
def get_json_from_url(url):
  """Gets a json object from a specified HTTP URL.

  Args:
    url: The url of the json to be downloaded.

  Returns:
    A dictionary deserialized from JSON or None on failure.
  """
  try:
    response = requests.get(url, timeout=_HTTP_REQUEST_TIMEOUT)
    return response.json()
  except (ValueError, TypeError, json.JSONDecodeError,
          requests.exceptions.ReadTimeout) as err:
    logging.error('Loading json from url %s failed with: %s.', url, str(err))
    return None
|
| 91 |
+
|
| 92 |
+
|
| 93 |
+
@retry.wrap(_DOWNLOAD_URL_RETRIES, _DOWNLOAD_URL_BACKOFF)
def _download_url(url, filename, headers=None):
  """Downloads the file located at |url|, using HTTP to |filename|.

  Args:
    url: A url to a file to download.
    filename: The path the file should be downloaded to.
    headers: (Optional) HTTP headers to send with the download request.

  Returns:
    True on success.
  """
  if headers is None:
    headers = {}

  # Bound the request so a hung connection cannot stall CI indefinitely
  # (requests has no default timeout). A timeout raises requests.Timeout,
  # which the retry decorator re-attempts and download_url converts to False.
  # This matches get_json_from_url, which already uses _HTTP_REQUEST_TIMEOUT.
  response = requests.get(url, headers=headers, timeout=_HTTP_REQUEST_TIMEOUT)

  if response.status_code != 200:
    logging.error('Unable to download from: %s. Code: %d. Content: %s.', url,
                  response.status_code, response.content)
    return False

  with open(filename, 'wb') as file_handle:
    file_handle.write(response.content)

  return True
|
local-test-commons-compress-full-01-vuln_5/fuzz-tooling/infra/cifuzz/run_fuzzers.py
ADDED
|
@@ -0,0 +1,321 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2021 Google LLC
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
"""Module for running fuzzers."""
|
| 15 |
+
import enum
|
| 16 |
+
import logging
|
| 17 |
+
import os
|
| 18 |
+
import sys
|
| 19 |
+
import time
|
| 20 |
+
|
| 21 |
+
import clusterfuzz_deployment
|
| 22 |
+
import fuzz_target
|
| 23 |
+
import generate_coverage_report
|
| 24 |
+
import workspace_utils
|
| 25 |
+
import sarif_utils
|
| 26 |
+
|
| 27 |
+
# pylint: disable=wrong-import-position,import-error
|
| 28 |
+
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
| 29 |
+
|
| 30 |
+
import utils
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
class RunFuzzersResult(enum.Enum):
  """Enum result from running fuzzers."""
  ERROR = 0  # Fuzzing could not run (initialization/setup failure).
  BUG_FOUND = 1  # At least one reportable crash was found.
  NO_BUG_FOUND = 2  # Fuzzing completed without reportable crashes.
|
| 38 |
+
|
| 39 |
+
|
| 40 |
+
class BaseFuzzTargetRunner:
  """Base class for fuzzer runners."""

  def __init__(self, config):
    self.config = config
    self.workspace = workspace_utils.Workspace(config)
    self.clusterfuzz_deployment = (
        clusterfuzz_deployment.get_clusterfuzz_deployment(
            self.config, self.workspace))

    # Set by the initialize method.
    self.fuzz_target_paths = None

  def get_fuzz_targets(self):
    """Returns fuzz targets in out directory."""
    return utils.get_fuzz_targets(self.workspace.out)

  def initialize(self):
    """Initialization method. Must be called before calling run_fuzz_targets.
    Returns True on success."""
    # Use a separate initialization function so we can return False on failure
    # instead of exceptioning like we need to do if this were done in the
    # __init__ method.

    logging.info('Using %s sanitizer.', self.config.sanitizer)

    # TODO(metzman) Add a check to ensure we aren't over time limit.
    if not self.config.fuzz_seconds or self.config.fuzz_seconds < 1:
      logging.error(
          'Fuzz_seconds argument must be greater than 1, but was: %s.',
          self.config.fuzz_seconds)
      return False

    if not os.path.exists(self.workspace.out):
      logging.error('Out directory: %s does not exist.', self.workspace.out)
      return False

    # Crash artifacts directory must start empty so everything in it after a
    # run is attributable to this run.
    if not os.path.exists(self.workspace.artifacts):
      os.makedirs(self.workspace.artifacts)
    elif (not os.path.isdir(self.workspace.artifacts) or
          os.listdir(self.workspace.artifacts)):
      logging.error('Artifacts path: %s exists and is not an empty directory.',
                    self.workspace.artifacts)
      return False

    self.fuzz_target_paths = self.get_fuzz_targets()
    logging.info('Fuzz targets: %s', self.fuzz_target_paths)
    if not self.fuzz_target_paths:
      logging.error('No fuzz targets were found in out directory: %s.',
                    self.workspace.out)
      return False

    return True

  def cleanup_after_fuzz_target_run(self, fuzz_target_obj):  # pylint: disable=no-self-use
    """Cleans up after running |fuzz_target_obj|."""
    raise NotImplementedError('Child class must implement method.')

  def run_fuzz_target(self, fuzz_target_obj):  # pylint: disable=no-self-use
    """Fuzzes with |fuzz_target_obj| and returns the result."""
    raise NotImplementedError('Child class must implement method.')

  @property
  def quit_on_bug_found(self):
    """Property that is checked to determine if fuzzing should quit after first
    bug is found."""
    raise NotImplementedError('Child class must implement method.')

  def create_fuzz_target_obj(self, target_path, run_seconds):
    """Returns a fuzz target object."""
    return fuzz_target.FuzzTarget(target_path, run_seconds, self.workspace,
                                  self.clusterfuzz_deployment, self.config)

  def run_fuzz_targets(self):
    """Runs fuzz targets. Returns True if a bug was found.

    Divides the configured fuzz-time budget evenly across the remaining
    targets, letting later targets inherit time left over by earlier ones.
    Assumes initialize() has already run (fuzz_target_paths is non-empty).
    """
    fuzzers_left_to_run = len(self.fuzz_target_paths)

    # Make a copy since we will mutate it.
    fuzz_seconds = self.config.fuzz_seconds

    min_seconds_per_fuzzer = fuzz_seconds // fuzzers_left_to_run
    bug_found = False
    for target_path in self.fuzz_target_paths:
      # By doing this, we can ensure that every fuzz target runs for at least
      # min_seconds_per_fuzzer, but that other fuzzers will have longer to run
      # if one ends early.
      run_seconds = max(fuzz_seconds // fuzzers_left_to_run,
                        min_seconds_per_fuzzer)

      target = self.create_fuzz_target_obj(target_path, run_seconds)
      start_time = time.time()
      result = self.run_fuzz_target(target)
      self.cleanup_after_fuzz_target_run(target)

      # It's OK if this goes negative since we take max when determining
      # run_seconds.
      fuzz_seconds -= time.time() - start_time

      fuzzers_left_to_run -= 1
      if not result.testcase or not result.stacktrace:
        logging.info('Fuzzer %s finished running without reportable crashes.',
                     target.target_name)
        continue

      bug_found = True
      if self.quit_on_bug_found:
        logging.info('Bug found. Stopping fuzzing.')
        break

    # pylint: disable=undefined-loop-variable
    if not target_path:
      logging.error('Ran no fuzz targets.')
    elif self.config.output_sarif:
      # TODO(metzman): Handle multiple crashes.
      write_fuzz_result_to_sarif(result, target_path, self.workspace)
    self.clusterfuzz_deployment.upload_crashes()
    return bug_found
|
| 157 |
+
|
| 158 |
+
|
| 159 |
+
def write_fuzz_result_to_sarif(fuzz_result, target_path, workspace):
  """Write results of fuzzing to SARIF.

  Args:
    fuzz_result: The fuzzing result whose stacktrace is recorded.
    target_path: Path to the fuzz target that produced the crash.
    workspace: The workspace determining where the SARIF output goes.
  """
  logging.info('Writing sarif results.')
  sarif_utils.write_stacktrace_to_sarif(fuzz_result.stacktrace, target_path,
                                        workspace)
|
| 164 |
+
|
| 165 |
+
|
| 166 |
+
class PruneTargetRunner(BaseFuzzTargetRunner):
  """Runner that prunes corpora."""

  @property
  def quit_on_bug_found(self):
    # Pruning is not bug hunting; every target's corpus must be processed.
    return False

  def run_fuzz_target(self, fuzz_target_obj):
    """Prunes with |fuzz_target_obj| and returns the result."""
    result = fuzz_target_obj.prune()
    logging.debug('Corpus path contents: %s.', os.listdir(result.corpus_path))
    # replace=True: the minimized corpus supersedes the previous one.
    self.clusterfuzz_deployment.upload_corpus(fuzz_target_obj.target_name,
                                              result.corpus_path,
                                              replace=True)
    return result

  def cleanup_after_fuzz_target_run(self, fuzz_target_obj):  # pylint: disable=no-self-use
    """Cleans up after pruning with |fuzz_target_obj|."""
    fuzz_target_obj.free_disk_if_needed()
|
| 185 |
+
|
| 186 |
+
|
| 187 |
+
# Helper binaries that sit next to fuzz targets in the out/ directory but
# must not be treated as fuzz targets when generating coverage reports.
NON_FUZZ_TARGETS_FOR_COVERAGE = {
    'llvm-symbolizer',
    'jazzer_agent_deploy.jar',
    'jazzer_driver',
    'jazzer_driver_with_sanitizer',
}
|
| 193 |
+
|
| 194 |
+
|
| 195 |
+
def is_coverage_fuzz_target(file_path):
  """Returns whether |file_path| is a fuzz target binary for the purposes of a
  coverage report. Inspired by infra/base-images/base-runner/coverage."""
  # Must be a regular, executable file...
  if not (os.path.isfile(file_path) and utils.is_executable(file_path)):
    return False
  # ...whose basename is not one of the known helper binaries.
  return os.path.basename(file_path) not in NON_FUZZ_TARGETS_FOR_COVERAGE
|
| 204 |
+
|
| 205 |
+
|
| 206 |
+
def get_coverage_fuzz_targets(out):
  """Returns a list of fuzz targets in |out| for coverage."""
  # We only want fuzz targets from the root because during the coverage build,
  # a lot of the image's filesystem is copied into /out for the purpose of
  # generating coverage reports.
  candidate_paths = (os.path.join(out, name) for name in os.listdir(out))
  return [path for path in candidate_paths if is_coverage_fuzz_target(path)]
|
| 217 |
+
|
| 218 |
+
|
| 219 |
+
class CoverageTargetRunner(BaseFuzzTargetRunner):
  """Runner that runs the 'coverage' command."""

  @property
  def quit_on_bug_found(self):
    # Coverage generation never fuzzes, so this is meaningless here.
    raise NotImplementedError('Not implemented for CoverageTargetRunner.')

  def get_fuzz_targets(self):
    """Returns fuzz targets in out directory."""
    # Uses the coverage-specific discovery that filters out helper binaries.
    return get_coverage_fuzz_targets(self.workspace.out)

  def run_fuzz_targets(self):
    """Generates a coverage report. Always returns False since it never finds
    any bugs."""
    generate_coverage_report.generate_coverage_report(
        self.fuzz_target_paths, self.workspace, self.clusterfuzz_deployment,
        self.config)
    return False

  def run_fuzz_target(self, fuzz_target_obj):  # pylint: disable=no-self-use
    """Fuzzes with |fuzz_target_obj| and returns the result."""
    raise NotImplementedError('Not implemented for CoverageTargetRunner.')

  def cleanup_after_fuzz_target_run(self, fuzz_target_obj):  # pylint: disable=no-self-use
    """Cleans up after running |fuzz_target_obj|."""
    raise NotImplementedError('Not implemented for CoverageTargetRunner.')
|
| 245 |
+
|
| 246 |
+
|
| 247 |
+
class CiFuzzTargetRunner(BaseFuzzTargetRunner):
|
| 248 |
+
"""Runner for fuzz targets used in CI (patch-fuzzing) context."""
|
| 249 |
+
|
| 250 |
+
@property
|
| 251 |
+
def quit_on_bug_found(self):
|
| 252 |
+
return True
|
| 253 |
+
|
| 254 |
+
def cleanup_after_fuzz_target_run(self, fuzz_target_obj): # pylint: disable=no-self-use
|
| 255 |
+
"""Cleans up after running |fuzz_target_obj|."""
|
| 256 |
+
fuzz_target_obj.free_disk_if_needed()
|
| 257 |
+
|
| 258 |
+
def run_fuzz_target(self, fuzz_target_obj): # pylint: disable=no-self-use
|
| 259 |
+
return fuzz_target_obj.fuzz()
|
| 260 |
+
|
| 261 |
+
|
| 262 |
+
class BatchFuzzTargetRunner(BaseFuzzTargetRunner):
|
| 263 |
+
"""Runner for fuzz targets used in batch fuzzing context."""
|
| 264 |
+
|
| 265 |
+
@property
|
| 266 |
+
def quit_on_bug_found(self):
|
| 267 |
+
return False
|
| 268 |
+
|
| 269 |
+
def run_fuzz_target(self, fuzz_target_obj):
|
| 270 |
+
"""Fuzzes with |fuzz_target_obj| and returns the result."""
|
| 271 |
+
result = fuzz_target_obj.fuzz(batch=True)
|
| 272 |
+
logging.debug('Corpus path contents: %s.', os.listdir(result.corpus_path))
|
| 273 |
+
self.clusterfuzz_deployment.upload_corpus(fuzz_target_obj.target_name,
|
| 274 |
+
result.corpus_path)
|
| 275 |
+
return result
|
| 276 |
+
|
| 277 |
+
def cleanup_after_fuzz_target_run(self, fuzz_target_obj):
|
| 278 |
+
"""Cleans up after running |fuzz_target_obj|."""
|
| 279 |
+
# This must be done after we upload the corpus, otherwise it will be deleted
|
| 280 |
+
# before we get a chance to upload it. We can't delete the fuzz target
|
| 281 |
+
# because it is needed when we upload the build.
|
| 282 |
+
fuzz_target_obj.free_disk_if_needed(delete_fuzz_target=False)
|
| 283 |
+
|
| 284 |
+
|
| 285 |
+
_MODE_RUNNER_MAPPING = {
|
| 286 |
+
'batch': BatchFuzzTargetRunner,
|
| 287 |
+
'coverage': CoverageTargetRunner,
|
| 288 |
+
'prune': PruneTargetRunner,
|
| 289 |
+
'code-change': CiFuzzTargetRunner,
|
| 290 |
+
}
|
| 291 |
+
|
| 292 |
+
|
| 293 |
+
def get_fuzz_target_runner(config):
|
| 294 |
+
"""Returns a fuzz target runner object based on the mode of
|
| 295 |
+
|config|."""
|
| 296 |
+
runner = _MODE_RUNNER_MAPPING[config.mode](config)
|
| 297 |
+
logging.info('run fuzzers MODE is: %s. Runner: %s.', config.mode, runner)
|
| 298 |
+
return runner
|
| 299 |
+
|
| 300 |
+
|
| 301 |
+
def run_fuzzers(config): # pylint: disable=too-many-locals
|
| 302 |
+
"""Runs fuzzers for a specific OSS-Fuzz project.
|
| 303 |
+
|
| 304 |
+
Args:
|
| 305 |
+
config: A RunFuzzTargetsConfig.
|
| 306 |
+
|
| 307 |
+
Returns:
|
| 308 |
+
A RunFuzzersResult enum value indicating what happened during fuzzing.
|
| 309 |
+
"""
|
| 310 |
+
fuzz_target_runner = get_fuzz_target_runner(config)
|
| 311 |
+
if not fuzz_target_runner.initialize():
|
| 312 |
+
# We didn't fuzz at all because of internal (CIFuzz) errors. And we didn't
|
| 313 |
+
# find any bugs.
|
| 314 |
+
return RunFuzzersResult.ERROR
|
| 315 |
+
|
| 316 |
+
if not fuzz_target_runner.run_fuzz_targets():
|
| 317 |
+
# We fuzzed successfully, but didn't find any bugs (in the fuzz target).
|
| 318 |
+
return RunFuzzersResult.NO_BUG_FOUND
|
| 319 |
+
|
| 320 |
+
# We fuzzed successfully and found bug(s) in the fuzz targets.
|
| 321 |
+
return RunFuzzersResult.BUG_FOUND
|
local-test-commons-compress-full-01-vuln_5/fuzz-tooling/infra/constants.py
ADDED
|
@@ -0,0 +1,49 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2021 Google LLC
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
#
|
| 15 |
+
################################################################################
|
| 16 |
+
"""Constants for OSS-Fuzz."""
|
| 17 |
+
|
| 18 |
+
DEFAULT_EXTERNAL_BUILD_INTEGRATION_PATH = '.clusterfuzzlite'
|
| 19 |
+
|
| 20 |
+
DEFAULT_LANGUAGE = 'c++'
|
| 21 |
+
DEFAULT_SANITIZER = 'address'
|
| 22 |
+
DEFAULT_ARCHITECTURE = 'x86_64'
|
| 23 |
+
DEFAULT_ENGINE = 'libfuzzer'
|
| 24 |
+
LANGUAGES = [
|
| 25 |
+
'c',
|
| 26 |
+
'c++',
|
| 27 |
+
'go',
|
| 28 |
+
'javascript',
|
| 29 |
+
'jvm',
|
| 30 |
+
'python',
|
| 31 |
+
'rust',
|
| 32 |
+
'swift',
|
| 33 |
+
'ruby',
|
| 34 |
+
]
|
| 35 |
+
LANGUAGES_WITH_COVERAGE_SUPPORT = [
|
| 36 |
+
'c', 'c++', 'go', 'jvm', 'python', 'rust', 'swift', 'javascript', 'ruby'
|
| 37 |
+
]
|
| 38 |
+
SANITIZERS = [
|
| 39 |
+
'address',
|
| 40 |
+
'none',
|
| 41 |
+
'memory',
|
| 42 |
+
'undefined',
|
| 43 |
+
'thread',
|
| 44 |
+
'coverage',
|
| 45 |
+
'introspector',
|
| 46 |
+
'hwaddress',
|
| 47 |
+
]
|
| 48 |
+
ARCHITECTURES = ['i386', 'x86_64', 'aarch64']
|
| 49 |
+
ENGINES = ['libfuzzer', 'afl', 'honggfuzz', 'centipede', 'none', 'wycheproof']
|
local-test-commons-compress-full-01-vuln_5/fuzz-tooling/infra/experimental/SystemSan/Makefile
ADDED
|
@@ -0,0 +1,41 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
.POSIX:
|
| 2 |
+
CXX = clang++
|
| 3 |
+
CFLAGS = -std=c++17 -Wall -Wextra -O3 -g3 -Werror
|
| 4 |
+
|
| 5 |
+
all: SystemSan target target_file target_dns
|
| 6 |
+
|
| 7 |
+
SystemSan: SystemSan.cpp inspect_dns.cpp inspect_utils.cpp
|
| 8 |
+
$(CXX) $(CFLAGS) -lpthread -o $@ $^
|
| 9 |
+
|
| 10 |
+
# Needs atheris.
|
| 11 |
+
python-test: all
|
| 12 |
+
./SystemSan python shell_injection_poc_fuzzer.py
|
| 13 |
+
|
| 14 |
+
target: target.cpp
|
| 15 |
+
$(CXX) $(CFLAGS) -fsanitize=address,fuzzer -o $@ $^
|
| 16 |
+
|
| 17 |
+
target_file: target_file.cpp
|
| 18 |
+
$(CXX) $(CFLAGS) -fsanitize=address,fuzzer -o $@ $^
|
| 19 |
+
|
| 20 |
+
target_dns: target_dns.cpp
|
| 21 |
+
$(CXX) $(CFLAGS) -fsanitize=address,fuzzer -o $@ $^
|
| 22 |
+
|
| 23 |
+
test: all vuln.dict
|
| 24 |
+
./SystemSan ./target -dict=vuln.dict
|
| 25 |
+
./SystemSan ./target_file -dict=vuln.dict
|
| 26 |
+
./SystemSan ./target_dns -dict=vuln.dict
|
| 27 |
+
|
| 28 |
+
pytorch-lightning-1.5.10:
|
| 29 |
+
cp SystemSan.cpp PoEs/pytorch-lightning-1.5.10/; \
|
| 30 |
+
cd PoEs/pytorch-lightning-1.5.10/; \
|
| 31 |
+
docker build . --tag syssan_pytorch-lightning; \
|
| 32 |
+
docker run -t systemsan_pytorch-lightning:latest;
|
| 33 |
+
|
| 34 |
+
node-shell-quote-v1.7.3:
|
| 35 |
+
cp SystemSan.cpp PoEs/node-shell-quote-v1.7.3/; \
|
| 36 |
+
cd PoEs/node-shell-quote-v1.7.3/; \
|
| 37 |
+
docker build . --tag systemsan_node-shell-quote; \
|
| 38 |
+
docker run -t systemsan_node-shell-quote:latest;
|
| 39 |
+
|
| 40 |
+
clean:
|
| 41 |
+
rm -f SystemSan /tmp/tripwire target target_file target_dns
|
local-test-commons-compress-full-01-vuln_5/fuzz-tooling/infra/experimental/SystemSan/PoEs/node-shell-quote-v1.7.3/build.sh
ADDED
|
@@ -0,0 +1,22 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/bin/bash -eu
|
| 2 |
+
# Copyright 2022 Google LLC
|
| 3 |
+
#
|
| 4 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 5 |
+
# you may not use this file except in compliance with the License.
|
| 6 |
+
# You may obtain a copy of the License at
|
| 7 |
+
#
|
| 8 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 9 |
+
#
|
| 10 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 11 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 12 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 13 |
+
# See the License for the specific language governing permissions and
|
| 14 |
+
# limitations under the License.
|
| 15 |
+
#
|
| 16 |
+
################################################################################
|
| 17 |
+
|
| 18 |
+
# Build and install project (using current CFLAGS, CXXFLAGS).
|
| 19 |
+
npm install ./node-shell-quote/
|
| 20 |
+
|
| 21 |
+
# Build fuzzers.
|
| 22 |
+
npm i -g @gitlab-org/jsfuzz
|
local-test-commons-compress-full-01-vuln_5/fuzz-tooling/infra/experimental/SystemSan/PoEs/pytorch-lightning-1.5.10/Makefile
ADDED
|
@@ -0,0 +1,12 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
.POSIX:
|
| 2 |
+
CXX = clang++
|
| 3 |
+
CFLAGS = -std=c++17 -Wall -Wextra -O3 -g3
|
| 4 |
+
|
| 5 |
+
SystemSan: SystemSan.cpp
|
| 6 |
+
$(CXX) $(CFLAGS) -lpthread -o $@ $^
|
| 7 |
+
|
| 8 |
+
run: clean SystemSan fuzz_pytorch_lightning.py
|
| 9 |
+
./SystemSan ./fuzz_pytorch_lightning.py -dict=vuln.dict
|
| 10 |
+
|
| 11 |
+
clean:
|
| 12 |
+
rm -f SystemSan /tmp/tripwire
|
local-test-commons-compress-full-01-vuln_5/fuzz-tooling/infra/experimental/SystemSan/README.md
ADDED
|
@@ -0,0 +1,66 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# System Sanitizers
|
| 2 |
+
|
| 3 |
+
We use `ptrace` to instrument system calls made by the target program to detect
|
| 4 |
+
various vulnerabilities.
|
| 5 |
+
|
| 6 |
+
## Command injection
|
| 7 |
+
|
| 8 |
+
This detector currently works by
|
| 9 |
+
|
| 10 |
+
- Checking if `execve` is called with `/tmp/tripwire` (which comes from our dictionary).
|
| 11 |
+
- Checking if `execve` is invoking a shell with invalid syntax. This is likely
|
| 12 |
+
caused by our input.
|
| 13 |
+
|
| 14 |
+
## Arbitrary file open
|
| 15 |
+
|
| 16 |
+
TODO: documentation.
|
| 17 |
+
|
| 18 |
+
## Proof of concept
|
| 19 |
+
|
| 20 |
+
### Cleanup
|
| 21 |
+
Note this will delete /tmp/tripwire if it exists.
|
| 22 |
+
```shell
|
| 23 |
+
make clean
|
| 24 |
+
```
|
| 25 |
+
|
| 26 |
+
### Run test
|
| 27 |
+
Note this will overwrite /tmp/tripwire if it exists.
|
| 28 |
+
```shell
|
| 29 |
+
make test
|
| 30 |
+
```
|
| 31 |
+
|
| 32 |
+
Look for one of the following lines:
|
| 33 |
+
|
| 34 |
+
> ===BUG DETECTED: Shell injection===
|
| 35 |
+
|
| 36 |
+
which indicates the detection of executing the planted `/tmp/tripwire`.
|
| 37 |
+
|
| 38 |
+
|
| 39 |
+
> ===BUG DETECTED: Shell corruption===
|
| 40 |
+
|
| 41 |
+
which indicates the detection of executing a syntactic erroneous command.
|
| 42 |
+
|
| 43 |
+
|
| 44 |
+
### Command injection PoC in Python with `pytorch-lightning`
|
| 45 |
+
With `SystemSan`, [`Artheris`](https://github.com/google/atheris) can detect a shell injection bug in [version v1.5.10 of `pytorch-lightning`](https://github.com/PyTorchLightning/pytorch-lightning/tree/1.5.0).
|
| 46 |
+
```shell
|
| 47 |
+
make pytorch-lightning-1.5.10
|
| 48 |
+
```
|
| 49 |
+
|
| 50 |
+
### Command injection PoC in JavaScript with `shell-quote`
|
| 51 |
+
With `SystemSan`, [`Jsfuzz`](https://gitlab.com/gitlab-org/security-products/analyzers/fuzzers/jsfuzz) can detect a shell corrpution bug in [the latest version (v1.7.3) of `shell-quote`](https://github.com/substack/node-shell-quote) without any seed.
|
| 52 |
+
```shell
|
| 53 |
+
make node-shell-quote-v1.7.3
|
| 54 |
+
```
|
| 55 |
+
This is based on [a shell injection exploit report](https://wh0.github.io/2021/10/28/shell-quote-rce-exploiting.html) of [version v1.7.2 of `shell-quote`](https://github.com/substack/node-shell-quote/tree/v1.7.2).
|
| 56 |
+
`SystemSan` can also discover the same shell injection bug with a corpus file containing:
|
| 57 |
+
```
|
| 58 |
+
`:`/tmp/tripwire``:`
|
| 59 |
+
```
|
| 60 |
+
|
| 61 |
+
## Trophies
|
| 62 |
+
|
| 63 |
+
- <https://github.com/syoyo/tinygltf/issues/368>
|
| 64 |
+
- <https://github.com/substack/node-shell-quote/issues/54>
|
| 65 |
+
|
| 66 |
+
|
local-test-commons-compress-full-01-vuln_5/fuzz-tooling/infra/experimental/SystemSan/SystemSan.cpp
ADDED
|
@@ -0,0 +1,493 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
/*
|
| 2 |
+
* Copyright 2022 Google LLC
|
| 3 |
+
|
| 4 |
+
* Licensed under the Apache License, Version 2.0 (the "License");
|
| 5 |
+
* you may not use this file except in compliance with the License.
|
| 6 |
+
* You may obtain a copy of the License at
|
| 7 |
+
|
| 8 |
+
* http://www.apache.org/licenses/LICENSE-2.0
|
| 9 |
+
|
| 10 |
+
* Unless required by applicable law or agreed to in writing, software
|
| 11 |
+
* distributed under the License is distributed on an "AS IS" BASIS,
|
| 12 |
+
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 13 |
+
* See the License for the specific language governing permissions and
|
| 14 |
+
* limitations under the License.
|
| 15 |
+
*/
|
| 16 |
+
/* A detector that uses ptrace to identify shell injection vulnerabilities. */
|
| 17 |
+
|
| 18 |
+
/* C standard library */
|
| 19 |
+
#include <errno.h>
|
| 20 |
+
#include <signal.h>
|
| 21 |
+
#include <stdio.h>
|
| 22 |
+
#include <string.h>
|
| 23 |
+
|
| 24 |
+
/* POSIX */
|
| 25 |
+
#include <sys/stat.h>
|
| 26 |
+
#include <sys/user.h>
|
| 27 |
+
#include <sys/wait.h>
|
| 28 |
+
#include <unistd.h>
|
| 29 |
+
|
| 30 |
+
/* Linux */
|
| 31 |
+
#include <sys/ptrace.h>
|
| 32 |
+
#include <syscall.h>
|
| 33 |
+
#include <fcntl.h>
|
| 34 |
+
|
| 35 |
+
#include <fstream>
|
| 36 |
+
#include <iostream>
|
| 37 |
+
#include <map>
|
| 38 |
+
#include <set>
|
| 39 |
+
#include <sstream>
|
| 40 |
+
#include <string>
|
| 41 |
+
#include <vector>
|
| 42 |
+
|
| 43 |
+
#include "inspect_utils.h"
|
| 44 |
+
#include "inspect_dns.h"
|
| 45 |
+
|
| 46 |
+
#define DEBUG_LOGS 0
|
| 47 |
+
|
| 48 |
+
#if DEBUG_LOGS
|
| 49 |
+
#define debug_log(...) \
|
| 50 |
+
do { \
|
| 51 |
+
fprintf(stderr, __VA_ARGS__); \
|
| 52 |
+
fflush(stdout); \
|
| 53 |
+
fputc('\n', stderr); \
|
| 54 |
+
} while (0)
|
| 55 |
+
#else
|
| 56 |
+
#define debug_log(...)
|
| 57 |
+
#endif
|
| 58 |
+
|
| 59 |
+
#define fatal_log(...) \
|
| 60 |
+
do { \
|
| 61 |
+
fprintf(stderr, __VA_ARGS__); \
|
| 62 |
+
fputc('\n', stderr); \
|
| 63 |
+
exit(EXIT_FAILURE); \
|
| 64 |
+
} while (0)
|
| 65 |
+
|
| 66 |
+
// The magic string that we'll use to detect full control over the command
|
| 67 |
+
// executed.
|
| 68 |
+
const std::string kTripWire = "/tmp/tripwire";
|
| 69 |
+
// Shell injection bug confirmed with /tmp/tripwire.
|
| 70 |
+
const std::string kInjectionError = "Shell injection";
|
| 71 |
+
// Shell corruption bug detected based on syntax error.
|
| 72 |
+
const std::string kCorruptionError = "Shell corruption";
|
| 73 |
+
// The magic string that we'll use to detect arbitrary file open
|
| 74 |
+
const std::string kFzAbsoluteDirectory = "/fz/";
|
| 75 |
+
// Arbitrary file open in /fz/
|
| 76 |
+
const std::string kArbitraryFileOpenError = "Arbitrary file open";
|
| 77 |
+
// Assuming only shorter (than this constant) top dir are legitly used.
|
| 78 |
+
constexpr int kRootDirMaxLength = 16;
|
| 79 |
+
|
| 80 |
+
// The PID of the root process we're fuzzing.
|
| 81 |
+
pid_t g_root_pid;
|
| 82 |
+
|
| 83 |
+
// Map of a PID/TID its PID/TID creator and wether it ran exec.
|
| 84 |
+
std::map<pid_t, ThreadParent> root_pids;
|
| 85 |
+
|
| 86 |
+
// Assuming the longest pathname is "/bin/bash".
|
| 87 |
+
constexpr int kShellPathnameLength = 20;
|
| 88 |
+
|
| 89 |
+
// Syntax error messages of each shell.
|
| 90 |
+
const std::map<std::string, std::set<std::string>> kShellSyntaxErrors = {
|
| 91 |
+
{"bash",
|
| 92 |
+
{
|
| 93 |
+
": command not found", // General
|
| 94 |
+
": syntax error", // Unfinished " or ' or ` or if, leading | or ;
|
| 95 |
+
": missing `]'", // Unfinished [
|
| 96 |
+
": event not found", // ! leads large numbers
|
| 97 |
+
": No such file or directory", // Leading < or /
|
| 98 |
+
}},
|
| 99 |
+
{"csh",
|
| 100 |
+
{
|
| 101 |
+
": Command not found.", // General
|
| 102 |
+
": Missing }.", // Unfinished {
|
| 103 |
+
"Too many ('s.", // Unfinished (
|
| 104 |
+
"Invalid null command.", // Leading | or < or >
|
| 105 |
+
"Missing name for redirect.", // Single < or >
|
| 106 |
+
": No match.", // Leading ? or [ or *
|
| 107 |
+
"Modifier failed.", // Leading ^
|
| 108 |
+
"No previous left hand side.", // A ^
|
| 109 |
+
": No such job.", // Leading %
|
| 110 |
+
": No current job.", // A %
|
| 111 |
+
": Undefined variable.", // Containing $
|
| 112 |
+
": Event not found.", // ! leads large numbers
|
| 113 |
+
// TODO: Make this more specific.
|
| 114 |
+
"Unmatched", // Unfinished " or ' or `, leading ;
|
| 115 |
+
}},
|
| 116 |
+
{"dash",
|
| 117 |
+
{
|
| 118 |
+
"not found", // General
|
| 119 |
+
"Syntax error", // Unfinished " or ' or ` or if, leading | or ; or &
|
| 120 |
+
"missing ]", // Unfinished [
|
| 121 |
+
"No such file", // Leading <
|
| 122 |
+
}},
|
| 123 |
+
{"zsh",
|
| 124 |
+
{
|
| 125 |
+
": command not found", // General
|
| 126 |
+
": syntax error", // Unfinished " or ' or `
|
| 127 |
+
": ']' expected", // Unfinished [
|
| 128 |
+
": no such file or directory", // Leading < or /
|
| 129 |
+
": parse error near", // Leading |, or &
|
| 130 |
+
": no such user or named directory", // Leading ~
|
| 131 |
+
}},
|
| 132 |
+
};
|
| 133 |
+
|
| 134 |
+
// Shells used by Processes.
|
| 135 |
+
std::map<pid_t, std::string> g_shell_pids;
|
| 136 |
+
|
| 137 |
+
struct Tracee {
|
| 138 |
+
pid_t pid;
|
| 139 |
+
bool syscall_enter = true;
|
| 140 |
+
|
| 141 |
+
Tracee(pid_t pid) : pid(pid) {}
|
| 142 |
+
};
|
| 143 |
+
|
| 144 |
+
pid_t run_child(char **argv) {
|
| 145 |
+
// Run the program under test with its args as a child process
|
| 146 |
+
pid_t pid = fork();
|
| 147 |
+
switch (pid) {
|
| 148 |
+
case -1:
|
| 149 |
+
fatal_log("Fork failed: %s", strerror(errno));
|
| 150 |
+
case 0:
|
| 151 |
+
raise(SIGSTOP);
|
| 152 |
+
execvp(argv[0], argv);
|
| 153 |
+
fatal_log("execvp: %s", strerror(errno));
|
| 154 |
+
}
|
| 155 |
+
return pid;
|
| 156 |
+
}
|
| 157 |
+
|
| 158 |
+
// Construct a string with the memory specified in a register.
|
| 159 |
+
std::string read_string(pid_t pid, unsigned long reg, unsigned long length) {
|
| 160 |
+
auto memory = read_memory(pid, reg, length);
|
| 161 |
+
if (!memory.size()) {
|
| 162 |
+
return "";
|
| 163 |
+
}
|
| 164 |
+
|
| 165 |
+
std::string content(reinterpret_cast<char *>(memory.data()),
|
| 166 |
+
std::min(memory.size(), length));
|
| 167 |
+
return content;
|
| 168 |
+
}
|
| 169 |
+
|
| 170 |
+
void inspect_for_injection(pid_t pid, const user_regs_struct ®s) {
|
| 171 |
+
// Inspect a PID's registers for the sign of shell injection.
|
| 172 |
+
std::string path = read_string(pid, regs.rdi, kTripWire.length());
|
| 173 |
+
if (!path.length()) {
|
| 174 |
+
return;
|
| 175 |
+
}
|
| 176 |
+
debug_log("inspecting");
|
| 177 |
+
if (path == kTripWire) {
|
| 178 |
+
report_bug(kInjectionError, pid);
|
| 179 |
+
}
|
| 180 |
+
}
|
| 181 |
+
|
| 182 |
+
std::string get_pathname(pid_t pid, const user_regs_struct ®s) {
|
| 183 |
+
// Parse the pathname from the memory specified in the RDI register.
|
| 184 |
+
std::string pathname = read_string(pid, regs.rdi, kShellPathnameLength);
|
| 185 |
+
debug_log("Pathname is %s (len %lu)\n", pathname.c_str(), pathname.length());
|
| 186 |
+
return pathname;
|
| 187 |
+
}
|
| 188 |
+
|
| 189 |
+
std::string match_shell(std::string binary_pathname);
|
| 190 |
+
|
| 191 |
+
// Identify the exact shell behind sh
|
| 192 |
+
std::string identify_sh(std::string path) {
|
| 193 |
+
char shell_pathname[kShellPathnameLength];
|
| 194 |
+
auto written = readlink(path.c_str(), shell_pathname, kShellPathnameLength - 1);
|
| 195 |
+
if (written == -1) {
|
| 196 |
+
std::cerr << "Cannot query which shell is behind sh: readlink failed on "
|
| 197 |
+
<< path << ": "
|
| 198 |
+
<< strerror(errno) << "\n";
|
| 199 |
+
std::cerr << "Assuming the shell is dash\n";
|
| 200 |
+
return "dash";
|
| 201 |
+
}
|
| 202 |
+
shell_pathname[written] = '\0';
|
| 203 |
+
|
| 204 |
+
debug_log("sh links to %s\n", shell_pathname);
|
| 205 |
+
std::string shell_pathname_str(shell_pathname);
|
| 206 |
+
|
| 207 |
+
return match_shell(shell_pathname_str);
|
| 208 |
+
}
|
| 209 |
+
|
| 210 |
+
std::string match_shell(std::string binary_pathname) {
|
| 211 |
+
// Identify the name of the shell used in the pathname.
|
| 212 |
+
if (!binary_pathname.length()) {
|
| 213 |
+
return "";
|
| 214 |
+
}
|
| 215 |
+
|
| 216 |
+
// We use c_str() to accept only the null terminated string.
|
| 217 |
+
std::string binary_name = binary_pathname.substr(
|
| 218 |
+
binary_pathname.find_last_of("/") + 1).c_str();
|
| 219 |
+
|
| 220 |
+
debug_log("Binary is %s (%lu)\n", binary_name.c_str(),
|
| 221 |
+
binary_name.length());
|
| 222 |
+
|
| 223 |
+
for (const auto &item : kShellSyntaxErrors) {
|
| 224 |
+
std::string known_shell = item.first;
|
| 225 |
+
if (binary_name == "sh") {
|
| 226 |
+
debug_log("Matched sh: Needs to identify which specific shell it is.\n");
|
| 227 |
+
return identify_sh(binary_pathname);
|
| 228 |
+
}
|
| 229 |
+
if (binary_name == known_shell) {
|
| 230 |
+
debug_log("Matched %s\n", binary_name.c_str());
|
| 231 |
+
return known_shell;
|
| 232 |
+
}
|
| 233 |
+
}
|
| 234 |
+
return "";
|
| 235 |
+
}
|
| 236 |
+
|
| 237 |
+
std::string get_shell(pid_t pid, const user_regs_struct ®s) {
|
| 238 |
+
// Get shell name used in a process.
|
| 239 |
+
std::string binary_pathname = get_pathname(pid, regs);
|
| 240 |
+
return match_shell(binary_pathname);
|
| 241 |
+
}
|
| 242 |
+
|
| 243 |
+
void match_error_pattern(std::string buffer, std::string shell, pid_t pid) {
|
| 244 |
+
auto error_patterns = kShellSyntaxErrors.at(shell);
|
| 245 |
+
for (const auto &pattern : error_patterns) {
|
| 246 |
+
if (buffer.find(pattern) != std::string::npos) {
|
| 247 |
+
std::cerr << "--- Found a sign of shell corruption ---\n"
|
| 248 |
+
<< buffer.c_str()
|
| 249 |
+
<< "\n----------------------------------------\n";
|
| 250 |
+
// If a shell corruption error happens, kill its parent.
|
| 251 |
+
auto parent = root_pids[pid];
|
| 252 |
+
while (!parent.ran_exec) {
|
| 253 |
+
if (parent.parent_tid == g_root_pid) {
|
| 254 |
+
break;
|
| 255 |
+
}
|
| 256 |
+
parent = root_pids[parent.parent_tid];
|
| 257 |
+
}
|
| 258 |
+
report_bug(kCorruptionError, parent.parent_tid);
|
| 259 |
+
}
|
| 260 |
+
}
|
| 261 |
+
}
|
| 262 |
+
|
| 263 |
+
void inspect_for_corruption(pid_t pid, const user_regs_struct ®s) {
|
| 264 |
+
// Inspect a PID's registers for shell corruption.
|
| 265 |
+
std::string buffer = read_string(pid, regs.rsi, regs.rdx);
|
| 266 |
+
debug_log("Write buffer: %s\n", buffer.c_str());
|
| 267 |
+
match_error_pattern(buffer, g_shell_pids[pid], pid);
|
| 268 |
+
}
|
| 269 |
+
|
| 270 |
+
void log_file_open(std::string path, int flags, pid_t pid) {
|
| 271 |
+
report_bug(kArbitraryFileOpenError, pid);
|
| 272 |
+
std::cerr << "===File opened: " << path.c_str() << ", flags = " << flags << ",";
|
| 273 |
+
switch (flags & 3) {
|
| 274 |
+
case O_RDONLY:
|
| 275 |
+
std::cerr << "O_RDONLY";
|
| 276 |
+
break;
|
| 277 |
+
case O_WRONLY:
|
| 278 |
+
std::cerr << "O_WRONLY";
|
| 279 |
+
break;
|
| 280 |
+
case O_RDWR:
|
| 281 |
+
std::cerr << "O_RDWR";
|
| 282 |
+
break;
|
| 283 |
+
default:
|
| 284 |
+
std::cerr << "unknown";
|
| 285 |
+
}
|
| 286 |
+
std::cerr << "===\n";
|
| 287 |
+
}
|
| 288 |
+
|
| 289 |
+
bool has_unprintable(const std::string &value) {
|
| 290 |
+
for (size_t i = 0; i < value.length(); i++) {
|
| 291 |
+
if (value[i] & 0x80) {
|
| 292 |
+
return true;
|
| 293 |
+
}
|
| 294 |
+
}
|
| 295 |
+
return false;
|
| 296 |
+
}
|
| 297 |
+
|
| 298 |
+
void inspect_for_arbitrary_file_open(pid_t pid, const user_regs_struct ®s) {
|
| 299 |
+
// Inspect a PID's register for the sign of arbitrary file open.
|
| 300 |
+
std::string path = read_string(pid, regs.rsi, kRootDirMaxLength);
|
| 301 |
+
if (!path.length()) {
|
| 302 |
+
return;
|
| 303 |
+
}
|
| 304 |
+
if (path.substr(0, kFzAbsoluteDirectory.length()) == kFzAbsoluteDirectory) {
|
| 305 |
+
log_file_open(path, regs.rdx, pid);
|
| 306 |
+
return;
|
| 307 |
+
}
|
| 308 |
+
if (path[0] == '/' && path.length() > 1) {
|
| 309 |
+
std::string path_absolute_topdir = path;
|
| 310 |
+
size_t root_dir_end = path.find('/', 1);
|
| 311 |
+
if (root_dir_end != std::string::npos) {
|
| 312 |
+
path_absolute_topdir = path.substr(0, root_dir_end);
|
| 313 |
+
}
|
| 314 |
+
if (has_unprintable(path_absolute_topdir)) {
|
| 315 |
+
struct stat dirstat;
|
| 316 |
+
if (stat(path_absolute_topdir.c_str(), &dirstat) != 0) {
|
| 317 |
+
log_file_open(path, regs.rdx, pid);
|
| 318 |
+
}
|
| 319 |
+
}
|
| 320 |
+
}
|
| 321 |
+
}
|
| 322 |
+
|
| 323 |
+
int trace(std::map<pid_t, Tracee> pids) {
|
| 324 |
+
unsigned long exit_status = 0;
|
| 325 |
+
while (!pids.empty()) {
|
| 326 |
+
std::vector<pid_t> new_pids;
|
| 327 |
+
|
| 328 |
+
auto it = pids.begin();
|
| 329 |
+
|
| 330 |
+
while (it != pids.end()) {
|
| 331 |
+
auto pid = it->first;
|
| 332 |
+
auto &tracee = it->second;
|
| 333 |
+
int status = 0;
|
| 334 |
+
|
| 335 |
+
int result = waitpid(pid, &status, __WALL | WNOHANG);
|
| 336 |
+
if (result == -1) {
|
| 337 |
+
it = pids.erase(it);
|
| 338 |
+
continue;
|
| 339 |
+
}
|
| 340 |
+
|
| 341 |
+
if (result == 0) {
|
| 342 |
+
// Nothing to report yet.
|
| 343 |
+
++it;
|
| 344 |
+
continue;
|
| 345 |
+
}
|
| 346 |
+
|
| 347 |
+
if (WIFEXITED(status) || WIFSIGNALED(status)) {
|
| 348 |
+
debug_log("%d exited", pid);
|
| 349 |
+
it = pids.erase(it);
|
| 350 |
+
// Remove pid from the watchlist when it exits
|
| 351 |
+
g_shell_pids.erase(pid);
|
| 352 |
+
root_pids.erase(pid);
|
| 353 |
+
continue;
|
| 354 |
+
}
|
| 355 |
+
|
| 356 |
+
// ptrace sets 0x80 for syscalls (with PTRACE_O_TRACESYSGOOD set).
|
| 357 |
+
bool is_syscall =
|
| 358 |
+
WIFSTOPPED(status) && WSTOPSIG(status) == (SIGTRAP | 0x80);
|
| 359 |
+
int sig = 0;
|
| 360 |
+
if (!is_syscall) {
|
| 361 |
+
// Handle generic signal.
|
| 362 |
+
siginfo_t siginfo;
|
| 363 |
+
if (ptrace(PTRACE_GETSIGINFO, pid, nullptr, &siginfo) == -1) {
|
| 364 |
+
debug_log("ptrace(PTRACE_GETSIGINFO, %d): %s", pid, strerror(errno));
|
| 365 |
+
continue;
|
| 366 |
+
}
|
| 367 |
+
sig = siginfo.si_signo;
|
| 368 |
+
debug_log("forwarding signal %d to %d", sig, pid);
|
| 369 |
+
}
|
| 370 |
+
|
| 371 |
+
if ((status >> 8 == (SIGTRAP | (PTRACE_EVENT_EXIT << 8)))) {
|
| 372 |
+
debug_log("%d exiting", pid);
|
| 373 |
+
if (pid == g_root_pid) {
|
| 374 |
+
if (ptrace(PTRACE_GETEVENTMSG, pid, 0, &exit_status) == -1) {
|
| 375 |
+
debug_log("ptrace(PTRACE_GETEVENTMSG, %d): %s", pid, strerror(errno));
|
| 376 |
+
}
|
| 377 |
+
debug_log("got exit status from root process: %lu", exit_status);
|
| 378 |
+
}
|
| 379 |
+
|
| 380 |
+
if (ptrace(PTRACE_DETACH, pid, 0, 0) == -1) {
|
| 381 |
+
debug_log("ptrace(PTRACE_DETACH, %d): %s", pid, strerror(errno));
|
| 382 |
+
}
|
| 383 |
+
continue;
|
| 384 |
+
}
|
| 385 |
+
|
| 386 |
+
if (WIFSTOPPED(status) &&
|
| 387 |
+
(status >> 8 == (SIGTRAP | (PTRACE_EVENT_CLONE << 8)) ||
|
| 388 |
+
status >> 8 == (SIGTRAP | (PTRACE_EVENT_FORK << 8)) ||
|
| 389 |
+
status >> 8 == (SIGTRAP | (PTRACE_EVENT_VFORK << 8)))) {
|
| 390 |
+
long new_pid;
|
| 391 |
+
if (ptrace(PTRACE_GETEVENTMSG, pid, 0, &new_pid) == -1) {
|
| 392 |
+
debug_log("ptrace(PTRACE_GETEVENTMSG, %d): %s", pid, strerror(errno));
|
| 393 |
+
continue;
|
| 394 |
+
}
|
| 395 |
+
debug_log("forked %ld", new_pid);
|
| 396 |
+
new_pids.push_back(new_pid);
|
| 397 |
+
root_pids.emplace(new_pid, ThreadParent(pid));
|
| 398 |
+
}
|
| 399 |
+
|
| 400 |
+
if (is_syscall) {
|
| 401 |
+
user_regs_struct regs;
|
| 402 |
+
if (ptrace(PTRACE_GETREGS, pid, 0, ®s) == -1) {
|
| 403 |
+
debug_log("ptrace(PTRACE_GETREGS, %d): %s", pid, strerror(errno));
|
| 404 |
+
continue;
|
| 405 |
+
}
|
| 406 |
+
|
| 407 |
+
if (tracee.syscall_enter) {
|
| 408 |
+
if (regs.orig_rax == __NR_execve) {
|
| 409 |
+
// This is a new process.
|
| 410 |
+
auto parent = root_pids[pid];
|
| 411 |
+
parent.ran_exec = true;
|
| 412 |
+
root_pids[pid] = parent;
|
| 413 |
+
inspect_for_injection(pid, regs);
|
| 414 |
+
std::string shell = get_shell(pid, regs);
|
| 415 |
+
if (shell != "") {
|
| 416 |
+
debug_log("Shell parsed: %s", shell.c_str());
|
| 417 |
+
g_shell_pids.insert(std::make_pair(pid, shell));
|
| 418 |
+
}
|
| 419 |
+
}
|
| 420 |
+
|
| 421 |
+
inspect_dns_syscalls(pid, regs);
|
| 422 |
+
|
| 423 |
+
if (regs.orig_rax == __NR_openat) {
|
| 424 |
+
// TODO(metzman): Re-enable this once we have config/flag support.
|
| 425 |
+
// inspect_for_arbitrary_file_open(pid, regs);
|
| 426 |
+
}
|
| 427 |
+
|
| 428 |
+
if (regs.orig_rax == __NR_write &&
|
| 429 |
+
g_shell_pids.find(pid) != g_shell_pids.end()) {
|
| 430 |
+
debug_log("Inspecting the `write` buffer of shell process %d.",
|
| 431 |
+
pid);
|
| 432 |
+
inspect_for_corruption(pid, regs);
|
| 433 |
+
}
|
| 434 |
+
}
|
| 435 |
+
|
| 436 |
+
// TODO: Check for commands with invalid syntax passed to /bin/sh and
|
| 437 |
+
// other shells.
|
| 438 |
+
// TODO: It's possible the process we're fuzzing can communicate with
|
| 439 |
+
// another process to execute code. Our check wouldn't catch this
|
| 440 |
+
// currently.
|
| 441 |
+
tracee.syscall_enter = !tracee.syscall_enter;
|
| 442 |
+
}
|
| 443 |
+
|
| 444 |
+
if (ptrace(PTRACE_SYSCALL, pid, nullptr, sig) == -1) {
|
| 445 |
+
debug_log("ptrace(PTRACE_SYSCALL, %d): %s", pid, strerror(errno));
|
| 446 |
+
continue;
|
| 447 |
+
}
|
| 448 |
+
|
| 449 |
+
++it;
|
| 450 |
+
}
|
| 451 |
+
|
| 452 |
+
for (const auto &pid : new_pids) {
|
| 453 |
+
pids.emplace(pid, Tracee(pid));
|
| 454 |
+
}
|
| 455 |
+
}
|
| 456 |
+
return static_cast<int>(exit_status >> 8);
|
| 457 |
+
}
|
| 458 |
+
|
| 459 |
+
int main(int argc, char **argv) {
|
| 460 |
+
if (argc <= 1) {
|
| 461 |
+
fatal_log("Expecting at least one arguments, received %d", argc - 1);
|
| 462 |
+
}
|
| 463 |
+
|
| 464 |
+
// Create an executable tripwire file, as programs may check for existence
|
| 465 |
+
// before actually calling exec.
|
| 466 |
+
std::ofstream tripwire(kTripWire);
|
| 467 |
+
tripwire.close();
|
| 468 |
+
chmod(kTripWire.c_str(), 0755);
|
| 469 |
+
|
| 470 |
+
pid_t pid = run_child(argv + 1);
|
| 471 |
+
|
| 472 |
+
long options = PTRACE_O_TRACESYSGOOD | PTRACE_O_TRACEFORK |
|
| 473 |
+
PTRACE_O_TRACEVFORK | PTRACE_O_TRACECLONE |
|
| 474 |
+
PTRACE_O_TRACEEXIT;
|
| 475 |
+
|
| 476 |
+
if (ptrace(PTRACE_SEIZE, pid, nullptr, options) == -1) {
|
| 477 |
+
fatal_log("ptrace(PTRACE_SEIZE): %s", strerror(errno));
|
| 478 |
+
}
|
| 479 |
+
|
| 480 |
+
if (waitpid(pid, nullptr, __WALL) == -1) {
|
| 481 |
+
fatal_log("waitpid: %s", strerror(errno));
|
| 482 |
+
}
|
| 483 |
+
|
| 484 |
+
if (ptrace(PTRACE_SYSCALL, pid, 0, 0) == -1) {
|
| 485 |
+
fatal_log("ptrace(PTRACE_SYSCALL): %s", strerror(errno));
|
| 486 |
+
}
|
| 487 |
+
|
| 488 |
+
g_root_pid = pid;
|
| 489 |
+
std::map<pid_t, Tracee> pids;
|
| 490 |
+
pids.emplace(pid, Tracee(pid));
|
| 491 |
+
root_pids.emplace(pid, ThreadParent(pid));
|
| 492 |
+
return trace(pids);
|
| 493 |
+
}
|
local-test-commons-compress-full-01-vuln_5/fuzz-tooling/infra/experimental/SystemSan/inspect_dns.cpp
ADDED
|
@@ -0,0 +1,236 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
/*
|
| 2 |
+
* Copyright 2022 Google LLC
|
| 3 |
+
|
| 4 |
+
* Licensed under the Apache License, Version 2.0 (the "License");
|
| 5 |
+
* you may not use this file except in compliance with the License.
|
| 6 |
+
* You may obtain a copy of the License at
|
| 7 |
+
|
| 8 |
+
* http://www.apache.org/licenses/LICENSE-2.0
|
| 9 |
+
|
| 10 |
+
* Unless required by applicable law or agreed to in writing, software
|
| 11 |
+
* distributed under the License is distributed on an "AS IS" BASIS,
|
| 12 |
+
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 13 |
+
* See the License for the specific language governing permissions and
|
| 14 |
+
* limitations under the License.
|
| 15 |
+
*/
|
| 16 |
+
/* A detector that uses ptrace to identify shell injection vulnerabilities. */
|
| 17 |
+
|
| 18 |
+
/* POSIX */
|
| 19 |
+
#include <sys/user.h>
|
| 20 |
+
#include <unistd.h>
|
| 21 |
+
|
| 22 |
+
/* Linux */
|
| 23 |
+
#include <arpa/inet.h>
|
| 24 |
+
#include <syscall.h>
|
| 25 |
+
#include <sys/ptrace.h>
|
| 26 |
+
|
| 27 |
+
#include <iostream>
|
| 28 |
+
|
| 29 |
+
#include "inspect_utils.h"
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
// Arbitrary domain name resolution.
|
| 33 |
+
const std::string kArbitraryDomainNameResolution = "Arbitrary domain name resolution";
|
| 34 |
+
|
| 35 |
+
// Global constant for one file descriptor about of a DNS socket
|
| 36 |
+
int kFdDns = 0;
|
| 37 |
+
const size_t kDnsHeaderLen = 12;
|
| 38 |
+
|
| 39 |
+
|
| 40 |
+
void inspect_for_arbitrary_dns_connect(pid_t pid, const user_regs_struct ®s) {
|
| 41 |
+
auto memory = read_memory(pid, regs.rsi, sizeof(struct sockaddr_in));
|
| 42 |
+
if (memory.size()) {
|
| 43 |
+
struct sockaddr_in * sa = reinterpret_cast<struct sockaddr_in *>(memory.data());
|
| 44 |
+
if (sa->sin_family == AF_INET && htons(sa->sin_port) == 53) {
|
| 45 |
+
// save file descriptor for later sendmmsg
|
| 46 |
+
kFdDns = regs.rdi;
|
| 47 |
+
}
|
| 48 |
+
}
|
| 49 |
+
}
|
| 50 |
+
|
| 51 |
+
struct DnsHeader {
|
| 52 |
+
uint16_t tx_id;
|
| 53 |
+
uint16_t flags;
|
| 54 |
+
uint16_t questions;
|
| 55 |
+
uint16_t answers;
|
| 56 |
+
uint16_t nameservers;
|
| 57 |
+
uint16_t additional;
|
| 58 |
+
};
|
| 59 |
+
|
| 60 |
+
struct DnsHeader parse_dns_header(std::vector<std::byte> data) {
|
| 61 |
+
struct DnsHeader h;
|
| 62 |
+
h.tx_id = (((uint16_t) data[0]) << 8) | ((uint16_t) data[1]);
|
| 63 |
+
h.flags = (((uint16_t) data[2]) << 8) | ((uint16_t) data[3]);
|
| 64 |
+
h.questions = (((uint16_t) data[4]) << 8) | ((uint16_t) data[5]);
|
| 65 |
+
h.answers = (((uint16_t) data[6]) << 8) | ((uint16_t) data[7]);
|
| 66 |
+
h.nameservers = (((uint16_t) data[8]) << 8) | ((uint16_t) data[9]);
|
| 67 |
+
h.additional = (((uint16_t) data[10]) << 8) | ((uint16_t) data[11]);
|
| 68 |
+
return h;
|
| 69 |
+
}
|
| 70 |
+
|
| 71 |
+
bool dns_flags_standard_query(uint16_t flags) {
|
| 72 |
+
if ((flags & 0x8000) == 0) {
|
| 73 |
+
// Query, not response.
|
| 74 |
+
if (((flags & 0x7800) >> 11) == 0) {
|
| 75 |
+
// Opcode 0 is standard query.
|
| 76 |
+
if ((flags & 0x0200) == 0) {
|
| 77 |
+
// Message is not truncated.
|
| 78 |
+
if ((flags & 0x0040) == 0) {
|
| 79 |
+
// Z-bit reserved flag is unset.
|
| 80 |
+
return true;
|
| 81 |
+
}
|
| 82 |
+
}
|
| 83 |
+
}
|
| 84 |
+
}
|
| 85 |
+
return false;
|
| 86 |
+
}
|
| 87 |
+
|
| 88 |
+
struct DnsRequest {
|
| 89 |
+
// Start of name in the byte vector.
|
| 90 |
+
size_t offset;
|
| 91 |
+
// End of name in the byte vector.
|
| 92 |
+
size_t end;
|
| 93 |
+
// Length of top level domain.
|
| 94 |
+
uint8_t tld_size;
|
| 95 |
+
// Number of levels/dots in domain name.
|
| 96 |
+
size_t nb_levels;
|
| 97 |
+
// DNS type like A is 1.
|
| 98 |
+
uint16_t dns_type;
|
| 99 |
+
// DNS class like IN is 1.
|
| 100 |
+
uint16_t dns_class;
|
| 101 |
+
};
|
| 102 |
+
|
| 103 |
+
struct DnsRequest parse_dns_request(std::vector<std::byte> data, size_t offset) {
|
| 104 |
+
struct DnsRequest r;
|
| 105 |
+
r.offset = offset;
|
| 106 |
+
r.tld_size = 0;
|
| 107 |
+
r.nb_levels = 0;
|
| 108 |
+
while(offset < data.size()) {
|
| 109 |
+
uint8_t rlen = uint8_t(data[offset]);
|
| 110 |
+
if (rlen == 0) {
|
| 111 |
+
offset++;
|
| 112 |
+
break;
|
| 113 |
+
}
|
| 114 |
+
r.nb_levels++;
|
| 115 |
+
offset += rlen+1;
|
| 116 |
+
r.tld_size = rlen;
|
| 117 |
+
}
|
| 118 |
+
if (offset <= 4 + data.size()) {
|
| 119 |
+
r.end = offset;
|
| 120 |
+
r.dns_type = (((uint16_t) data[offset]) << 8) | ((uint16_t) data[offset+1]);
|
| 121 |
+
r.dns_class = (((uint16_t) data[offset+2]) << 8) | ((uint16_t) data[offset+3]);
|
| 122 |
+
} else {
|
| 123 |
+
r.end = data.size();
|
| 124 |
+
}
|
| 125 |
+
return r;
|
| 126 |
+
}
|
| 127 |
+
|
| 128 |
+
void log_dns_request(struct DnsRequest r, std::vector<std::byte> data) {
|
| 129 |
+
size_t offset = r.offset;
|
| 130 |
+
std::cerr << "===Domain resolved: ";
|
| 131 |
+
while(offset < r.end) {
|
| 132 |
+
uint8_t rlen = uint8_t(data[offset]);
|
| 133 |
+
if (rlen == 0) {
|
| 134 |
+
break;
|
| 135 |
+
}
|
| 136 |
+
std::cerr << '.';
|
| 137 |
+
for (uint8_t i = 1; i < rlen+1; i++) {
|
| 138 |
+
std::cerr << (char) data[offset + i];
|
| 139 |
+
}
|
| 140 |
+
offset += rlen+1;
|
| 141 |
+
}
|
| 142 |
+
std::cerr << "===\n";
|
| 143 |
+
std::cerr << "===DNS request type: " << r.dns_type << ", class: " << r.dns_class << "===\n";
|
| 144 |
+
}
|
| 145 |
+
|
| 146 |
+
void inspect_for_arbitrary_dns_pkt(std::vector<std::byte> data, pid_t pid) {
|
| 147 |
+
if (data.size() < kDnsHeaderLen + 1) {
|
| 148 |
+
return;
|
| 149 |
+
}
|
| 150 |
+
struct DnsHeader h = parse_dns_header(data);
|
| 151 |
+
if (h.questions != 1) {
|
| 152 |
+
return;
|
| 153 |
+
}
|
| 154 |
+
if (h.answers != 0 || h.nameservers != 0) {
|
| 155 |
+
return;
|
| 156 |
+
}
|
| 157 |
+
if (!dns_flags_standard_query(h.flags)) {
|
| 158 |
+
return;
|
| 159 |
+
}
|
| 160 |
+
|
| 161 |
+
struct DnsRequest req = parse_dns_request(data, kDnsHeaderLen);
|
| 162 |
+
// Alert if the top level domain is only one character and
|
| 163 |
+
// if there is more than just the TLD.
|
| 164 |
+
if (req.tld_size == 1 && req.nb_levels > 1 && req.end < data.size()) {
|
| 165 |
+
report_bug(kArbitraryDomainNameResolution, pid);
|
| 166 |
+
log_dns_request(req, data);
|
| 167 |
+
}
|
| 168 |
+
}
|
| 169 |
+
|
| 170 |
+
void inspect_for_arbitrary_dns_fdbuffer(pid_t pid, const user_regs_struct ®s) {
|
| 171 |
+
if (kFdDns > 0 && kFdDns == (int) regs.rdi) {
|
| 172 |
+
auto memory = read_memory(pid, regs.rsi, regs.rdx);
|
| 173 |
+
if (memory.size()) {
|
| 174 |
+
inspect_for_arbitrary_dns_pkt(memory, pid);
|
| 175 |
+
}
|
| 176 |
+
}
|
| 177 |
+
}
|
| 178 |
+
|
| 179 |
+
void inspect_for_arbitrary_dns_iov(pid_t pid, unsigned long iov) {
|
| 180 |
+
auto memory = read_memory(pid, iov, sizeof(struct iovec));
|
| 181 |
+
if (memory.size()) {
|
| 182 |
+
struct iovec * iovec = reinterpret_cast<struct iovec *>(memory.data());
|
| 183 |
+
memory = read_memory(pid, (unsigned long) iovec->iov_base, iovec->iov_len);
|
| 184 |
+
if (memory.size()) {
|
| 185 |
+
inspect_for_arbitrary_dns_pkt(memory, pid);
|
| 186 |
+
}
|
| 187 |
+
}
|
| 188 |
+
}
|
| 189 |
+
|
| 190 |
+
void inspect_for_arbitrary_dns_sendmsg(pid_t pid, const user_regs_struct ®s) {
|
| 191 |
+
if (kFdDns > 0 && kFdDns == (int) regs.rdi) {
|
| 192 |
+
auto memory = read_memory(pid, regs.rsi, sizeof(struct msghdr));
|
| 193 |
+
if (memory.size()) {
|
| 194 |
+
struct msghdr * msg = reinterpret_cast<struct msghdr *>(memory.data());
|
| 195 |
+
if (msg->msg_iovlen == 1) {
|
| 196 |
+
inspect_for_arbitrary_dns_iov(pid, (unsigned long) msg->msg_iov);
|
| 197 |
+
}
|
| 198 |
+
}
|
| 199 |
+
}
|
| 200 |
+
}
|
| 201 |
+
|
| 202 |
+
void inspect_for_arbitrary_dns_sendmmsg(pid_t pid, const user_regs_struct ®s) {
|
| 203 |
+
if (kFdDns > 0 && kFdDns == (int) regs.rdi) {
|
| 204 |
+
auto memory = read_memory(pid, regs.rsi, sizeof(struct mmsghdr));
|
| 205 |
+
if (memory.size()) {
|
| 206 |
+
struct mmsghdr * msg = reinterpret_cast<struct mmsghdr *>(memory.data());
|
| 207 |
+
if (msg->msg_hdr.msg_iovlen == 1) {
|
| 208 |
+
inspect_for_arbitrary_dns_iov(pid, (unsigned long) msg->msg_hdr.msg_iov);
|
| 209 |
+
}
|
| 210 |
+
}
|
| 211 |
+
}
|
| 212 |
+
}
|
| 213 |
+
|
| 214 |
+
void inspect_dns_syscalls(pid_t pid, const user_regs_struct ®s) {
|
| 215 |
+
switch (regs.orig_rax) {
|
| 216 |
+
case __NR_connect:
|
| 217 |
+
inspect_for_arbitrary_dns_connect(pid, regs);
|
| 218 |
+
break;
|
| 219 |
+
case __NR_close:
|
| 220 |
+
if (kFdDns > 0 && kFdDns == (int) regs.rdi) {
|
| 221 |
+
// reset DNS file descriptor on close
|
| 222 |
+
kFdDns = 0;
|
| 223 |
+
}
|
| 224 |
+
break;
|
| 225 |
+
case __NR_sendmmsg:
|
| 226 |
+
inspect_for_arbitrary_dns_sendmmsg(pid, regs);
|
| 227 |
+
break;
|
| 228 |
+
case __NR_sendmsg:
|
| 229 |
+
inspect_for_arbitrary_dns_sendmsg(pid, regs);
|
| 230 |
+
break;
|
| 231 |
+
case __NR_sendto:
|
| 232 |
+
// fallthrough
|
| 233 |
+
case __NR_write:
|
| 234 |
+
inspect_for_arbitrary_dns_fdbuffer(pid, regs);
|
| 235 |
+
}
|
| 236 |
+
}
|
local-test-commons-compress-full-01-vuln_5/fuzz-tooling/infra/experimental/SystemSan/inspect_utils.h
ADDED
|
@@ -0,0 +1,39 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
/*
|
| 2 |
+
* Copyright 2022 Google LLC
|
| 3 |
+
|
| 4 |
+
* Licensed under the Apache License, Version 2.0 (the "License");
|
| 5 |
+
* you may not use this file except in compliance with the License.
|
| 6 |
+
* You may obtain a copy of the License at
|
| 7 |
+
|
| 8 |
+
* http://www.apache.org/licenses/LICENSE-2.0
|
| 9 |
+
|
| 10 |
+
* Unless required by applicable law or agreed to in writing, software
|
| 11 |
+
* distributed under the License is distributed on an "AS IS" BASIS,
|
| 12 |
+
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 13 |
+
* See the License for the specific language governing permissions and
|
| 14 |
+
* limitations under the License.
|
| 15 |
+
*/
|
| 16 |
+
/* A detector that uses ptrace to identify DNS arbitrary resolutions. */
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
/* POSIX */
|
| 20 |
+
#include <unistd.h>
|
| 21 |
+
|
| 22 |
+
#include <string>
|
| 23 |
+
#include <vector>
|
| 24 |
+
|
| 25 |
+
// Structure to know which thread id triggered the bug.
|
| 26 |
+
struct ThreadParent {
|
| 27 |
+
// Parent thread ID, ie creator.
|
| 28 |
+
pid_t parent_tid;
|
| 29 |
+
// Current thread ID ran exec to become another process.
|
| 30 |
+
bool ran_exec = false;
|
| 31 |
+
|
| 32 |
+
ThreadParent() : parent_tid(0) {}
|
| 33 |
+
ThreadParent(pid_t tid) : parent_tid(tid) {}
|
| 34 |
+
};
|
| 35 |
+
|
| 36 |
+
std::vector<std::byte> read_memory(pid_t pid, unsigned long long address,
|
| 37 |
+
size_t size);
|
| 38 |
+
|
| 39 |
+
void report_bug(std::string bug_type, pid_t tid);
|
local-test-commons-compress-full-01-vuln_5/fuzz-tooling/infra/experimental/SystemSan/target.cpp
ADDED
|
@@ -0,0 +1,28 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
/*
|
| 2 |
+
* Copyright 2022 Google LLC
|
| 3 |
+
|
| 4 |
+
* Licensed under the Apache License, Version 2.0 (the "License");
|
| 5 |
+
* you may not use this file except in compliance with the License.
|
| 6 |
+
* You may obtain a copy of the License at
|
| 7 |
+
|
| 8 |
+
* http://www.apache.org/licenses/LICENSE-2.0
|
| 9 |
+
|
| 10 |
+
* Unless required by applicable law or agreed to in writing, software
|
| 11 |
+
* distributed under the License is distributed on an "AS IS" BASIS,
|
| 12 |
+
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 13 |
+
* See the License for the specific language governing permissions and
|
| 14 |
+
* limitations under the License.
|
| 15 |
+
*/
|
| 16 |
+
/* A sample target program under test,
|
| 17 |
+
* /tmp/tripwire or other commands will be injected into its shell command */
|
| 18 |
+
|
| 19 |
+
#include <stdlib.h>
|
| 20 |
+
#include <string>
|
| 21 |
+
#include <iostream>
|
| 22 |
+
|
| 23 |
+
extern "C" int LLVMFuzzerTestOneInput(char* data, size_t size) {
|
| 24 |
+
std::string str(data, size);
|
| 25 |
+
std::cout << "INPUT" << str << std::endl;
|
| 26 |
+
system(str.c_str());
|
| 27 |
+
return 0;
|
| 28 |
+
}
|
local-test-commons-compress-full-01-vuln_5/fuzz-tooling/infra/experimental/SystemSan/target_dns.cpp
ADDED
|
@@ -0,0 +1,39 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
/*
|
| 2 |
+
* Copyright 2022 Google LLC
|
| 3 |
+
|
| 4 |
+
* Licensed under the Apache License, Version 2.0 (the "License");
|
| 5 |
+
* you may not use this file except in compliance with the License.
|
| 6 |
+
* You may obtain a copy of the License at
|
| 7 |
+
|
| 8 |
+
* http://www.apache.org/licenses/LICENSE-2.0
|
| 9 |
+
|
| 10 |
+
* Unless required by applicable law or agreed to in writing, software
|
| 11 |
+
* distributed under the License is distributed on an "AS IS" BASIS,
|
| 12 |
+
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 13 |
+
* See the License for the specific language governing permissions and
|
| 14 |
+
* limitations under the License.
|
| 15 |
+
*/
|
| 16 |
+
|
| 17 |
+
#include <stdlib.h>
|
| 18 |
+
#include <stdio.h>
|
| 19 |
+
#include <string>
|
| 20 |
+
#include <iostream>
|
| 21 |
+
|
| 22 |
+
#include <string.h>
|
| 23 |
+
#include <sys/types.h>
|
| 24 |
+
#include <sys/socket.h>
|
| 25 |
+
#include <netdb.h>
|
| 26 |
+
|
| 27 |
+
extern "C" int LLVMFuzzerTestOneInput(char* data, size_t size) {
|
| 28 |
+
std::string str(data, size);
|
| 29 |
+
std::cout << "INPUT" << str << std::endl;
|
| 30 |
+
|
| 31 |
+
struct addrinfo *result = NULL;
|
| 32 |
+
|
| 33 |
+
getaddrinfo(str.c_str(), NULL, NULL, &result);
|
| 34 |
+
if (result) {
|
| 35 |
+
freeaddrinfo(result);
|
| 36 |
+
}
|
| 37 |
+
|
| 38 |
+
return 0;
|
| 39 |
+
}
|
local-test-commons-compress-full-01-vuln_5/fuzz-tooling/infra/experimental/SystemSan/vuln.dict
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"/tmp/tripwire"
|
| 2 |
+
"/fz/"
|
| 3 |
+
"f.z"
|
local-test-commons-compress-full-01-vuln_5/fuzz-tooling/infra/experimental/chronos/build_cache_local.sh
ADDED
|
@@ -0,0 +1,163 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/bin/bash -eu
|
| 2 |
+
# Copyright 2024 Google LLC
|
| 3 |
+
#
|
| 4 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 5 |
+
# you may not use this file except in compliance with the License.
|
| 6 |
+
# You may obtain a copy of the License at
|
| 7 |
+
#
|
| 8 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 9 |
+
#
|
| 10 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 11 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 12 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 13 |
+
# See the License for the specific language governing permissions and
|
| 14 |
+
# limitations under the License.
|
| 15 |
+
#
|
| 16 |
+
################################################################################
|
| 17 |
+
|
| 18 |
+
_PROJECT=$1
|
| 19 |
+
_FUZZING_LANGUAGE=$2
|
| 20 |
+
_SANITIZER=${3:-address}
|
| 21 |
+
|
| 22 |
+
BASE=$PWD
|
| 23 |
+
|
| 24 |
+
# Final image is either ccache or replay script, depending on which worked.
|
| 25 |
+
FINAL_IMAGE_NAME=us-central1-docker.pkg.dev/oss-fuzz/oss-fuzz-gen/${_PROJECT}-ofg-cached-${_SANITIZER}
|
| 26 |
+
|
| 27 |
+
# Always build an image with ccache.
|
| 28 |
+
CCACHE_IMAGE_NAME=us-central1-docker.pkg.dev/oss-fuzz/oss-fuzz-gen/${_PROJECT}-ofg-ccache-${_SANITIZER}
|
| 29 |
+
|
| 30 |
+
# Step 1: build the base image
|
| 31 |
+
cd projects/${_PROJECT}
|
| 32 |
+
docker build -t gcr.io/oss-fuzz/${_PROJECT} .
|
| 33 |
+
|
| 34 |
+
|
| 35 |
+
# Step 2: create a container where `compile` has run which enables ccaching
|
| 36 |
+
# and also generates a replay build script.
|
| 37 |
+
cd ${BASE}
|
| 38 |
+
mkdir -p ccaches/${_PROJECT}
|
| 39 |
+
mkdir -p build/out/${_PROJECT}
|
| 40 |
+
B_START=$SECONDS
|
| 41 |
+
|
| 42 |
+
docker container rm -f ${_PROJECT}-origin-${_SANITIZER}
|
| 43 |
+
|
| 44 |
+
docker run \
|
| 45 |
+
--env=SANITIZER=${_SANITIZER} \
|
| 46 |
+
--env=CCACHE_DIR=/workspace/ccache \
|
| 47 |
+
--env=FUZZING_LANGUAGE=${_FUZZING_LANGUAGE} \
|
| 48 |
+
--env=CAPTURE_REPLAY_SCRIPT=1 \
|
| 49 |
+
--name=${_PROJECT}-origin-${_SANITIZER} \
|
| 50 |
+
-v=$PWD/ccaches/${_PROJECT}/ccache:/workspace/ccache \
|
| 51 |
+
-v=$PWD/build/out/${_PROJECT}/:/out/ \
|
| 52 |
+
gcr.io/oss-fuzz/${_PROJECT} \
|
| 53 |
+
/bin/bash -c \
|
| 54 |
+
"export PATH=/ccache/bin:\$PATH && compile"
|
| 55 |
+
B_TIME=$(($SECONDS - $B_START))
|
| 56 |
+
|
| 57 |
+
# Step 3: save (commit, locally) the cached container as an image
|
| 58 |
+
docker container commit -c "ENV REPLAY_ENABLED=1" -c "ENV CAPTURE_REPLAY_SCRIPT=" ${_PROJECT}-origin-${_SANITIZER} $FINAL_IMAGE_NAME
|
| 59 |
+
|
| 60 |
+
# Step 4: save the list of executables created from a vanilla build. This is
|
| 61 |
+
# needed for validating if replay and ccaching works.
|
| 62 |
+
# notes: run a shell the container with e.g.
|
| 63 |
+
# `docker run --entrypoint /bin/bash -it local/ossfuzz/htslib-origin-address`
|
| 64 |
+
executables_vanilla="$(find ./build/out/${_PROJECT} -executable -type f | sort)"
|
| 65 |
+
|
| 66 |
+
|
| 67 |
+
# Step 5: Build with replay enabled, and validate the executables are the same
|
| 68 |
+
# in terms of naming.
|
| 69 |
+
# Note that an important step is removing everything in $OUT/ which is done
|
| 70 |
+
# in the docker command.
|
| 71 |
+
R_START=$SECONDS
|
| 72 |
+
docker run \
|
| 73 |
+
--rm \
|
| 74 |
+
--env=SANITIZER=${_SANITIZER} \
|
| 75 |
+
--env=FUZZING_LANGUAGE=${_FUZZING_LANGUAGE} \
|
| 76 |
+
-v=$PWD/build/out/${_PROJECT}/:/out/ \
|
| 77 |
+
--name=${_PROJECT}-origin-${_SANITIZER}-replay-recached \
|
| 78 |
+
$FINAL_IMAGE_NAME \
|
| 79 |
+
/bin/bash -c \
|
| 80 |
+
"export PATH=/ccache/bin:\$PATH && rm -rf /out/* && compile"
|
| 81 |
+
R_TIME=$(($SECONDS - $R_START))
|
| 82 |
+
|
| 83 |
+
# Step 6: Extract the newly build executables
|
| 84 |
+
executables_replay="$(find ./build/out/${_PROJECT}/ -executable -type f | sort)"
|
| 85 |
+
|
| 86 |
+
echo "Executables vanilla: "
|
| 87 |
+
echo ${executables_vanilla}
|
| 88 |
+
|
| 89 |
+
echo "------------------------------------------------------"
|
| 90 |
+
echo "Executables replay: "
|
| 91 |
+
echo ${executables_replay}
|
| 92 |
+
|
| 93 |
+
REPLAY_WORKED=
|
| 94 |
+
|
| 95 |
+
# Step 7: match executables from vanilla builds and replay builds.
|
| 96 |
+
# If this step is successful, then the process can exit as it's ready.
|
| 97 |
+
if [[ "$executables_replay" == "$executables_vanilla" ]]
|
| 98 |
+
then
|
| 99 |
+
REPLAY_WORKED=1
|
| 100 |
+
|
| 101 |
+
if [ -z "${RUN_ALL+1}" ]; then
|
| 102 |
+
echo "${_PROJECT}: Replay worked."
|
| 103 |
+
echo "${_PROJECT}: Compile times: Vanilla=${B_TIME}; Replay=${R_TIME};"
|
| 104 |
+
exit 0
|
| 105 |
+
fi
|
| 106 |
+
else
|
| 107 |
+
echo "${_PROJECT}: Replay did not work"
|
| 108 |
+
R_TIME="N/A"
|
| 109 |
+
fi
|
| 110 |
+
|
| 111 |
+
# Step 8: prepare Dockerfile for ccache
|
| 112 |
+
cp -rf ccaches/${_PROJECT}/ccache ./projects/${_PROJECT}/ccache-cache
|
| 113 |
+
|
| 114 |
+
infra/experimental/chronos/prepare-ccache ${_PROJECT}
|
| 115 |
+
|
| 116 |
+
cd projects/${_PROJECT}
|
| 117 |
+
|
| 118 |
+
# Step 9: Build an image with CCache's new items (modifications are done on the
|
| 119 |
+
# dockerfile)
|
| 120 |
+
docker build -t $CCACHE_IMAGE_NAME .
|
| 121 |
+
|
| 122 |
+
cd ${BASE}
|
| 123 |
+
|
| 124 |
+
# Step 10: Run a `compile` with ccache's image.
|
| 125 |
+
# Run the ccache build
|
| 126 |
+
A_START=$SECONDS
|
| 127 |
+
docker run \
|
| 128 |
+
--rm \
|
| 129 |
+
--env=SANITIZER=${_SANITIZER} \
|
| 130 |
+
--env=FUZZING_LANGUAGE=${_FUZZING_LANGUAGE} \
|
| 131 |
+
--name=${_PROJECT}-origin-${_SANITIZER}-recached \
|
| 132 |
+
-v=$PWD/build/out/${_PROJECT}/:/out/ \
|
| 133 |
+
$CCACHE_IMAGE_NAME \
|
| 134 |
+
/bin/bash -c \
|
| 135 |
+
"export PATH=/ccache/bin:\$PATH && rm -rf /out/* && compile"
|
| 136 |
+
A_TIME=$(($SECONDS - $A_START))
|
| 137 |
+
|
| 138 |
+
# Step 11: extract the executables from the ccache build
|
| 139 |
+
executables_ccache="$(find ./build/out/${_PROJECT}/ -executable -type f | sort)"
|
| 140 |
+
|
| 141 |
+
|
| 142 |
+
# Step 12: validate the ccache builds are successful
|
| 143 |
+
if [[ "$executables_ccache" == "$executables_vanilla" ]]
|
| 144 |
+
then
|
| 145 |
+
echo "${_PROJECT}: Compile times: Vanilla=${B_TIME}; Replay=${R_TIME}; CCache=${A_TIME};"
|
| 146 |
+
|
| 147 |
+
if [[ -z "${REPLAY_WORKED}" || ${R_TIME} -gt ${A_TIME} ]]; then
|
| 148 |
+
if [ ${R_TIME} -gt ${A_TIME} ]; then
|
| 149 |
+
echo "Replay was slower than ccache."
|
| 150 |
+
fi
|
| 151 |
+
|
| 152 |
+
# Replay didn't work or was slower, so make the default "cached" image use the ccache one.
|
| 153 |
+
docker image tag \
|
| 154 |
+
$CCACHE_IMAGE_NAME \
|
| 155 |
+
$FINAL_IMAGE_NAME
|
| 156 |
+
fi
|
| 157 |
+
|
| 158 |
+
exit 0
|
| 159 |
+
else
|
| 160 |
+
echo "${_PROJECT}: Replay and ccaching did not work."
|
| 161 |
+
exit 1
|
| 162 |
+
fi
|
| 163 |
+
|
local-test-commons-compress-full-01-vuln_5/fuzz-tooling/infra/experimental/chronos/build_on_cloudbuild.sh
ADDED
|
@@ -0,0 +1,26 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/bin/bash
|
| 2 |
+
# Copyright 2024 Google LLC
|
| 3 |
+
#
|
| 4 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 5 |
+
# you may not use this file except in compliance with the License.
|
| 6 |
+
# You may obtain a copy of the License at
|
| 7 |
+
#
|
| 8 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 9 |
+
#
|
| 10 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 11 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 12 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 13 |
+
# See the License for the specific language governing permissions and
|
| 14 |
+
# limitations under the License.
|
| 15 |
+
#
|
| 16 |
+
################################################################################
|
| 17 |
+
PROJECT=$1
|
| 18 |
+
FUZZING_LANGUAGE=$2
|
| 19 |
+
|
| 20 |
+
gcloud builds submit "https://github.com/google/oss-fuzz" \
|
| 21 |
+
--async \
|
| 22 |
+
--git-source-revision=master \
|
| 23 |
+
--config=cloudbuild.yaml \
|
| 24 |
+
--substitutions=_PROJECT=$PROJECT,_FUZZING_LANGUAGE=$FUZZING_LANGUAGE \
|
| 25 |
+
--project=oss-fuzz \
|
| 26 |
+
--region=us-central1
|
local-test-commons-compress-full-01-vuln_5/fuzz-tooling/infra/experimental/chronos/cloudbuild.yaml
ADDED
|
@@ -0,0 +1,58 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2024 Google LLC
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
#
|
| 15 |
+
################################################################################
|
| 16 |
+
# CloudBuild for generating Chronos-cached images.
|
| 17 |
+
# Supports building by way of ccache now.
|
| 18 |
+
# High-level steps:
|
| 19 |
+
# 1) Build image for project
|
| 20 |
+
# 2) Run an ASAN build and store ccache
|
| 21 |
+
# 3) Copy ccache cache to host and copy into project's OSS-Fuzz folder
|
| 22 |
+
# 4) Build image for project and copy ccache in, storing image as *-ofg-cache-address
|
| 23 |
+
# 5) Run an coverage build and store ccache
|
| 24 |
+
# 6) Copy ccache cache to host and copy into project's OSS-Fuzz folder
|
| 25 |
+
# 7) Build image for project and copy ccache in, storing image as *-ofg-cache-coverage
|
| 26 |
+
# TODO (David): add support for use of dedicated replay_build.sh
|
| 27 |
+
steps:
|
| 28 |
+
- name: 'gcr.io/cloud-builders/docker'
|
| 29 |
+
entrypoint: /bin/bash
|
| 30 |
+
args:
|
| 31 |
+
- /workspace/infra/experimental/chronos/build_cache_local.sh
|
| 32 |
+
- ${_PROJECT}
|
| 33 |
+
- ${_FUZZING_LANGUAGE}
|
| 34 |
+
- address
|
| 35 |
+
env:
|
| 36 |
+
- RUN_ALL=1
|
| 37 |
+
- name: 'gcr.io/cloud-builders/docker'
|
| 38 |
+
entrypoint: /bin/bash
|
| 39 |
+
args:
|
| 40 |
+
- /workspace/infra/experimental/chronos/build_cache_local.sh
|
| 41 |
+
- ${_PROJECT}
|
| 42 |
+
- ${_FUZZING_LANGUAGE}
|
| 43 |
+
- coverage
|
| 44 |
+
env:
|
| 45 |
+
- RUN_ALL=1
|
| 46 |
+
images:
|
| 47 |
+
- us-central1-docker.pkg.dev/oss-fuzz/oss-fuzz-gen/${_PROJECT}-ofg-cached-address
|
| 48 |
+
- us-central1-docker.pkg.dev/oss-fuzz/oss-fuzz-gen/${_PROJECT}-ofg-cached-coverage
|
| 49 |
+
- us-central1-docker.pkg.dev/oss-fuzz/oss-fuzz-gen/${_PROJECT}-ofg-ccache-address
|
| 50 |
+
- us-central1-docker.pkg.dev/oss-fuzz/oss-fuzz-gen/${_PROJECT}-ofg-ccache-coverage
|
| 51 |
+
timeout: 72000s # 20 hours, same as build_lib.py
|
| 52 |
+
logsBucket: oss-fuzz-gcb-logs
|
| 53 |
+
tags:
|
| 54 |
+
- ${_PROJECT}
|
| 55 |
+
- chronos
|
| 56 |
+
options:
|
| 57 |
+
pool:
|
| 58 |
+
name: projects/oss-fuzz/locations/us-central1/workerPools/buildpool-chronos
|
local-test-commons-compress-full-01-vuln_5/fuzz-tooling/infra/experimental/chronos/cloudbuild_all.yaml
ADDED
|
@@ -0,0 +1,34 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2024 Google LLC
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
#
|
| 15 |
+
################################################################################
|
| 16 |
+
# CloudBuild for generating Chronos-cached images.
|
| 17 |
+
steps:
|
| 18 |
+
- name: 'gcr.io/cloud-builders/docker'
|
| 19 |
+
args:
|
| 20 |
+
- build
|
| 21 |
+
- -t
|
| 22 |
+
- gcloud
|
| 23 |
+
- .
|
| 24 |
+
dir: infra/experimental/chronos
|
| 25 |
+
- name: 'gcloud'
|
| 26 |
+
args:
|
| 27 |
+
- infra/experimental/chronos/build_all.sh
|
| 28 |
+
entrypoint: /bin/bash
|
| 29 |
+
timeout: 1800s
|
| 30 |
+
serviceAccount: 'projects/oss-fuzz/serviceAccounts/llm-eval@oss-fuzz.iam.gserviceaccount.com'
|
| 31 |
+
options:
|
| 32 |
+
logging: CLOUD_LOGGING_ONLY
|
| 33 |
+
tags:
|
| 34 |
+
- chronos-all
|
local-test-commons-compress-full-01-vuln_5/fuzz-tooling/infra/experimental/chronos/e2e-replay-build.sh
ADDED
|
@@ -0,0 +1,63 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/usr/bin/bash
|
| 2 |
+
# Copyright 2024 Google LLC
|
| 3 |
+
#
|
| 4 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 5 |
+
# you may not use this file except in compliance with the License.
|
| 6 |
+
# You may obtain a copy of the License at
|
| 7 |
+
#
|
| 8 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 9 |
+
#
|
| 10 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 11 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 12 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 13 |
+
# See the License for the specific language governing permissions and
|
| 14 |
+
# limitations under the License.
|
| 15 |
+
#
|
| 16 |
+
################################################################################
|
| 17 |
+
|
| 18 |
+
# Sample projects: simd, wt, libheif, htslib
|
| 19 |
+
PROJECT=liblouis
|
| 20 |
+
LOG=replay-${PROJECT}.txt
|
| 21 |
+
OUT1=replay-out-${PROJECT}-1
|
| 22 |
+
OUT2=replay-out-${PROJECT}-2
|
| 23 |
+
python infra/helper.py build_image --no-pull "$PROJECT"
|
| 24 |
+
|
| 25 |
+
# AddressSanitizer.
|
| 26 |
+
mkdir -p build/out/${PROJECT}
|
| 27 |
+
echo "start" >> ${LOG}
|
| 28 |
+
echo $(date +%Y:%m:%d:%H:%M:%S) >> ${LOG}
|
| 29 |
+
# Remove container name we are about to use.
|
| 30 |
+
docker container rm "${PROJECT}-origin-asan"
|
| 31 |
+
|
| 32 |
+
# Build once, clean container if needed
|
| 33 |
+
docker run -v $PWD/build/out/${PROJECT}:/out \
|
| 34 |
+
-ti --entrypoint="/bin/sh" \
|
| 35 |
+
--env FUZZING_LANGUAGE=c --env SANITIZER="address" \
|
| 36 |
+
--name "${PROJECT}-origin-asan" \
|
| 37 |
+
"gcr.io/oss-fuzz/${PROJECT}" -c "compile"
|
| 38 |
+
|
| 39 |
+
# Copy outs and log data
|
| 40 |
+
cp -rf $PWD/build/out/${PROJECT} ${OUT1}
|
| 41 |
+
rm -rf $PWD/build/out/${PROJECT}
|
| 42 |
+
ls -la $PWD/build/out/ >> ${LOG}
|
| 43 |
+
echo "next" >> ${LOG}
|
| 44 |
+
echo $(date +%Y:%m:%d:%H:%M:%S) >> ${LOG}
|
| 45 |
+
docker commit "${PROJECT}-origin-asan" "gcr.io/oss-fuzz/${PROJECT}-ofg-cached-asan"
|
| 46 |
+
|
| 47 |
+
# Run the replay command
|
| 48 |
+
docker run -v $PWD/build/out/${PROJECT}:/out \
|
| 49 |
+
-e REPLAY_ENABLED=1 -ti --entrypoint="/bin/sh" \
|
| 50 |
+
--env FUZZING_LANGUAGE=c --env SANITIZER="address" \
|
| 51 |
+
"gcr.io/oss-fuzz/${PROJECT}-ofg-cached-asan" -c "compile"
|
| 52 |
+
echo "finish" >> ${LOG}
|
| 53 |
+
echo $(date +%Y:%m:%d:%H:%M:%S) >> ${LOG}
|
| 54 |
+
cp -rf $PWD/build/out/${PROJECT} ${OUT2}
|
| 55 |
+
|
| 56 |
+
# Now match the artifacts
|
| 57 |
+
SUCCESS=$(infra/experimental/chronos/match_artifacts $OUT1 $OUT2)
|
| 58 |
+
|
| 59 |
+
if [[ $SUCCESS -eq 0 ]]; then
|
| 60 |
+
echo "SUCCESS REPLAY" >> ${LOG}
|
| 61 |
+
else
|
| 62 |
+
echo "FAIL REPLAY" >> ${LOG}
|
| 63 |
+
fi
|
local-test-commons-compress-full-01-vuln_5/fuzz-tooling/infra/experimental/chronos/match_artifacts.sh
ADDED
|
@@ -0,0 +1,35 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/usr/bin/bash -eux
|
| 2 |
+
# Copyright 2024 Google LLC
|
| 3 |
+
#
|
| 4 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 5 |
+
# you may not use this file except in compliance with the License.
|
| 6 |
+
# You may obtain a copy of the License at
|
| 7 |
+
#
|
| 8 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 9 |
+
#
|
| 10 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 11 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 12 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 13 |
+
# See the License for the specific language governing permissions and
|
| 14 |
+
# limitations under the License.
|
| 15 |
+
#
|
| 16 |
+
################################################################################
|
| 17 |
+
|
| 18 |
+
CMP1=$1
|
| 19 |
+
CMP2=$2
|
| 20 |
+
|
| 21 |
+
for exec1 in $(find $CMP1/ -type f -executable); do
|
| 22 |
+
base=$(basename $exec1)
|
| 23 |
+
|
| 24 |
+
exec2=$CMP2/${base}
|
| 25 |
+
if [ ! -f ${exec2} ]; then
|
| 26 |
+
exit 1
|
| 27 |
+
fi
|
| 28 |
+
|
| 29 |
+
comparison=$(cmp --silent $exec1 $exec2; echo $?)
|
| 30 |
+
if [[ $comparison -ne 0 ]]; then
|
| 31 |
+
exit 1
|
| 32 |
+
fi
|
| 33 |
+
done
|
| 34 |
+
|
| 35 |
+
exit 0
|
local-test-commons-compress-full-01-vuln_5/fuzz-tooling/infra/experimental/chronos/prepare-replay-rebuild
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/usr/bin/bash
|
| 2 |
+
# Copyright 2024 Google LLC
|
| 3 |
+
#
|
| 4 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 5 |
+
# you may not use this file except in compliance with the License.
|
| 6 |
+
# You may obtain a copy of the License at
|
| 7 |
+
#
|
| 8 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 9 |
+
#
|
| 10 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 11 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 12 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 13 |
+
# See the License for the specific language governing permissions and
|
| 14 |
+
# limitations under the License.
|
| 15 |
+
#
|
| 16 |
+
################################################################################
|
| 17 |
+
|
| 18 |
+
PROJECT=$1
|
| 19 |
+
{
|
| 20 |
+
echo "ENV REPLAY_ENABLED=1";
|
| 21 |
+
} >> "projects/$PROJECT/Dockerfile"
|
local-test-commons-compress-full-01-vuln_5/fuzz-tooling/infra/helper_test.py
ADDED
|
@@ -0,0 +1,239 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2021 Google LLC
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
"""Tests for helper.py"""
|
| 15 |
+
|
| 16 |
+
import datetime
|
| 17 |
+
import os
|
| 18 |
+
import tempfile
|
| 19 |
+
import unittest
|
| 20 |
+
from unittest import mock
|
| 21 |
+
|
| 22 |
+
from pyfakefs import fake_filesystem_unittest
|
| 23 |
+
|
| 24 |
+
import constants
|
| 25 |
+
import helper
|
| 26 |
+
import templates
|
| 27 |
+
|
| 28 |
+
# pylint: disable=no-self-use,protected-access
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
class ShellTest(unittest.TestCase):
|
| 32 |
+
"""Tests 'shell' command."""
|
| 33 |
+
|
| 34 |
+
@mock.patch('helper.docker_run')
|
| 35 |
+
@mock.patch('helper.build_image_impl')
|
| 36 |
+
def test_base_runner_debug(self, _, __):
|
| 37 |
+
"""Tests that shell base-runner-debug works as intended."""
|
| 38 |
+
image_name = 'base-runner-debug'
|
| 39 |
+
unparsed_args = ['shell', image_name]
|
| 40 |
+
parser = helper.get_parser()
|
| 41 |
+
args = helper.parse_args(parser, unparsed_args)
|
| 42 |
+
args.sanitizer = 'address'
|
| 43 |
+
result = helper.shell(args)
|
| 44 |
+
self.assertTrue(result)
|
| 45 |
+
|
| 46 |
+
|
| 47 |
+
class BuildImageImplTest(unittest.TestCase):
|
| 48 |
+
"""Tests for build_image_impl."""
|
| 49 |
+
|
| 50 |
+
@mock.patch('helper.docker_build')
|
| 51 |
+
def test_no_cache(self, mock_docker_build):
|
| 52 |
+
"""Tests that cache=False is handled properly."""
|
| 53 |
+
image_name = 'base-image'
|
| 54 |
+
helper.build_image_impl(helper.Project(image_name), cache=False)
|
| 55 |
+
self.assertIn('--no-cache', mock_docker_build.call_args_list[0][0][0])
|
| 56 |
+
|
| 57 |
+
@mock.patch('helper.docker_build')
|
| 58 |
+
@mock.patch('helper.pull_images')
|
| 59 |
+
def test_pull(self, mock_pull_images, _):
|
| 60 |
+
"""Tests that pull=True is handled properly."""
|
| 61 |
+
image_name = 'base-image'
|
| 62 |
+
project = helper.Project(image_name, is_external=True)
|
| 63 |
+
self.assertTrue(helper.build_image_impl(project, pull=True))
|
| 64 |
+
mock_pull_images.assert_called_with('c++')
|
| 65 |
+
|
| 66 |
+
@mock.patch('helper.docker_build')
|
| 67 |
+
def test_base_image(self, mock_docker_build):
|
| 68 |
+
"""Tests that build_image_impl works as intended with a base-image."""
|
| 69 |
+
image_name = 'base-image'
|
| 70 |
+
self.assertTrue(helper.build_image_impl(helper.Project(image_name)))
|
| 71 |
+
build_dir = os.path.join(helper.OSS_FUZZ_DIR,
|
| 72 |
+
'infra/base-images/base-image')
|
| 73 |
+
mock_docker_build.assert_called_with([
|
| 74 |
+
'-t', 'ghcr.io/aixcc-finals/base-image', '--file',
|
| 75 |
+
os.path.join(build_dir, 'Dockerfile'), build_dir
|
| 76 |
+
])
|
| 77 |
+
|
| 78 |
+
@mock.patch('helper.docker_build')
|
| 79 |
+
def test_oss_fuzz_project(self, mock_docker_build):
|
| 80 |
+
"""Tests that build_image_impl works as intended with an OSS-Fuzz
|
| 81 |
+
project."""
|
| 82 |
+
project_name = 'example'
|
| 83 |
+
self.assertTrue(helper.build_image_impl(helper.Project(project_name)))
|
| 84 |
+
build_dir = os.path.join(helper.OSS_FUZZ_DIR, 'projects', project_name)
|
| 85 |
+
mock_docker_build.assert_called_with([
|
| 86 |
+
'-t', 'gcr.io/oss-fuzz/example', '--file',
|
| 87 |
+
os.path.join(build_dir, 'Dockerfile'), build_dir
|
| 88 |
+
])
|
| 89 |
+
|
| 90 |
+
@mock.patch('helper.docker_build')
|
| 91 |
+
def test_external_project(self, mock_docker_build):
|
| 92 |
+
"""Tests that build_image_impl works as intended with a non-OSS-Fuzz
|
| 93 |
+
project."""
|
| 94 |
+
with tempfile.TemporaryDirectory() as temp_dir:
|
| 95 |
+
project_src_path = os.path.join(temp_dir, 'example')
|
| 96 |
+
os.mkdir(project_src_path)
|
| 97 |
+
build_integration_path = 'build-integration'
|
| 98 |
+
project = helper.Project(project_src_path,
|
| 99 |
+
is_external=True,
|
| 100 |
+
build_integration_path=build_integration_path)
|
| 101 |
+
self.assertTrue(helper.build_image_impl(project))
|
| 102 |
+
mock_docker_build.assert_called_with([
|
| 103 |
+
'-t', 'gcr.io/oss-fuzz/example', '--file',
|
| 104 |
+
os.path.join(project_src_path, build_integration_path, 'Dockerfile'),
|
| 105 |
+
project_src_path
|
| 106 |
+
])
|
| 107 |
+
|
| 108 |
+
|
| 109 |
+
class GenerateImplTest(fake_filesystem_unittest.TestCase):
|
| 110 |
+
"""Tests for _generate_impl."""
|
| 111 |
+
PROJECT_NAME = 'newfakeproject'
|
| 112 |
+
PROJECT_LANGUAGE = 'python'
|
| 113 |
+
|
| 114 |
+
def setUp(self):
|
| 115 |
+
self.maxDiff = None # pylint: disable=invalid-name
|
| 116 |
+
self.setUpPyfakefs()
|
| 117 |
+
self.fs.add_real_directory(helper.OSS_FUZZ_DIR)
|
| 118 |
+
|
| 119 |
+
def _verify_templated_files(self, template_dict, directory, language):
|
| 120 |
+
template_args = {
|
| 121 |
+
'project_name': self.PROJECT_NAME,
|
| 122 |
+
'year': 2021,
|
| 123 |
+
'base_builder': helper._base_builder_from_language(language),
|
| 124 |
+
'language': language,
|
| 125 |
+
}
|
| 126 |
+
for filename, template in template_dict.items():
|
| 127 |
+
file_path = os.path.join(directory, filename)
|
| 128 |
+
with open(file_path, 'r') as file_handle:
|
| 129 |
+
contents = file_handle.read()
|
| 130 |
+
self.assertEqual(contents, template % template_args)
|
| 131 |
+
|
| 132 |
+
@mock.patch('helper._get_current_datetime',
|
| 133 |
+
return_value=datetime.datetime(year=2021, month=1, day=1))
|
| 134 |
+
def test_generate_oss_fuzz_project(self, _):
|
| 135 |
+
"""Tests that the correct files are generated for an OSS-Fuzz project."""
|
| 136 |
+
helper._generate_impl(helper.Project(self.PROJECT_NAME),
|
| 137 |
+
self.PROJECT_LANGUAGE)
|
| 138 |
+
self._verify_templated_files(
|
| 139 |
+
templates.TEMPLATES,
|
| 140 |
+
os.path.join(helper.OSS_FUZZ_DIR, 'projects', self.PROJECT_NAME),
|
| 141 |
+
self.PROJECT_LANGUAGE)
|
| 142 |
+
|
| 143 |
+
def test_generate_external_project(self):
|
| 144 |
+
"""Tests that the correct files are generated for a non-OSS-Fuzz project."""
|
| 145 |
+
build_integration_path = '/newfakeproject/build-integration'
|
| 146 |
+
helper._generate_impl(
|
| 147 |
+
helper.Project('/newfakeproject/',
|
| 148 |
+
is_external=True,
|
| 149 |
+
build_integration_path=build_integration_path),
|
| 150 |
+
self.PROJECT_LANGUAGE)
|
| 151 |
+
self._verify_templated_files(templates.EXTERNAL_TEMPLATES,
|
| 152 |
+
build_integration_path, self.PROJECT_LANGUAGE)
|
| 153 |
+
|
| 154 |
+
@mock.patch('helper._get_current_datetime',
|
| 155 |
+
return_value=datetime.datetime(year=2021, month=1, day=1))
|
| 156 |
+
def test_generate_swift_project(self, _):
|
| 157 |
+
"""Tests that the swift project uses the correct base image."""
|
| 158 |
+
helper._generate_impl(helper.Project(self.PROJECT_NAME), 'swift')
|
| 159 |
+
self._verify_templated_files(
|
| 160 |
+
templates.TEMPLATES,
|
| 161 |
+
os.path.join(helper.OSS_FUZZ_DIR, 'projects', self.PROJECT_NAME),
|
| 162 |
+
'swift')
|
| 163 |
+
|
| 164 |
+
|
| 165 |
+
class ProjectTest(fake_filesystem_unittest.TestCase):
|
| 166 |
+
"""Tests for Project class."""
|
| 167 |
+
|
| 168 |
+
def setUp(self):
|
| 169 |
+
self.project_name = 'project'
|
| 170 |
+
self.internal_project = helper.Project(self.project_name)
|
| 171 |
+
self.external_project_path = os.path.join('/path', 'to', self.project_name)
|
| 172 |
+
self.external_project = helper.Project(self.external_project_path,
|
| 173 |
+
is_external=True)
|
| 174 |
+
self.setUpPyfakefs()
|
| 175 |
+
|
| 176 |
+
def test_init_external_project(self):
|
| 177 |
+
"""Tests __init__ method for external projects."""
|
| 178 |
+
self.assertEqual(self.external_project.name, self.project_name)
|
| 179 |
+
self.assertEqual(self.external_project.path, self.external_project_path)
|
| 180 |
+
self.assertEqual(
|
| 181 |
+
self.external_project.build_integration_path,
|
| 182 |
+
os.path.join(self.external_project_path,
|
| 183 |
+
constants.DEFAULT_EXTERNAL_BUILD_INTEGRATION_PATH))
|
| 184 |
+
|
| 185 |
+
def test_init_internal_project(self):
|
| 186 |
+
"""Tests __init__ method for internal projects."""
|
| 187 |
+
self.assertEqual(self.internal_project.name, self.project_name)
|
| 188 |
+
path = os.path.join(helper.OSS_FUZZ_DIR, 'projects', self.project_name)
|
| 189 |
+
self.assertEqual(self.internal_project.path, path)
|
| 190 |
+
self.assertEqual(self.internal_project.build_integration_path, path)
|
| 191 |
+
|
| 192 |
+
def test_dockerfile_path_internal_project(self):
|
| 193 |
+
"""Tests that dockerfile_path works as intended."""
|
| 194 |
+
self.assertEqual(
|
| 195 |
+
self.internal_project.dockerfile_path,
|
| 196 |
+
os.path.join(helper.OSS_FUZZ_DIR, 'projects', self.project_name,
|
| 197 |
+
'Dockerfile'))
|
| 198 |
+
|
| 199 |
+
def test_dockerfile_path_external_project(self):
|
| 200 |
+
"""Tests that dockerfile_path works as intended."""
|
| 201 |
+
self.assertEqual(
|
| 202 |
+
self.external_project.dockerfile_path,
|
| 203 |
+
os.path.join(self.external_project_path,
|
| 204 |
+
constants.DEFAULT_EXTERNAL_BUILD_INTEGRATION_PATH,
|
| 205 |
+
'Dockerfile'))
|
| 206 |
+
|
| 207 |
+
def test_out(self):
|
| 208 |
+
"""Tests that out works as intended."""
|
| 209 |
+
out_dir = self.internal_project.out
|
| 210 |
+
self.assertEqual(
|
| 211 |
+
out_dir,
|
| 212 |
+
os.path.join(helper.OSS_FUZZ_DIR, 'build', 'out', self.project_name))
|
| 213 |
+
self.assertTrue(os.path.exists(out_dir))
|
| 214 |
+
|
| 215 |
+
def test_work(self):
|
| 216 |
+
"""Tests that work works as intended."""
|
| 217 |
+
work_dir = self.internal_project.work
|
| 218 |
+
self.assertEqual(
|
| 219 |
+
work_dir,
|
| 220 |
+
os.path.join(helper.OSS_FUZZ_DIR, 'build', 'work', self.project_name))
|
| 221 |
+
self.assertTrue(os.path.exists(work_dir))
|
| 222 |
+
|
| 223 |
+
def test_corpus(self):
|
| 224 |
+
"""Tests that corpus works as intended."""
|
| 225 |
+
corpus_dir = self.internal_project.corpus
|
| 226 |
+
self.assertEqual(
|
| 227 |
+
corpus_dir,
|
| 228 |
+
os.path.join(helper.OSS_FUZZ_DIR, 'build', 'corpus', self.project_name))
|
| 229 |
+
self.assertTrue(os.path.exists(corpus_dir))
|
| 230 |
+
|
| 231 |
+
def test_language_internal_project(self):
|
| 232 |
+
"""Tests that language works as intended for an internal project."""
|
| 233 |
+
project_yaml_path = os.path.join(self.internal_project.path, 'project.yaml')
|
| 234 |
+
self.fs.create_file(project_yaml_path, contents='language: python')
|
| 235 |
+
self.assertEqual(self.internal_project.language, 'python')
|
| 236 |
+
|
| 237 |
+
def test_language_external_project(self):
|
| 238 |
+
"""Tests that language works as intended for an external project."""
|
| 239 |
+
self.assertEqual(self.external_project.language, 'c++')
|
local-test-commons-compress-full-01-vuln_5/fuzz-tooling/infra/presubmit.py
ADDED
|
@@ -0,0 +1,549 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/usr/bin/env python3
|
| 2 |
+
# Copyright 2020 Google LLC
|
| 3 |
+
#
|
| 4 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 5 |
+
# you may not use this file except in compliance with the License.
|
| 6 |
+
# You may obtain a copy of the License at
|
| 7 |
+
#
|
| 8 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 9 |
+
#
|
| 10 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 11 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 12 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 13 |
+
# See the License for the specific language governing permissions and
|
| 14 |
+
# limitations under the License.
|
| 15 |
+
#
|
| 16 |
+
################################################################################
|
| 17 |
+
"""Checks code for common issues before submitting."""
|
| 18 |
+
|
| 19 |
+
import argparse
|
| 20 |
+
import os
|
| 21 |
+
import re
|
| 22 |
+
import subprocess
|
| 23 |
+
import sys
|
| 24 |
+
import unittest
|
| 25 |
+
import yaml
|
| 26 |
+
|
| 27 |
+
import constants
|
| 28 |
+
|
| 29 |
+
_SRC_ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
|
| 30 |
+
VALID_PROJECT_REGEX_STR = '^[a-z0-9_-]+$'
|
| 31 |
+
VALID_PROJECT_REGEX = re.compile(VALID_PROJECT_REGEX_STR)
|
| 32 |
+
|
| 33 |
+
|
| 34 |
+
def _is_project_file(actual_path, expected_filename):
|
| 35 |
+
"""Returns True if actual_path's name is |expected_filename| and is a file
|
| 36 |
+
that exists and is in in projects/."""
|
| 37 |
+
if os.path.basename(actual_path) != expected_filename:
|
| 38 |
+
return False
|
| 39 |
+
|
| 40 |
+
if os.path.basename(os.path.dirname(
|
| 41 |
+
os.path.dirname(actual_path))) != 'projects':
|
| 42 |
+
return False
|
| 43 |
+
|
| 44 |
+
return os.path.exists(actual_path)
|
| 45 |
+
|
| 46 |
+
|
| 47 |
+
# TODO: Check for -fsanitize=fuzzer in files as well.
|
| 48 |
+
|
| 49 |
+
|
| 50 |
+
def _check_one_lib_fuzzing_engine(build_sh_file):
|
| 51 |
+
"""Returns False if |build_sh_file| contains -lFuzzingEngine.
|
| 52 |
+
This is deprecated behavior. $LIB_FUZZING_ENGINE should be used instead
|
| 53 |
+
so that -fsanitize=fuzzer is used."""
|
| 54 |
+
if not _is_project_file(build_sh_file, 'build.sh'):
|
| 55 |
+
return True
|
| 56 |
+
|
| 57 |
+
with open(build_sh_file) as build_sh:
|
| 58 |
+
build_sh_lines = build_sh.readlines()
|
| 59 |
+
for line_num, line in enumerate(build_sh_lines):
|
| 60 |
+
uncommented_code = line.split('#')[0]
|
| 61 |
+
if '-lFuzzingEngine' in uncommented_code:
|
| 62 |
+
print('Error: build.sh contains deprecated "-lFuzzingEngine" on line: '
|
| 63 |
+
f'{line_num}. Please use "$LIB_FUZZING_ENGINE" instead.')
|
| 64 |
+
return False
|
| 65 |
+
return True
|
| 66 |
+
|
| 67 |
+
|
| 68 |
+
def check_lib_fuzzing_engine(paths):
|
| 69 |
+
"""Calls _check_one_lib_fuzzing_engine on each path in |paths|. Returns True
|
| 70 |
+
if the result of every call is True."""
|
| 71 |
+
return all(_check_one_lib_fuzzing_engine(path) for path in paths)
|
| 72 |
+
|
| 73 |
+
|
| 74 |
+
class ProjectYamlChecker:
|
| 75 |
+
"""Checks for a project.yaml file."""
|
| 76 |
+
|
| 77 |
+
# Sections in a project.yaml and the constant values that they are allowed
|
| 78 |
+
# to have.
|
| 79 |
+
SECTIONS_AND_CONSTANTS = {
|
| 80 |
+
'sanitizers': constants.SANITIZERS,
|
| 81 |
+
'architectures': constants.ARCHITECTURES,
|
| 82 |
+
'fuzzing_engines': constants.ENGINES,
|
| 83 |
+
}
|
| 84 |
+
|
| 85 |
+
# Note: this list must be updated when we allow new sections.
|
| 86 |
+
VALID_SECTION_NAMES = [
|
| 87 |
+
'architectures',
|
| 88 |
+
'auto_ccs',
|
| 89 |
+
'blackbox',
|
| 90 |
+
'builds_per_day',
|
| 91 |
+
'coverage_extra_args',
|
| 92 |
+
'disabled',
|
| 93 |
+
'fuzzing_engines',
|
| 94 |
+
'help_url',
|
| 95 |
+
'homepage',
|
| 96 |
+
'language',
|
| 97 |
+
'labels', # For internal use only, hard to lint as it uses fuzzer names.
|
| 98 |
+
'main_repo',
|
| 99 |
+
'primary_contact',
|
| 100 |
+
'run_tests',
|
| 101 |
+
'sanitizers',
|
| 102 |
+
'selective_unpack',
|
| 103 |
+
'vendor_ccs',
|
| 104 |
+
'view_restrictions',
|
| 105 |
+
'file_github_issue',
|
| 106 |
+
]
|
| 107 |
+
|
| 108 |
+
REQUIRED_SECTIONS = ['main_repo']
|
| 109 |
+
|
| 110 |
+
def __init__(self, filename):
|
| 111 |
+
self.filename = filename
|
| 112 |
+
with open(filename) as file_handle:
|
| 113 |
+
self.data = yaml.safe_load(file_handle)
|
| 114 |
+
|
| 115 |
+
self.success = True
|
| 116 |
+
|
| 117 |
+
def do_checks(self):
|
| 118 |
+
"""Does all project.yaml checks. Returns True if they pass."""
|
| 119 |
+
if self.is_disabled():
|
| 120 |
+
return True
|
| 121 |
+
|
| 122 |
+
checks = [
|
| 123 |
+
self.check_project_yaml_constants,
|
| 124 |
+
self.check_required_sections,
|
| 125 |
+
self.check_valid_section_names,
|
| 126 |
+
self.check_valid_emails,
|
| 127 |
+
self.check_valid_language,
|
| 128 |
+
self.check_valid_project_name,
|
| 129 |
+
]
|
| 130 |
+
for check_function in checks:
|
| 131 |
+
check_function()
|
| 132 |
+
return self.success
|
| 133 |
+
|
| 134 |
+
def is_disabled(self):
|
| 135 |
+
"""Returns True if this project is disabled."""
|
| 136 |
+
return self.data.get('disabled', False)
|
| 137 |
+
|
| 138 |
+
def error(self, message):
|
| 139 |
+
"""Prints an error message and sets self.success to False."""
|
| 140 |
+
self.success = False
|
| 141 |
+
print(f'Error in {self.filename}: {message}')
|
| 142 |
+
|
| 143 |
+
def check_valid_project_name(self):
|
| 144 |
+
"""Checks that the project has a valid name."""
|
| 145 |
+
banned_names = ['google', 'g00gle']
|
| 146 |
+
project_name = os.path.basename(os.path.dirname(self.filename))
|
| 147 |
+
for banned_name in banned_names:
|
| 148 |
+
if banned_name in project_name:
|
| 149 |
+
self.error('Projects can\'t have \'google\' in the name.')
|
| 150 |
+
if not VALID_PROJECT_REGEX.match(project_name):
|
| 151 |
+
self.error(f'Projects must conform to regex {VALID_PROJECT_REGEX_STR}')
|
| 152 |
+
|
| 153 |
+
def check_project_yaml_constants(self):
|
| 154 |
+
"""Returns True if certain sections only have certain constant values."""
|
| 155 |
+
for section, allowed_constants in self.SECTIONS_AND_CONSTANTS.items():
|
| 156 |
+
if section not in self.data:
|
| 157 |
+
continue
|
| 158 |
+
actual_constants = self.data[section]
|
| 159 |
+
allowed_constants_str = ', '.join(allowed_constants)
|
| 160 |
+
for constant in actual_constants:
|
| 161 |
+
if isinstance(constant, str):
|
| 162 |
+
if constant not in allowed_constants:
|
| 163 |
+
self.error(f'{constant} (in {section} section) is not a valid '
|
| 164 |
+
f'constant ({allowed_constants_str}).')
|
| 165 |
+
elif isinstance(constant, dict):
|
| 166 |
+
# The only alternative value allowed is the experimental flag, i.e.
|
| 167 |
+
# `constant == {'memory': {'experimental': True}}`. Do not check the
|
| 168 |
+
# experimental flag, but assert that the sanitizer is a valid one.
|
| 169 |
+
if (len(constant.keys()) > 1 or
|
| 170 |
+
list(constant.keys())[0] not in allowed_constants):
|
| 171 |
+
self.error(f'Not allowed value in the project.yaml: {constant}')
|
| 172 |
+
else:
|
| 173 |
+
self.error(f'Not allowed value in the project.yaml: {constant}')
|
| 174 |
+
|
| 175 |
+
def check_valid_section_names(self):
|
| 176 |
+
"""Returns True if all section names are valid."""
|
| 177 |
+
for name in self.data:
|
| 178 |
+
if name not in self.VALID_SECTION_NAMES:
|
| 179 |
+
self.error(
|
| 180 |
+
f'{name} is not a valid section name ({self.VALID_SECTION_NAMES})')
|
| 181 |
+
|
| 182 |
+
def check_required_sections(self):
|
| 183 |
+
"""Returns True if all required sections are in |self.data|."""
|
| 184 |
+
for section in self.REQUIRED_SECTIONS:
|
| 185 |
+
if section not in self.data:
|
| 186 |
+
self.error(f'{section} section is missing.')
|
| 187 |
+
|
| 188 |
+
def check_valid_emails(self):
  """Sanity-checks that contact email addresses look plausible."""
  # Collect the primary contact plus any auto-CC addresses.
  addresses = []
  contact = self.data.get('primary_contact')
  if contact:
    addresses.append(contact)
  ccs = self.data.get('auto_ccs')
  if ccs:
    addresses.extend(ccs)

  # Deliberately loose validation: an address only needs '@' and '.'.
  for email_address in addresses:
    if '@' not in email_address or '.' not in email_address:
      self.error(f'{email_address} is an invalid email address.')
|
| 203 |
+
|
| 204 |
+
def check_valid_language(self):
  """Checks that a 'language' attribute exists and names a supported
  language (per constants.LANGUAGES)."""
  language = self.data.get('language')
  if not language:
    self.error('Missing "language" attribute in project.yaml.')
    return
  if language not in constants.LANGUAGES:
    self.error(
        f'"language: {language}" is not supported ({constants.LANGUAGES}).')
|
| 212 |
+
|
| 213 |
+
|
| 214 |
+
def _check_one_project_yaml(project_yaml_filename):
  """Validates a single project config file. Returns True on success, or
  when the path is not a project config at all."""
  # A config accidentally named project.yml (instead of .yaml) is an error.
  if _is_project_file(project_yaml_filename, 'project.yml'):
    print(project_yaml_filename, 'must be named project.yaml.')
    return False

  # Anything that is not a project.yaml is not checked here.
  if not _is_project_file(project_yaml_filename, 'project.yaml'):
    return True

  return ProjectYamlChecker(project_yaml_filename).do_checks()
|
| 225 |
+
|
| 226 |
+
|
| 227 |
+
def check_project_yaml(paths):
  """Runs _check_one_project_yaml on each path in |paths|; True only when
  every check passes. A list (not a generator) is built deliberately so
  every file is checked even after the first failure."""
  results = [_check_one_project_yaml(path) for path in paths]
  return all(results)
|
| 231 |
+
|
| 232 |
+
|
| 233 |
+
def _check_one_seed_corpus(path):
|
| 234 |
+
"""Returns False and prints error if |path| is a seed corpus."""
|
| 235 |
+
if os.path.basename(os.path.dirname(os.path.dirname(path))) != 'projects':
|
| 236 |
+
return True
|
| 237 |
+
|
| 238 |
+
if os.path.splitext(path)[1] == '.zip':
|
| 239 |
+
print('Don\'t commit seed corpora into the ClusterFuzz repo,'
|
| 240 |
+
'they bloat it forever.')
|
| 241 |
+
return False
|
| 242 |
+
|
| 243 |
+
return True
|
| 244 |
+
|
| 245 |
+
|
| 246 |
+
def check_seed_corpus(paths):
  """Runs _check_one_seed_corpus on each path in |paths|; True only when
  every check passes. A list is built so all offenders are reported."""
  results = [_check_one_seed_corpus(path) for path in paths]
  return all(results)
|
| 250 |
+
|
| 251 |
+
|
| 252 |
+
def _check_one_apt_update(path):
|
| 253 |
+
"""Checks that a Dockerfile uses apt-update before apt-install"""
|
| 254 |
+
if os.path.basename(os.path.dirname(os.path.dirname(path))) != 'projects':
|
| 255 |
+
return True
|
| 256 |
+
|
| 257 |
+
if os.path.basename(path) != 'Dockerfile':
|
| 258 |
+
return True
|
| 259 |
+
|
| 260 |
+
with open(path, 'r') as file:
|
| 261 |
+
dockerfile = file.read()
|
| 262 |
+
if 'RUN apt install' in dockerfile or 'RUN apt-get install' in dockerfile:
|
| 263 |
+
print('Please add an "apt-get update" before "apt-get install". '
|
| 264 |
+
'Otherwise, a cached and outdated RUN layer may lead to install '
|
| 265 |
+
'failures in file %s.' % str(path))
|
| 266 |
+
return False
|
| 267 |
+
|
| 268 |
+
return True
|
| 269 |
+
|
| 270 |
+
|
| 271 |
+
def check_apt_update(paths):
  """Runs _check_one_apt_update on each path; True only when every
  Dockerfile passes. A list is built so all offenders are reported."""
  results = [_check_one_apt_update(path) for path in paths]
  return all(results)
|
| 274 |
+
|
| 275 |
+
|
| 276 |
+
def do_checks(changed_files):
  """Runs every presubmit check over |changed_files|. Returns False if any
  check fails."""
  checks = (
      check_license,
      yapf,
      check_project_yaml,
      check_lib_fuzzing_engine,
      check_seed_corpus,
      check_apt_update,
  )
  # Materialize all results (instead of letting all() short-circuit on a
  # generator) so the user sees every failure at once — fewer
  # check-fix-check cycles.
  results = [check(changed_files) for check in checks]
  return all(results)
|
| 291 |
+
|
| 292 |
+
|
| 293 |
+
# Filenames that must carry a license header regardless of extension.
_CHECK_LICENSE_FILENAMES = ['Dockerfile']
# File extensions whose files must contain the license header.
_CHECK_LICENSE_EXTENSIONS = [
    '.bash',
    '.c',
    '.cc',
    '.cpp',
    '.css',
    '.Dockerfile',
    '.go',
    '.h',
    '.htm',
    '.html',
    '.java',
    '.js',
    '.proto',
    '.py',
    '.rs',
    '.sh',
    '.ts',
]
# Directory name whose contents are exempt from the license check.
THIRD_PARTY_DIR_NAME = 'third_party'

# Substring that identifies an Apache-2.0 license header.
_LICENSE_STRING = 'http://www.apache.org/licenses/LICENSE-2.0'
|
| 316 |
+
|
| 317 |
+
|
| 318 |
+
def check_license(paths):
  """Checks that every relevant file in |paths| contains the license header
  (_LICENSE_STRING). Returns True when all files pass or |paths| is empty."""
  if not paths:
    return True

  success = True
  for path in paths:
    # Anything under a third_party directory is exempt.
    path_parts = str(path).split(os.sep)
    if THIRD_PARTY_DIR_NAME in path_parts:
      continue

    filename = os.path.basename(path)
    extension = os.path.splitext(path)[1]
    relevant = (filename in _CHECK_LICENSE_FILENAMES or
                extension in _CHECK_LICENSE_EXTENSIONS)
    if not relevant:
      continue

    with open(path) as file_handle:
      contents = file_handle.read()
    if _LICENSE_STRING not in contents:
      print('Missing license header in file %s.' % str(path))
      success = False

  return success
|
| 340 |
+
|
| 341 |
+
|
| 342 |
+
def bool_to_returncode(success):
  """Maps a boolean result to a process exit code: 0 when |success| is
  truthy, 1 otherwise. Prints a one-line summary either way."""
  if not success:
    print('Failed.')
    return 1
  print('Success.')
  return 0
|
| 350 |
+
|
| 351 |
+
|
| 352 |
+
def is_nonfuzzer_python(path):
  """Returns True for .py files that are not under a projects/ directory
  (i.e. infra code rather than per-project fuzzer code)."""
  is_python = os.path.splitext(path)[1] == '.py'
  return is_python and '/projects/' not in path
|
| 355 |
+
|
| 356 |
+
|
| 357 |
+
def lint(_=None):
  """Runs pylint over the infra/ tree. Returns True when linting passes.
  The unused parameter keeps the signature compatible with the other
  checks, which receive a file list."""
  # --score no keeps the output limited to actual findings.
  command = ['python3', '-m', 'pylint', '--score', 'no', '-j', '0', 'infra']
  result = subprocess.run(command, check=False)
  return result.returncode == 0
|
| 364 |
+
|
| 365 |
+
|
| 366 |
+
def yapf(paths, validate=True):
  """Runs yapf over the non-fuzzer Python files in |paths|. With
  validate=True only checks formatting (diff mode); otherwise rewrites the
  files in place. Returns False if validation or formatting fails."""
  python_paths = [path for path in paths if is_nonfuzzer_python(path)]
  if not python_paths:
    return True

  mode_flag = '-d' if validate else '-i'
  command = ['yapf', mode_flag, '-p', *python_paths]

  result = subprocess.run(command, check=False)
  return result.returncode == 0
|
| 380 |
+
|
| 381 |
+
|
| 382 |
+
def get_changed_files():
  """Returns absolute paths of files changed on this git branch, including
  uncommitted modifications."""
  merge_base = subprocess.check_output(
      ['git', 'merge-base', 'HEAD', 'origin/HEAD']).strip().decode()

  diff_commands = [
      # Files modified by the commits on this branch.
      ['git', 'diff', '--name-only', merge_base + '..'],
      # Files modified but not yet committed.
      ['git', 'diff', '--name-only']
  ]

  changed_files = set()
  for command in diff_commands:
    output = subprocess.check_output(command).decode()
    for file_path in output.splitlines():
      # Skip paths that no longer exist (e.g. deleted files).
      if os.path.isfile(file_path):
        changed_files.add(file_path)
  print(f'Changed files: {" ".join(changed_files)}')
  return [os.path.abspath(f) for f in changed_files]
|
| 403 |
+
|
| 404 |
+
|
| 405 |
+
def run_build_tests():
  """Runs the build tests sequentially (they can't be run in parallel).
  Returns True when there were no failures or errors."""
  loader = unittest.TestLoader()
  build_dir = os.path.join(_SRC_ROOT, 'infra', 'build')
  suite = unittest.TestSuite(
      [loader.discover(build_dir, pattern='*_test.py')])
  print('Running build tests.')
  result = unittest.TextTestRunner().run(suite)
  return not result.failures and not result.errors
|
| 415 |
+
|
| 416 |
+
|
| 417 |
+
def run_nonbuild_tests(parallel):
  """Runs all tests except the build tests, optionally in parallel. Build
  tests are excluded because they use an emulator that prevents them from
  being used in parallel."""
  # Pass every directory containing repo files so pytest also runs tests
  # that are not in valid modules (e.g. "base-images").
  relevant_dirs = {os.path.dirname(file_path) for file_path in get_all_files()}

  # Use ignore-glob because ignore doesn't seem to work properly with the
  # way we pass directories to pytest.
  command = [
      'pytest',
      '--ignore-glob=infra/build/*',
      '--ignore-glob=projects/*',
  ]
  if parallel:
    command.extend(['-n', 'auto'])
  command += list(relevant_dirs)
  print('Running non-build tests.')

  # TODO(metzman): Get rid of this once config_utils stops using it.
  env = os.environ.copy()
  env['CIFUZZ_TEST'] = '1'

  return subprocess.run(command, check=False, env=env).returncode == 0
|
| 446 |
+
|
| 447 |
+
|
| 448 |
+
def run_tests(_=None, parallel=False, build_tests=True, nonbuild_tests=True):
  """Runs the requested groups of unit tests (non-build first, then build).
  Returns True only when every group that ran passed."""
  nonbuild_ok = True
  if nonbuild_tests:
    nonbuild_ok = run_nonbuild_tests(parallel)
  else:
    print('Skipping nonbuild tests as specified.')

  build_ok = True
  if build_tests:
    build_ok = run_build_tests()
  else:
    print('Skipping build tests as specified.')

  return nonbuild_ok and build_ok
|
| 463 |
+
|
| 464 |
+
|
| 465 |
+
def run_systemsan_tests(_=None):
  """Runs the SystemSan unit tests via make. Returns True on success. The
  unused parameter keeps the signature compatible with the other checks."""
  result = subprocess.run(['make', 'test'],
                          cwd='infra/experimental/SystemSan',
                          check=False)
  return result.returncode == 0
|
| 471 |
+
|
| 472 |
+
|
| 473 |
+
def get_all_files():
  """Returns absolute paths of every git-tracked file in this repo that
  still exists on disk."""
  listing = subprocess.check_output(['git', 'ls-files']).decode().splitlines()
  return [os.path.abspath(path) for path in listing if os.path.isfile(path)]
|
| 478 |
+
|
| 479 |
+
|
| 480 |
+
def main():
  """Check changes on a branch for common issues before submitting.

  Returns a process exit code (0 on success, 1 on failure) suitable for
  sys.exit().
  """
  # Get program arguments. The optional positional selects one specific
  # check; when omitted, the full battery in do_checks() runs.
  parser = argparse.ArgumentParser(description='Presubmit script for oss-fuzz.')
  parser.add_argument(
      'command',
      choices=['format', 'lint', 'license', 'infra-tests', 'systemsan-tests'],
      nargs='?')
  parser.add_argument('-a',
                      '--all-files',
                      action='store_true',
                      help='Run presubmit check(s) on all files',
                      default=False)
  parser.add_argument('-p',
                      '--parallel',
                      action='store_true',
                      help='Run tests in parallel.',
                      default=False)
  parser.add_argument('-s',
                      '--skip-build-tests',
                      action='store_true',
                      help='Skip build tests which are slow and must run '
                      'sequentially.',
                      default=False)
  parser.add_argument('-n',
                      '--skip-nonbuild-tests',
                      action='store_true',
                      help='Only do build tests.',
                      default=False)
  args = parser.parse_args()

  # Resolve the file set before chdir below, since the git commands resolve
  # paths relative to the current working directory.
  if args.all_files:
    relevant_files = get_all_files()
  else:
    relevant_files = get_changed_files()

  os.chdir(_SRC_ROOT)

  # Do one specific check if the user asked for it.
  if args.command == 'format':
    # validate=False: rewrite files in place instead of just diffing.
    success = yapf(relevant_files, False)
    return bool_to_returncode(success)

  if args.command == 'lint':
    success = lint()
    return bool_to_returncode(success)

  if args.command == 'license':
    success = check_license(relevant_files)
    return bool_to_returncode(success)

  if args.command == 'infra-tests':
    success = run_tests(relevant_files,
                        parallel=args.parallel,
                        build_tests=(not args.skip_build_tests),
                        nonbuild_tests=(not args.skip_nonbuild_tests))
    return bool_to_returncode(success)

  if args.command == 'systemsan-tests':
    success = run_systemsan_tests(relevant_files)
    return bool_to_returncode(success)

  # Do all the checks (but no tests).
  success = do_checks(relevant_files)

  return bool_to_returncode(success)
|
| 546 |
+
|
| 547 |
+
|
| 548 |
+
if __name__ == '__main__':
  # Exit with the code produced by bool_to_returncode (0 success, 1 failure).
  sys.exit(main())
|
local-test-commons-compress-full-01-vuln_5/fuzz-tooling/infra/pytest.ini
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
[pytest]
|
| 2 |
+
python_files = *_test.py
|
| 3 |
+
log_cli = true
|
local-test-commons-compress-full-01-vuln_5/fuzz-tooling/infra/repo_manager_test.py
ADDED
|
@@ -0,0 +1,201 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2019 Google LLC
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
"""Test the functionality of the RepoManager class."""
|
| 15 |
+
|
| 16 |
+
import contextlib
|
| 17 |
+
import os
|
| 18 |
+
import tempfile
|
| 19 |
+
import unittest
|
| 20 |
+
from unittest import mock
|
| 21 |
+
|
| 22 |
+
import repo_manager
|
| 23 |
+
import utils
|
| 24 |
+
|
| 25 |
+
# pylint: disable=protected-access
|
| 26 |
+
|
| 27 |
+
OSS_FUZZ_REPO_URL = 'https://github.com/google/oss-fuzz'
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
@contextlib.contextmanager
def get_oss_fuzz_repo():
  """Context manager that clones a temporary copy of the OSS-Fuzz repo and
  yields the path to the clone. The checkout is deleted on exit."""
  with tempfile.TemporaryDirectory() as workspace:
    repo_manager._clone(OSS_FUZZ_REPO_URL, workspace, 'oss-fuzz')
    yield os.path.join(workspace, 'oss-fuzz')
|
| 38 |
+
|
| 39 |
+
|
| 40 |
+
class CloneTest(unittest.TestCase):
  """Tests the _clone function."""

  @unittest.skipIf(not os.getenv('INTEGRATION_TESTS'),
                   'INTEGRATION_TESTS=1 not set')
  def test_clone_valid_repo_integration(self):
    """Integration test that tests the correct location of the git repo."""
    with get_oss_fuzz_repo() as oss_fuzz_repo:
      # A successful clone leaves a .git directory inside the checkout.
      git_path = os.path.join(oss_fuzz_repo, '.git')
      self.assertTrue(os.path.isdir(git_path))

  def test_clone_invalid_repo(self):
    """Tests that cloning an invalid repo will fail."""
    with tempfile.TemporaryDirectory() as tmp_dir:
      with self.assertRaises(RuntimeError):
        repo_manager._clone('https://github.com/oss-fuzz-not-real.git', tmp_dir,
                            'oss-fuzz')

  @mock.patch('utils.execute')
  def test_clone_with_username(self, mock_execute):  # pylint: disable=no-self-use
    """Test clone with username."""
    # Credentials must be folded into the clone URL, and the command must be
    # run with log_command=False — presumably so the password is not logged.
    repo_manager._clone('https://github.com/fake/repo.git',
                        '/',
                        'name',
                        username='user',
                        password='password')
    mock_execute.assert_called_once_with([
        'git', 'clone', 'https://user:password@github.com/fake/repo.git', 'name'
    ],
                                         location='/',
                                         check_result=True,
                                         log_command=False)
|
| 72 |
+
|
| 73 |
+
|
| 74 |
+
@unittest.skipIf(not os.getenv('INTEGRATION_TESTS'),
                 'INTEGRATION_TESTS=1 not set')
class RepoManagerCheckoutTest(unittest.TestCase):
  """Tests the checkout functionality of RepoManager."""

  def test_checkout_valid_commit(self):
    """Tests that the git checkout command works."""
    with get_oss_fuzz_repo() as oss_fuzz_repo:
      repo_man = repo_manager.RepoManager(oss_fuzz_repo)
      # A known commit hash from OSS-Fuzz history.
      commit_to_test = '04ea24ee15bbe46a19e5da6c5f022a2ffdfbdb3b'
      repo_man.checkout_commit(commit_to_test)
      self.assertEqual(commit_to_test, repo_man.get_current_commit())

  def test_checkout_invalid_commit(self):
    """Tests that the git checkout invalid commit fails."""
    with get_oss_fuzz_repo() as oss_fuzz_repo:
      repo_man = repo_manager.RepoManager(oss_fuzz_repo)
      # Whitespace, a nonexistent hash, and a non-hash string must all be
      # rejected with ValueError.
      with self.assertRaises(ValueError):
        repo_man.checkout_commit(' ')
      with self.assertRaises(ValueError):
        repo_man.checkout_commit('aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa')
      with self.assertRaises(ValueError):
        repo_man.checkout_commit('not-a-valid-commit')
|
| 97 |
+
|
| 98 |
+
|
| 99 |
+
@unittest.skipIf(not os.getenv('INTEGRATION_TESTS'),
                 'INTEGRATION_TESTS=1 not set')
class RepoManagerGetCommitListTest(unittest.TestCase):
  """Tests the get_commit_list method of RepoManager."""

  def test_get_valid_commit_list(self):
    """Tests an accurate commit list can be retrieved from the repo manager."""
    with get_oss_fuzz_repo() as oss_fuzz_repo:
      repo_man = repo_manager.RepoManager(oss_fuzz_repo)
      old_commit = '04ea24ee15bbe46a19e5da6c5f022a2ffdfbdb3b'
      new_commit = 'fa662173bfeb3ba08d2e84cefc363be11e6c8463'
      # Expected commits between old_commit and new_commit, newest first.
      commit_list = [
          'fa662173bfeb3ba08d2e84cefc363be11e6c8463',
          '17035317a44fa89d22fe6846d868d4bf57def78b',
          '97dee00a3c4ce95071c3e061592f5fd577dea886',
          '04ea24ee15bbe46a19e5da6c5f022a2ffdfbdb3b'
      ]
      result_list = repo_man.get_commit_list(new_commit, old_commit)
      self.assertListEqual(commit_list, result_list)

  def test_get_invalid_commit_list(self):
    """Tests that the proper errors are thrown when invalid commits are
    passed to get_commit_list."""
    with get_oss_fuzz_repo() as oss_fuzz_repo:
      repo_man = repo_manager.RepoManager(oss_fuzz_repo)
      old_commit = '04ea24ee15bbe46a19e5da6c5f022a2ffdfbdb3b'
      new_commit = 'fa662173bfeb3ba08d2e84cefc363be11e6c8463'
      # Unknown commits raise ValueError.
      with self.assertRaises(ValueError):
        repo_man.get_commit_list('fakecommit', new_commit)
      with self.assertRaises(ValueError):
        repo_man.get_commit_list(new_commit, 'fakecommit')
      # Reversed argument order raises RuntimeError.
      with self.assertRaises(RuntimeError):
        repo_man.get_commit_list(old_commit, new_commit)  # pylint: disable=arguments-out-of-order
|
| 132 |
+
|
| 133 |
+
|
| 134 |
+
@unittest.skipIf(not os.getenv('INTEGRATION_TESTS'),
                 'INTEGRATION_TESTS=1 not set')
class GitDiffTest(unittest.TestCase):
  """Tests get_git_diff."""

  def test_diff_exists(self):
    """Tests that a real diff is returned when a valid repo manager exists."""
    with get_oss_fuzz_repo() as oss_fuzz_repo:
      repo_man = repo_manager.RepoManager(oss_fuzz_repo)
      # utils.execute is mocked to return an (stdout, stderr, returncode)
      # tuple; stdout lists one changed file per line.
      with mock.patch.object(utils,
                             'execute',
                             return_value=('test.py\ndiff.py', None, 0)):
        diff = repo_man.get_git_diff()
        self.assertCountEqual(diff, ['test.py', 'diff.py'])

  def test_diff_empty(self):
    """Tests that None is returned when there is no difference between repos."""
    with get_oss_fuzz_repo() as oss_fuzz_repo:
      repo_man = repo_manager.RepoManager(oss_fuzz_repo)
      with mock.patch.object(utils, 'execute', return_value=('', None, 0)):
        diff = repo_man.get_git_diff()
        self.assertIsNone(diff)

  def test_error_on_command(self):
    """Tests that None is returned when the command errors out."""
    with get_oss_fuzz_repo() as oss_fuzz_repo:
      repo_man = repo_manager.RepoManager(oss_fuzz_repo)
      # Non-zero return code from the git command.
      with mock.patch.object(utils,
                             'execute',
                             return_value=('', 'Test error.', 1)):
        diff = repo_man.get_git_diff()
        self.assertIsNone(diff)

  def test_diff_no_change(self):
    """Tests that None is returned when there is no difference between repos."""
    with get_oss_fuzz_repo() as oss_fuzz_repo:
      repo_man = repo_manager.RepoManager(oss_fuzz_repo)
      # Fresh clone with no local modifications: no diff expected.
      diff = repo_man.get_git_diff()
      self.assertIsNone(diff)
|
| 173 |
+
|
| 174 |
+
|
| 175 |
+
@unittest.skipIf(not os.getenv('INTEGRATION_TESTS'),
                 'INTEGRATION_TESTS=1 not set')
class CheckoutPrIntegrationTest(unittest.TestCase):
  """Does Integration tests on the checkout_pr method of RepoManager."""

  def test_pull_request_exists(self):
    """Tests that a diff is returned when a valid PR is checked out."""
    with get_oss_fuzz_repo() as oss_fuzz_repo:
      repo_man = repo_manager.RepoManager(oss_fuzz_repo)
      # PR 3415 is expected to touch only README.md.
      repo_man.checkout_pr('refs/pull/3415/merge')
      diff = repo_man.get_git_diff()
      self.assertCountEqual(diff, ['README.md'])

  def test_checkout_invalid_pull_request(self):
    """Tests that the git checkout invalid pull request fails."""
    with get_oss_fuzz_repo() as oss_fuzz_repo:
      repo_man = repo_manager.RepoManager(oss_fuzz_repo)
      # Whitespace, a bogus ref, and a malformed ref must all raise.
      with self.assertRaises(RuntimeError):
        repo_man.checkout_pr(' ')
      with self.assertRaises(RuntimeError):
        repo_man.checkout_pr('aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa')
      with self.assertRaises(RuntimeError):
        repo_man.checkout_pr('not/a/valid/pr')
|
| 198 |
+
|
| 199 |
+
|
| 200 |
+
if __name__ == '__main__':
  # Allow running this test module directly.
  unittest.main()
|
local-test-commons-compress-full-01-vuln_5/fuzz-tooling/infra/utils_test.py
ADDED
|
@@ -0,0 +1,151 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2020 Google LLC
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
"""Tests the functionality of the utils module's functions"""
|
| 15 |
+
|
| 16 |
+
import os
|
| 17 |
+
import tempfile
|
| 18 |
+
import unittest
|
| 19 |
+
from unittest import mock
|
| 20 |
+
|
| 21 |
+
import utils
|
| 22 |
+
import helper
|
| 23 |
+
|
| 24 |
+
# Name of the canonical example project used by these tests.
EXAMPLE_PROJECT = 'example'

# Directory of prebuilt test data used by the fuzz-target tests below.
TEST_OUT_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                            'cifuzz', 'test_data', 'build-out')
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
class IsFuzzTargetLocalTest(unittest.TestCase):
  """Tests the is_fuzz_target_local function."""

  def test_invalid_filepath(self):
    """Tests the function with an invalid file path."""
    # Nonexistent, empty, and whitespace paths are all rejected.
    is_local = utils.is_fuzz_target_local('not/a/real/file')
    self.assertFalse(is_local)
    is_local = utils.is_fuzz_target_local('')
    self.assertFalse(is_local)
    is_local = utils.is_fuzz_target_local(' ')
    self.assertFalse(is_local)

  def test_valid_filepath(self):
    """Checks is_fuzz_target_local function with a valid filepath."""
    # A fuzz-target binary from the test data is recognized...
    is_local = utils.is_fuzz_target_local(
        os.path.join(TEST_OUT_DIR, 'example_crash_fuzzer'))
    self.assertTrue(is_local)
    # ...but the containing directory itself is not a fuzz target.
    is_local = utils.is_fuzz_target_local(TEST_OUT_DIR)
    self.assertFalse(is_local)
|
| 50 |
+
|
| 51 |
+
|
| 52 |
+
class GetFuzzTargetsTest(unittest.TestCase):
  """Tests the get_fuzz_targets function."""

  def test_valid_filepath(self):
    """Tests that fuzz targets can be retrieved once the fuzzers are built."""
    fuzz_targets = utils.get_fuzz_targets(TEST_OUT_DIR)
    crash_fuzzer_path = os.path.join(TEST_OUT_DIR, 'example_crash_fuzzer')
    nocrash_fuzzer_path = os.path.join(TEST_OUT_DIR, 'example_nocrash_fuzzer')
    self.assertCountEqual(fuzz_targets,
                          [crash_fuzzer_path, nocrash_fuzzer_path])

    # Testing on an arbitrary directory with no fuzz targets in it.
    fuzz_targets = utils.get_fuzz_targets(
        os.path.join(helper.OSS_FUZZ_DIR, 'infra', 'travis'))
    self.assertFalse(fuzz_targets)

  def test_invalid_filepath(self):
    """Tests what get_fuzz_targets return when invalid filepath is used."""
    fuzz_targets = utils.get_fuzz_targets('not/a/valid/file/path')
    self.assertFalse(fuzz_targets)
|
| 72 |
+
|
| 73 |
+
|
| 74 |
+
class ExecuteTest(unittest.TestCase):
  """Tests the execute function."""

  def test_valid_command(self):
    """Tests that execute can produce valid output."""
    with tempfile.TemporaryDirectory() as tmp_dir:
      # Empty directory: ls prints nothing and exits 0.
      out, err, err_code = utils.execute(['ls', '.'],
                                         location=tmp_dir,
                                         check_result=False)
      self.assertEqual(err_code, 0)
      self.assertEqual(err, '')
      self.assertEqual(out, '')
      out, err, err_code = utils.execute(['mkdir', 'tmp'],
                                         location=tmp_dir,
                                         check_result=False)
      self.assertEqual(err_code, 0)
      self.assertEqual(err, '')
      self.assertEqual(out, '')
      # The newly created entry now shows up in the listing.
      out, err, err_code = utils.execute(['ls', '.'],
                                         location=tmp_dir,
                                         check_result=False)
      self.assertEqual(err_code, 0)
      self.assertEqual(err, '')
      self.assertEqual(out, 'tmp\n')

  def test_error_command(self):
    """Tests that execute can correctly surface errors."""
    with tempfile.TemporaryDirectory() as tmp_dir:
      # With check_result=False the non-zero exit code is just returned.
      out, err, err_code = utils.execute(['ls', 'notarealdir'],
                                         location=tmp_dir,
                                         check_result=False)
      self.assertEqual(err_code, 2)
      self.assertIsNotNone(err)
      self.assertEqual(out, '')
      # With check_result=True the same failure raises instead.
      with self.assertRaises(RuntimeError):
        out, err, err_code = utils.execute(['ls', 'notarealdir'],
                                           location=tmp_dir,
                                           check_result=True)
|
| 112 |
+
|
| 113 |
+
|
| 114 |
+
class BinaryPrintTest(unittest.TestCase):
  """Tests for utils.binary_print."""

  @unittest.skip('Causes spurious failures because of side-effects.')
  def test_string(self):  # pylint: disable=no-self-use
    """Tests that utils.binary_print can print a regular string."""
    # Should execute without raising any exceptions.
    with mock.patch('sys.stdout.buffer.write') as mock_write:
      utils.binary_print('hello')
      mock_write.assert_called_with('hello\n')

  @unittest.skip('Causes spurious failures because of side-effects.')
  def test_binary_string(self):  # pylint: disable=no-self-use
    """Tests that utils.binary_print can print a binary string."""
    # Should execute without raising any exceptions.
    with mock.patch('sys.stdout.buffer.write') as mock_write:
      utils.binary_print(b'hello')
      mock_write.assert_called_with(b'hello\n')
|
| 132 |
+
|
| 133 |
+
|
| 134 |
+
class CommandToStringTest(unittest.TestCase):
  """Tests for command_to_string."""

  def test_string(self):
    """Tests that command_to_string returns the argument passed to it when
    it is passed a string."""
    command = 'command'
    self.assertEqual(utils.command_to_string(command), command)

  def test_list(self):
    """Tests that command_to_string returns the correct string when it is
    passed a list."""
    command = ['command', 'arg1', 'arg2']
    self.assertEqual(utils.command_to_string(command), 'command arg1 arg2')
|
| 148 |
+
|
| 149 |
+
|
| 150 |
+
if __name__ == '__main__':
  # Allow running this test module directly.
  unittest.main()
|