Add files using upload-large-folder tool
Browse files- local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/COMPRESS-477/split_zip_created_by_winrar/file_to_compare_1 +1297 -0
- local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/COMPRESS-477/split_zip_created_by_zip/file_to_compare_1 +38 -0
- local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/COMPRESS-477/split_zip_created_by_zip/file_to_compare_2 +79 -0
- local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/org/apache/commons/compress/COMPRESS-626/compress-626-pack200.jar +0 -0
- local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/org/apache/commons/compress/COMPRESS-644/ARW05UP.ICO +0 -0
- local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/org/apache/commons/compress/COMPRESS-661/testARofText.ar +5 -0
- local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/org/apache/commons/compress/ar/number_parsing/bad_group-fail.ar +8 -0
- local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/org/apache/commons/compress/ar/number_parsing/bad_length-fail.ar +8 -0
- local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/org/apache/commons/compress/ar/number_parsing/bad_long_namelen_bsd-fail.ar +5 -0
- local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/org/apache/commons/compress/ar/number_parsing/bad_long_namelen_gnu1-fail.ar +8 -0
- local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/org/apache/commons/compress/ar/number_parsing/bad_long_namelen_gnu2-fail.ar +6 -0
- local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/org/apache/commons/compress/ar/number_parsing/bad_long_namelen_gnu3-fail.ar +0 -0
- local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/org/apache/commons/compress/ar/number_parsing/bad_modified-fail.ar +8 -0
- local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/org/apache/commons/compress/ar/number_parsing/bad_table_length_gnu-fail.ar +8 -0
- local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/org/apache/commons/compress/ar/number_parsing/bad_user-fail.ar +8 -0
- local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/org/apache/commons/compress/arj/zero_sized_headers-fail.arj +0 -0
- local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/org/apache/commons/compress/cpio/bad_long_value.cpio +0 -0
- local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/org/apache/commons/compress/dump/directory_null_bytes-fail.dump +0 -0
- local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/org/apache/commons/compress/dump/invalid_compression_type-fail.dump +0 -0
- local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/org/apache/commons/compress/fuzz/crash-f2efd9eaeb86cda597d07b5e3c3d81363633c2da +0 -0
- local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/org/apache/commons/compress/ico/commons-daemon/procrunr.ico +0 -0
- local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/org/apache/commons/compress/ico/commons-daemon/procruns.ico +0 -0
- local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/org/apache/commons/compress/ico/commons-daemon/procrunw.ico +0 -0
- local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/org/apache/commons/compress/ico/commons-imaging/OutOfMemory_epine.ico +0 -0
- local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/org/apache/commons/compress/pack/cpfloat_oom.pack +0 -0
- local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/org/apache/commons/compress/pack/cputf8_oom.pack +0 -0
- local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/org/apache/commons/compress/pack/favoured_oom.pack +0 -0
- local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/org/apache/commons/compress/pack/filebits_oom.pack +0 -0
- local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/org/apache/commons/compress/pack/flags_oom.pack +0 -0
- local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/org/apache/commons/compress/pack/segment_header_oom.pack +0 -0
- local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/org/apache/commons/compress/pack/signatures_oom.pack +0 -0
- local-test-commons-compress-full-01-vuln_2/fuzz-tooling/docs/.gitignore +5 -0
- local-test-commons-compress-full-01-vuln_2/fuzz-tooling/docs/_config.yml +40 -0
- local-test-commons-compress-full-01-vuln_2/fuzz-tooling/docs/glossary.md +1 -0
- local-test-commons-compress-full-01-vuln_2/fuzz-tooling/docs/index.md +88 -0
- local-test-commons-compress-full-01-vuln_2/fuzz-tooling/infra/MAINTAINERS.csv +7 -0
- local-test-commons-compress-full-01-vuln_2/fuzz-tooling/infra/README.md +23 -0
- local-test-commons-compress-full-01-vuln_2/fuzz-tooling/infra/build_specified_commit.py +410 -0
- local-test-commons-compress-full-01-vuln_2/fuzz-tooling/infra/build_specified_commit_test.py +126 -0
- local-test-commons-compress-full-01-vuln_2/fuzz-tooling/infra/constants.py +49 -0
- local-test-commons-compress-full-01-vuln_2/fuzz-tooling/infra/helper.py +1810 -0
- local-test-commons-compress-full-01-vuln_2/fuzz-tooling/infra/helper_test.py +239 -0
- local-test-commons-compress-full-01-vuln_2/fuzz-tooling/infra/presubmit.py +549 -0
- local-test-commons-compress-full-01-vuln_2/fuzz-tooling/infra/pytest.ini +3 -0
- local-test-commons-compress-full-01-vuln_2/fuzz-tooling/infra/repo_manager.py +272 -0
- local-test-commons-compress-full-01-vuln_2/fuzz-tooling/infra/repo_manager_test.py +201 -0
- local-test-commons-compress-full-01-vuln_2/fuzz-tooling/infra/run_fuzzers.Dockerfile +31 -0
- local-test-commons-compress-full-01-vuln_2/fuzz-tooling/infra/templates.py +119 -0
- local-test-commons-compress-full-01-vuln_2/fuzz-tooling/infra/test +1 -0
- local-test-commons-compress-full-01-vuln_2/fuzz-tooling/infra/test_repos.py +84 -0
local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/COMPRESS-477/split_zip_created_by_winrar/file_to_compare_1
ADDED
|
@@ -0,0 +1,1297 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
/*
|
| 2 |
+
* Licensed to the Apache Software Foundation (ASF) under one
|
| 3 |
+
* or more contributor license agreements. See the NOTICE file
|
| 4 |
+
* distributed with this work for additional information
|
| 5 |
+
* regarding copyright ownership. The ASF licenses this file
|
| 6 |
+
* to you under the Apache License, Version 2.0 (the
|
| 7 |
+
* "License"); you may not use this file except in compliance
|
| 8 |
+
* with the License. You may obtain a copy of the License at
|
| 9 |
+
*
|
| 10 |
+
* http://www.apache.org/licenses/LICENSE-2.0
|
| 11 |
+
*
|
| 12 |
+
* Unless required by applicable law or agreed to in writing,
|
| 13 |
+
* software distributed under the License is distributed on an
|
| 14 |
+
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
| 15 |
+
* KIND, either express or implied. See the License for the
|
| 16 |
+
* specific language governing permissions and limitations
|
| 17 |
+
* under the License.
|
| 18 |
+
*/
|
| 19 |
+
package org.apache.commons.compress.archivers.zip;
|
| 20 |
+
|
| 21 |
+
import java.io.ByteArrayInputStream;
|
| 22 |
+
import java.io.ByteArrayOutputStream;
|
| 23 |
+
import java.io.EOFException;
|
| 24 |
+
import java.io.IOException;
|
| 25 |
+
import java.io.InputStream;
|
| 26 |
+
import java.io.PushbackInputStream;
|
| 27 |
+
import java.math.BigInteger;
|
| 28 |
+
import java.nio.ByteBuffer;
|
| 29 |
+
import java.util.Arrays;
|
| 30 |
+
import java.util.zip.CRC32;
|
| 31 |
+
import java.util.zip.DataFormatException;
|
| 32 |
+
import java.util.zip.Inflater;
|
| 33 |
+
import java.util.zip.ZipEntry;
|
| 34 |
+
import java.util.zip.ZipException;
|
| 35 |
+
|
| 36 |
+
import org.apache.commons.compress.archivers.ArchiveEntry;
|
| 37 |
+
import org.apache.commons.compress.archivers.ArchiveInputStream;
|
| 38 |
+
import org.apache.commons.compress.compressors.bzip2.BZip2CompressorInputStream;
|
| 39 |
+
import org.apache.commons.compress.compressors.deflate64.Deflate64CompressorInputStream;
|
| 40 |
+
import org.apache.commons.compress.utils.ArchiveUtils;
|
| 41 |
+
import org.apache.commons.compress.utils.IOUtils;
|
| 42 |
+
import org.apache.commons.compress.utils.InputStreamStatistics;
|
| 43 |
+
|
| 44 |
+
import static org.apache.commons.compress.archivers.zip.ZipConstants.DWORD;
|
| 45 |
+
import static org.apache.commons.compress.archivers.zip.ZipConstants.SHORT;
|
| 46 |
+
import static org.apache.commons.compress.archivers.zip.ZipConstants.WORD;
|
| 47 |
+
import static org.apache.commons.compress.archivers.zip.ZipConstants.ZIP64_MAGIC;
|
| 48 |
+
|
| 49 |
+
/**
|
| 50 |
+
* Implements an input stream that can read Zip archives.
|
| 51 |
+
*
|
| 52 |
+
* <p>As of Apache Commons Compress it transparently supports Zip64
|
| 53 |
+
* extensions and thus individual entries and archives larger than 4
|
| 54 |
+
* GB or with more than 65536 entries.</p>
|
| 55 |
+
*
|
| 56 |
+
* <p>The {@link ZipFile} class is preferred when reading from files
|
| 57 |
+
* as {@link ZipArchiveInputStream} is limited by not being able to
|
| 58 |
+
* read the central directory header before returning entries. In
|
| 59 |
+
* particular {@link ZipArchiveInputStream}</p>
|
| 60 |
+
*
|
| 61 |
+
* <ul>
|
| 62 |
+
*
|
| 63 |
+
* <li>may return entries that are not part of the central directory
|
| 64 |
+
* at all and shouldn't be considered part of the archive.</li>
|
| 65 |
+
*
|
| 66 |
+
* <li>may return several entries with the same name.</li>
|
| 67 |
+
*
|
| 68 |
+
* <li>will not return internal or external attributes.</li>
|
| 69 |
+
*
|
| 70 |
+
* <li>may return incomplete extra field data.</li>
|
| 71 |
+
*
|
| 72 |
+
* <li>may return unknown sizes and CRC values for entries until the
|
| 73 |
+
* next entry has been reached if the archive uses the data
|
| 74 |
+
* descriptor feature.</li>
|
| 75 |
+
*
|
| 76 |
+
* </ul>
|
| 77 |
+
*
|
| 78 |
+
* @see ZipFile
|
| 79 |
+
* @NotThreadSafe
|
| 80 |
+
*/
|
| 81 |
+
public class ZipArchiveInputStream extends ArchiveInputStream implements InputStreamStatistics {
|
| 82 |
+
|
| 83 |
+
/** The zip encoding to use for file names and the file comment. */
|
| 84 |
+
private final ZipEncoding zipEncoding;
|
| 85 |
+
|
| 86 |
+
// the provided encoding (for unit tests)
|
| 87 |
+
final String encoding;
|
| 88 |
+
|
| 89 |
+
/** Whether to look for and use Unicode extra fields. */
|
| 90 |
+
private final boolean useUnicodeExtraFields;
|
| 91 |
+
|
| 92 |
+
/** Wrapped stream, will always be a PushbackInputStream. */
|
| 93 |
+
private final InputStream in;
|
| 94 |
+
|
| 95 |
+
/** Inflater used for all deflated entries. */
|
| 96 |
+
private final Inflater inf = new Inflater(true);
|
| 97 |
+
|
| 98 |
+
/** Buffer used to read from the wrapped stream. */
|
| 99 |
+
private final ByteBuffer buf = ByteBuffer.allocate(ZipArchiveOutputStream.BUFFER_SIZE);
|
| 100 |
+
|
| 101 |
+
/** The entry that is currently being read. */
|
| 102 |
+
private CurrentEntry current = null;
|
| 103 |
+
|
| 104 |
+
/** Whether the stream has been closed. */
|
| 105 |
+
private boolean closed = false;
|
| 106 |
+
|
| 107 |
+
/** Whether the stream has reached the central directory - and thus found all entries. */
|
| 108 |
+
private boolean hitCentralDirectory = false;
|
| 109 |
+
|
| 110 |
+
/**
|
| 111 |
+
* When reading a stored entry that uses the data descriptor this
|
| 112 |
+
* stream has to read the full entry and caches it. This is the
|
| 113 |
+
* cache.
|
| 114 |
+
*/
|
| 115 |
+
private ByteArrayInputStream lastStoredEntry = null;
|
| 116 |
+
|
| 117 |
+
/** Whether the stream will try to read STORED entries that use a data descriptor. */
|
| 118 |
+
private boolean allowStoredEntriesWithDataDescriptor = false;
|
| 119 |
+
|
| 120 |
+
/** Count decompressed bytes for current entry */
|
| 121 |
+
private long uncompressedCount = 0;
|
| 122 |
+
|
| 123 |
+
private static final int LFH_LEN = 30;
|
| 124 |
+
/*
|
| 125 |
+
local file header signature WORD
|
| 126 |
+
version needed to extract SHORT
|
| 127 |
+
general purpose bit flag SHORT
|
| 128 |
+
compression method SHORT
|
| 129 |
+
last mod file time SHORT
|
| 130 |
+
last mod file date SHORT
|
| 131 |
+
crc-32 WORD
|
| 132 |
+
compressed size WORD
|
| 133 |
+
uncompressed size WORD
|
| 134 |
+
file name length SHORT
|
| 135 |
+
extra field length SHORT
|
| 136 |
+
*/
|
| 137 |
+
|
| 138 |
+
private static final int CFH_LEN = 46;
|
| 139 |
+
/*
|
| 140 |
+
central file header signature WORD
|
| 141 |
+
version made by SHORT
|
| 142 |
+
version needed to extract SHORT
|
| 143 |
+
general purpose bit flag SHORT
|
| 144 |
+
compression method SHORT
|
| 145 |
+
last mod file time SHORT
|
| 146 |
+
last mod file date SHORT
|
| 147 |
+
crc-32 WORD
|
| 148 |
+
compressed size WORD
|
| 149 |
+
uncompressed size WORD
|
| 150 |
+
file name length SHORT
|
| 151 |
+
extra field length SHORT
|
| 152 |
+
file comment length SHORT
|
| 153 |
+
disk number start SHORT
|
| 154 |
+
internal file attributes SHORT
|
| 155 |
+
external file attributes WORD
|
| 156 |
+
relative offset of local header WORD
|
| 157 |
+
*/
|
| 158 |
+
|
| 159 |
+
private static final long TWO_EXP_32 = ZIP64_MAGIC + 1;
|
| 160 |
+
|
| 161 |
+
// cached buffers - must only be used locally in the class (COMPRESS-172 - reduce garbage collection)
|
| 162 |
+
private final byte[] lfhBuf = new byte[LFH_LEN];
|
| 163 |
+
private final byte[] skipBuf = new byte[1024];
|
| 164 |
+
private final byte[] shortBuf = new byte[SHORT];
|
| 165 |
+
private final byte[] wordBuf = new byte[WORD];
|
| 166 |
+
private final byte[] twoDwordBuf = new byte[2 * DWORD];
|
| 167 |
+
|
| 168 |
+
private int entriesRead = 0;
|
| 169 |
+
|
| 170 |
+
/**
|
| 171 |
+
* Create an instance using UTF-8 encoding
|
| 172 |
+
* @param inputStream the stream to wrap
|
| 173 |
+
*/
|
| 174 |
+
public ZipArchiveInputStream(final InputStream inputStream) {
|
| 175 |
+
this(inputStream, ZipEncodingHelper.UTF8);
|
| 176 |
+
}
|
| 177 |
+
|
| 178 |
+
/**
|
| 179 |
+
* Create an instance using the specified encoding
|
| 180 |
+
* @param inputStream the stream to wrap
|
| 181 |
+
* @param encoding the encoding to use for file names, use null
|
| 182 |
+
* for the platform's default encoding
|
| 183 |
+
* @since 1.5
|
| 184 |
+
*/
|
| 185 |
+
public ZipArchiveInputStream(final InputStream inputStream, final String encoding) {
|
| 186 |
+
this(inputStream, encoding, true);
|
| 187 |
+
}
|
| 188 |
+
|
| 189 |
+
/**
|
| 190 |
+
* Create an instance using the specified encoding
|
| 191 |
+
* @param inputStream the stream to wrap
|
| 192 |
+
* @param encoding the encoding to use for file names, use null
|
| 193 |
+
* for the platform's default encoding
|
| 194 |
+
* @param useUnicodeExtraFields whether to use InfoZIP Unicode
|
| 195 |
+
* Extra Fields (if present) to set the file names.
|
| 196 |
+
*/
|
| 197 |
+
public ZipArchiveInputStream(final InputStream inputStream, final String encoding, final boolean useUnicodeExtraFields) {
|
| 198 |
+
this(inputStream, encoding, useUnicodeExtraFields, false);
|
| 199 |
+
}
|
| 200 |
+
|
| 201 |
+
/**
|
| 202 |
+
* Create an instance using the specified encoding
|
| 203 |
+
* @param inputStream the stream to wrap
|
| 204 |
+
* @param encoding the encoding to use for file names, use null
|
| 205 |
+
* for the platform's default encoding
|
| 206 |
+
* @param useUnicodeExtraFields whether to use InfoZIP Unicode
|
| 207 |
+
* Extra Fields (if present) to set the file names.
|
| 208 |
+
* @param allowStoredEntriesWithDataDescriptor whether the stream
|
| 209 |
+
* will try to read STORED entries that use a data descriptor
|
| 210 |
+
* @since 1.1
|
| 211 |
+
*/
|
| 212 |
+
public ZipArchiveInputStream(final InputStream inputStream,
|
| 213 |
+
final String encoding,
|
| 214 |
+
final boolean useUnicodeExtraFields,
|
| 215 |
+
final boolean allowStoredEntriesWithDataDescriptor) {
|
| 216 |
+
this.encoding = encoding;
|
| 217 |
+
zipEncoding = ZipEncodingHelper.getZipEncoding(encoding);
|
| 218 |
+
this.useUnicodeExtraFields = useUnicodeExtraFields;
|
| 219 |
+
in = new PushbackInputStream(inputStream, buf.capacity());
|
| 220 |
+
this.allowStoredEntriesWithDataDescriptor =
|
| 221 |
+
allowStoredEntriesWithDataDescriptor;
|
| 222 |
+
// haven't read anything so far
|
| 223 |
+
buf.limit(0);
|
| 224 |
+
}
|
| 225 |
+
|
| 226 |
+
public ZipArchiveEntry getNextZipEntry() throws IOException {
|
| 227 |
+
uncompressedCount = 0;
|
| 228 |
+
|
| 229 |
+
boolean firstEntry = true;
|
| 230 |
+
if (closed || hitCentralDirectory) {
|
| 231 |
+
return null;
|
| 232 |
+
}
|
| 233 |
+
if (current != null) {
|
| 234 |
+
closeEntry();
|
| 235 |
+
firstEntry = false;
|
| 236 |
+
}
|
| 237 |
+
|
| 238 |
+
long currentHeaderOffset = getBytesRead();
|
| 239 |
+
try {
|
| 240 |
+
if (firstEntry) {
|
| 241 |
+
// split archives have a special signature before the
|
| 242 |
+
// first local file header - look for it and fail with
|
| 243 |
+
// the appropriate error message if this is a split
|
| 244 |
+
// archive.
|
| 245 |
+
readFirstLocalFileHeader(lfhBuf);
|
| 246 |
+
} else {
|
| 247 |
+
readFully(lfhBuf);
|
| 248 |
+
}
|
| 249 |
+
} catch (final EOFException e) { //NOSONAR
|
| 250 |
+
return null;
|
| 251 |
+
}
|
| 252 |
+
|
| 253 |
+
final ZipLong sig = new ZipLong(lfhBuf);
|
| 254 |
+
if (!sig.equals(ZipLong.LFH_SIG)) {
|
| 255 |
+
if (sig.equals(ZipLong.CFH_SIG) || sig.equals(ZipLong.AED_SIG) || isApkSigningBlock(lfhBuf)) {
|
| 256 |
+
hitCentralDirectory = true;
|
| 257 |
+
skipRemainderOfArchive();
|
| 258 |
+
return null;
|
| 259 |
+
}
|
| 260 |
+
throw new ZipException(String.format("Unexpected record signature: 0X%X", sig.getValue()));
|
| 261 |
+
}
|
| 262 |
+
|
| 263 |
+
int off = WORD;
|
| 264 |
+
current = new CurrentEntry();
|
| 265 |
+
|
| 266 |
+
final int versionMadeBy = ZipShort.getValue(lfhBuf, off);
|
| 267 |
+
off += SHORT;
|
| 268 |
+
current.entry.setPlatform((versionMadeBy >> ZipFile.BYTE_SHIFT) & ZipFile.NIBLET_MASK);
|
| 269 |
+
|
| 270 |
+
final GeneralPurposeBit gpFlag = GeneralPurposeBit.parse(lfhBuf, off);
|
| 271 |
+
final boolean hasUTF8Flag = gpFlag.usesUTF8ForNames();
|
| 272 |
+
final ZipEncoding entryEncoding = hasUTF8Flag ? ZipEncodingHelper.UTF8_ZIP_ENCODING : zipEncoding;
|
| 273 |
+
current.hasDataDescriptor = gpFlag.usesDataDescriptor();
|
| 274 |
+
current.entry.setGeneralPurposeBit(gpFlag);
|
| 275 |
+
|
| 276 |
+
off += SHORT;
|
| 277 |
+
|
| 278 |
+
current.entry.setMethod(ZipShort.getValue(lfhBuf, off));
|
| 279 |
+
off += SHORT;
|
| 280 |
+
|
| 281 |
+
final long time = ZipUtil.dosToJavaTime(ZipLong.getValue(lfhBuf, off));
|
| 282 |
+
current.entry.setTime(time);
|
| 283 |
+
off += WORD;
|
| 284 |
+
|
| 285 |
+
ZipLong size = null, cSize = null;
|
| 286 |
+
if (!current.hasDataDescriptor) {
|
| 287 |
+
current.entry.setCrc(ZipLong.getValue(lfhBuf, off));
|
| 288 |
+
off += WORD;
|
| 289 |
+
|
| 290 |
+
cSize = new ZipLong(lfhBuf, off);
|
| 291 |
+
off += WORD;
|
| 292 |
+
|
| 293 |
+
size = new ZipLong(lfhBuf, off);
|
| 294 |
+
off += WORD;
|
| 295 |
+
} else {
|
| 296 |
+
off += 3 * WORD;
|
| 297 |
+
}
|
| 298 |
+
|
| 299 |
+
final int fileNameLen = ZipShort.getValue(lfhBuf, off);
|
| 300 |
+
|
| 301 |
+
off += SHORT;
|
| 302 |
+
|
| 303 |
+
final int extraLen = ZipShort.getValue(lfhBuf, off);
|
| 304 |
+
off += SHORT; // NOSONAR - assignment as documentation
|
| 305 |
+
|
| 306 |
+
final byte[] fileName = new byte[fileNameLen];
|
| 307 |
+
readFully(fileName);
|
| 308 |
+
current.entry.setName(entryEncoding.decode(fileName), fileName);
|
| 309 |
+
if (hasUTF8Flag) {
|
| 310 |
+
current.entry.setNameSource(ZipArchiveEntry.NameSource.NAME_WITH_EFS_FLAG);
|
| 311 |
+
}
|
| 312 |
+
|
| 313 |
+
final byte[] extraData = new byte[extraLen];
|
| 314 |
+
readFully(extraData);
|
| 315 |
+
current.entry.setExtra(extraData);
|
| 316 |
+
|
| 317 |
+
if (!hasUTF8Flag && useUnicodeExtraFields) {
|
| 318 |
+
ZipUtil.setNameAndCommentFromExtraFields(current.entry, fileName, null);
|
| 319 |
+
}
|
| 320 |
+
|
| 321 |
+
processZip64Extra(size, cSize);
|
| 322 |
+
|
| 323 |
+
current.entry.setLocalHeaderOffset(currentHeaderOffset);
|
| 324 |
+
current.entry.setDataOffset(getBytesRead());
|
| 325 |
+
current.entry.setStreamContiguous(true);
|
| 326 |
+
|
| 327 |
+
ZipMethod m = ZipMethod.getMethodByCode(current.entry.getMethod());
|
| 328 |
+
if (current.entry.getCompressedSize() != ArchiveEntry.SIZE_UNKNOWN) {
|
| 329 |
+
if (ZipUtil.canHandleEntryData(current.entry) && m != ZipMethod.STORED && m != ZipMethod.DEFLATED) {
|
| 330 |
+
InputStream bis = new BoundedInputStream(in, current.entry.getCompressedSize());
|
| 331 |
+
switch (m) {
|
| 332 |
+
case UNSHRINKING:
|
| 333 |
+
current.in = new UnshrinkingInputStream(bis);
|
| 334 |
+
break;
|
| 335 |
+
case IMPLODING:
|
| 336 |
+
current.in = new ExplodingInputStream(
|
| 337 |
+
current.entry.getGeneralPurposeBit().getSlidingDictionarySize(),
|
| 338 |
+
current.entry.getGeneralPurposeBit().getNumberOfShannonFanoTrees(),
|
| 339 |
+
bis);
|
| 340 |
+
break;
|
| 341 |
+
case BZIP2:
|
| 342 |
+
current.in = new BZip2CompressorInputStream(bis);
|
| 343 |
+
break;
|
| 344 |
+
case ENHANCED_DEFLATED:
|
| 345 |
+
current.in = new Deflate64CompressorInputStream(bis);
|
| 346 |
+
break;
|
| 347 |
+
default:
|
| 348 |
+
// we should never get here as all supported methods have been covered
|
| 349 |
+
// will cause an error when read is invoked, don't throw an exception here so people can
|
| 350 |
+
// skip unsupported entries
|
| 351 |
+
break;
|
| 352 |
+
}
|
| 353 |
+
}
|
| 354 |
+
} else if (m == ZipMethod.ENHANCED_DEFLATED) {
|
| 355 |
+
current.in = new Deflate64CompressorInputStream(in);
|
| 356 |
+
}
|
| 357 |
+
|
| 358 |
+
entriesRead++;
|
| 359 |
+
return current.entry;
|
| 360 |
+
}
|
| 361 |
+
|
| 362 |
+
/**
|
| 363 |
+
* Fills the given array with the first local file header and
|
| 364 |
+
* deals with splitting/spanning markers that may prefix the first
|
| 365 |
+
* LFH.
|
| 366 |
+
*/
|
| 367 |
+
private void readFirstLocalFileHeader(final byte[] lfh) throws IOException {
|
| 368 |
+
readFully(lfh);
|
| 369 |
+
final ZipLong sig = new ZipLong(lfh);
|
| 370 |
+
if (sig.equals(ZipLong.DD_SIG)) {
|
| 371 |
+
throw new UnsupportedZipFeatureException(UnsupportedZipFeatureException.Feature.SPLITTING);
|
| 372 |
+
}
|
| 373 |
+
|
| 374 |
+
if (sig.equals(ZipLong.SINGLE_SEGMENT_SPLIT_MARKER)) {
|
| 375 |
+
// The archive is not really split as only one segment was
|
| 376 |
+
// needed in the end. Just skip over the marker.
|
| 377 |
+
final byte[] missedLfhBytes = new byte[4];
|
| 378 |
+
readFully(missedLfhBytes);
|
| 379 |
+
System.arraycopy(lfh, 4, lfh, 0, LFH_LEN - 4);
|
| 380 |
+
System.arraycopy(missedLfhBytes, 0, lfh, LFH_LEN - 4, 4);
|
| 381 |
+
}
|
| 382 |
+
}
|
| 383 |
+
|
| 384 |
+
/**
|
| 385 |
+
* Records whether a Zip64 extra is present and sets the size
|
| 386 |
+
* information from it if sizes are 0xFFFFFFFF and the entry
|
| 387 |
+
* doesn't use a data descriptor.
|
| 388 |
+
*/
|
| 389 |
+
private void processZip64Extra(final ZipLong size, final ZipLong cSize) {
|
| 390 |
+
final Zip64ExtendedInformationExtraField z64 =
|
| 391 |
+
(Zip64ExtendedInformationExtraField)
|
| 392 |
+
current.entry.getExtraField(Zip64ExtendedInformationExtraField.HEADER_ID);
|
| 393 |
+
current.usesZip64 = z64 != null;
|
| 394 |
+
if (!current.hasDataDescriptor) {
|
| 395 |
+
if (z64 != null // same as current.usesZip64 but avoids NPE warning
|
| 396 |
+
&& (ZipLong.ZIP64_MAGIC.equals(cSize) || ZipLong.ZIP64_MAGIC.equals(size)) ) {
|
| 397 |
+
current.entry.setCompressedSize(z64.getCompressedSize().getLongValue());
|
| 398 |
+
current.entry.setSize(z64.getSize().getLongValue());
|
| 399 |
+
} else if (cSize != null && size != null) {
|
| 400 |
+
current.entry.setCompressedSize(cSize.getValue());
|
| 401 |
+
current.entry.setSize(size.getValue());
|
| 402 |
+
}
|
| 403 |
+
}
|
| 404 |
+
}
|
| 405 |
+
|
| 406 |
+
@Override
public ArchiveEntry getNextEntry() throws IOException {
    // Delegates to the ZIP-specific variant; exists to satisfy the
    // generic ArchiveInputStream contract.
    return getNextZipEntry();
}
|
| 410 |
+
|
| 411 |
+
/**
|
| 412 |
+
* Whether this class is able to read the given entry.
|
| 413 |
+
*
|
| 414 |
+
* <p>May return false if it is set up to use encryption or a
|
| 415 |
+
* compression method that hasn't been implemented yet.</p>
|
| 416 |
+
* @since 1.1
|
| 417 |
+
*/
|
| 418 |
+
@Override
|
| 419 |
+
public boolean canReadEntryData(final ArchiveEntry ae) {
|
| 420 |
+
if (ae instanceof ZipArchiveEntry) {
|
| 421 |
+
final ZipArchiveEntry ze = (ZipArchiveEntry) ae;
|
| 422 |
+
return ZipUtil.canHandleEntryData(ze)
|
| 423 |
+
&& supportsDataDescriptorFor(ze)
|
| 424 |
+
&& supportsCompressedSizeFor(ze);
|
| 425 |
+
}
|
| 426 |
+
return false;
|
| 427 |
+
}
|
| 428 |
+
|
| 429 |
+
@Override
public int read(final byte[] buffer, final int offset, final int length) throws IOException {
    if (length == 0) {
        return 0;
    }
    if (closed) {
        throw new IOException("The stream is closed");
    }

    // No entry currently open - nothing to read.
    if (current == null) {
        return -1;
    }

    // avoid int overflow, check null buffer
    if (offset > buffer.length || length < 0 || offset < 0 || buffer.length - offset < length) {
        throw new ArrayIndexOutOfBoundsException();
    }

    ZipUtil.checkRequestedFeatures(current.entry);
    if (!supportsDataDescriptorFor(current.entry)) {
        throw new UnsupportedZipFeatureException(UnsupportedZipFeatureException.Feature.DATA_DESCRIPTOR,
                current.entry);
    }
    if (!supportsCompressedSizeFor(current.entry)) {
        throw new UnsupportedZipFeatureException(UnsupportedZipFeatureException.Feature.UNKNOWN_COMPRESSED_SIZE,
                current.entry);
    }

    // Dispatch on the entry's compression method; unsupported methods fail.
    int read;
    if (current.entry.getMethod() == ZipArchiveOutputStream.STORED) {
        read = readStored(buffer, offset, length);
    } else if (current.entry.getMethod() == ZipArchiveOutputStream.DEFLATED) {
        read = readDeflated(buffer, offset, length);
    } else if (current.entry.getMethod() == ZipMethod.UNSHRINKING.getCode()
            || current.entry.getMethod() == ZipMethod.IMPLODING.getCode()
            || current.entry.getMethod() == ZipMethod.ENHANCED_DEFLATED.getCode()
            || current.entry.getMethod() == ZipMethod.BZIP2.getCode()) {
        // These methods are handled by a dedicated decompressing stream
        // stored in current.in.
        read = current.in.read(buffer, offset, length);
    } else {
        throw new UnsupportedZipFeatureException(ZipMethod.getMethodByCode(current.entry.getMethod()),
                current.entry);
    }

    if (read >= 0) {
        // Keep the running CRC and byte counter in sync with what was
        // handed to the caller.
        current.crc.update(buffer, offset, read);
        uncompressedCount += read;
    }

    return read;
}
|
| 479 |
+
|
| 480 |
+
/**
|
| 481 |
+
* @since 1.17
|
| 482 |
+
*/
|
| 483 |
+
@Override
|
| 484 |
+
public long getCompressedCount() {
|
| 485 |
+
if (current.entry.getMethod() == ZipArchiveOutputStream.STORED) {
|
| 486 |
+
return current.bytesRead;
|
| 487 |
+
} else if (current.entry.getMethod() == ZipArchiveOutputStream.DEFLATED) {
|
| 488 |
+
return getBytesInflated();
|
| 489 |
+
} else if (current.entry.getMethod() == ZipMethod.UNSHRINKING.getCode()) {
|
| 490 |
+
return ((UnshrinkingInputStream) current.in).getCompressedCount();
|
| 491 |
+
} else if (current.entry.getMethod() == ZipMethod.IMPLODING.getCode()) {
|
| 492 |
+
return ((ExplodingInputStream) current.in).getCompressedCount();
|
| 493 |
+
} else if (current.entry.getMethod() == ZipMethod.ENHANCED_DEFLATED.getCode()) {
|
| 494 |
+
return ((Deflate64CompressorInputStream) current.in).getCompressedCount();
|
| 495 |
+
} else if (current.entry.getMethod() == ZipMethod.BZIP2.getCode()) {
|
| 496 |
+
return ((BZip2CompressorInputStream) current.in).getCompressedCount();
|
| 497 |
+
} else {
|
| 498 |
+
return -1;
|
| 499 |
+
}
|
| 500 |
+
}
|
| 501 |
+
|
| 502 |
+
/**
 * Number of uncompressed bytes handed out to callers so far.
 *
 * @since 1.17
 */
@Override
public long getUncompressedCount() {
    return uncompressedCount;
}
|
| 509 |
+
|
| 510 |
+
/**
 * Implementation of read for STORED entries.
 */
private int readStored(final byte[] buffer, final int offset, final int length) throws IOException {

    if (current.hasDataDescriptor) {
        if (lastStoredEntry == null) {
            // Size is unknown up front - cache the whole entry first,
            // then serve reads from the cached copy.
            readStoredEntry();
        }
        return lastStoredEntry.read(buffer, offset, length);
    }

    final long csize = current.entry.getSize();
    if (current.bytesRead >= csize) {
        // entry exhausted
        return -1;
    }

    if (buf.position() >= buf.limit()) {
        // internal buffer drained - refill from the underlying stream
        buf.position(0);
        final int l = in.read(buf.array());
        if (l == -1) {
            buf.limit(0);
            throw new IOException("Truncated ZIP file");
        }
        buf.limit(l);

        count(l);
        current.bytesReadFromStream += l;
    }

    int toRead = Math.min(buf.remaining(), length);
    if ((csize - current.bytesRead) < toRead) {
        // if it is smaller than toRead then it fits into an int
        toRead = (int) (csize - current.bytesRead);
    }
    buf.get(buffer, offset, toRead);
    current.bytesRead += toRead;
    return toRead;
}
|
| 549 |
+
|
| 550 |
+
/**
|
| 551 |
+
* Implementation of read for DEFLATED entries.
|
| 552 |
+
*/
|
| 553 |
+
private int readDeflated(final byte[] buffer, final int offset, final int length) throws IOException {
|
| 554 |
+
final int read = readFromInflater(buffer, offset, length);
|
| 555 |
+
if (read <= 0) {
|
| 556 |
+
if (inf.finished()) {
|
| 557 |
+
return -1;
|
| 558 |
+
} else if (inf.needsDictionary()) {
|
| 559 |
+
throw new ZipException("This archive needs a preset dictionary"
|
| 560 |
+
+ " which is not supported by Commons"
|
| 561 |
+
+ " Compress.");
|
| 562 |
+
} else if (read == -1) {
|
| 563 |
+
throw new IOException("Truncated ZIP file");
|
| 564 |
+
}
|
| 565 |
+
}
|
| 566 |
+
return read;
|
| 567 |
+
}
|
| 568 |
+
|
| 569 |
+
/**
 * Potentially reads more bytes to fill the inflater's buffer and
 * reads from it.
 */
private int readFromInflater(final byte[] buffer, final int offset, final int length) throws IOException {
    int read = 0;
    do {
        if (inf.needsInput()) {
            final int l = fill();
            if (l > 0) {
                current.bytesReadFromStream += buf.limit();
            } else if (l == -1) {
                // end of underlying stream
                return -1;
            } else {
                // read returned 0 bytes - give up for now, caller retries
                break;
            }
        }
        try {
            read = inf.inflate(buffer, offset, length);
        } catch (final DataFormatException e) {
            // surface corrupt deflate data as a ZipException (an IOException)
            throw (IOException) new ZipException(e.getMessage()).initCause(e);
        }
    } while (read == 0 && inf.needsInput());
    return read;
}
|
| 594 |
+
|
| 595 |
+
@Override
|
| 596 |
+
public void close() throws IOException {
|
| 597 |
+
if (!closed) {
|
| 598 |
+
closed = true;
|
| 599 |
+
try {
|
| 600 |
+
in.close();
|
| 601 |
+
} finally {
|
| 602 |
+
inf.end();
|
| 603 |
+
}
|
| 604 |
+
}
|
| 605 |
+
}
|
| 606 |
+
|
| 607 |
+
/**
|
| 608 |
+
* Skips over and discards value bytes of data from this input
|
| 609 |
+
* stream.
|
| 610 |
+
*
|
| 611 |
+
* <p>This implementation may end up skipping over some smaller
|
| 612 |
+
* number of bytes, possibly 0, if and only if it reaches the end
|
| 613 |
+
* of the underlying stream.</p>
|
| 614 |
+
*
|
| 615 |
+
* <p>The actual number of bytes skipped is returned.</p>
|
| 616 |
+
*
|
| 617 |
+
* @param value the number of bytes to be skipped.
|
| 618 |
+
* @return the actual number of bytes skipped.
|
| 619 |
+
* @throws IOException - if an I/O error occurs.
|
| 620 |
+
* @throws IllegalArgumentException - if value is negative.
|
| 621 |
+
*/
|
| 622 |
+
@Override
|
| 623 |
+
public long skip(final long value) throws IOException {
|
| 624 |
+
if (value >= 0) {
|
| 625 |
+
long skipped = 0;
|
| 626 |
+
while (skipped < value) {
|
| 627 |
+
final long rem = value - skipped;
|
| 628 |
+
final int x = read(skipBuf, 0, (int) (skipBuf.length > rem ? rem : skipBuf.length));
|
| 629 |
+
if (x == -1) {
|
| 630 |
+
return skipped;
|
| 631 |
+
}
|
| 632 |
+
skipped += x;
|
| 633 |
+
}
|
| 634 |
+
return skipped;
|
| 635 |
+
}
|
| 636 |
+
throw new IllegalArgumentException();
|
| 637 |
+
}
|
| 638 |
+
|
| 639 |
+
/**
 * Checks if the signature matches what is expected for a zip file.
 * Does not currently handle self-extracting zips which may have arbitrary
 * leading content.
 *
 * @param signature the bytes to check
 * @param length the number of bytes to check
 * @return true, if this stream is a zip archive stream, false otherwise
 */
public static boolean matches(final byte[] signature, final int length) {
    // Not enough bytes to hold any of the four-byte signatures.
    if (length < ZipArchiveOutputStream.LFH_SIG.length) {
        return false;
    }

    return checksig(signature, ZipArchiveOutputStream.LFH_SIG) // normal file
        || checksig(signature, ZipArchiveOutputStream.EOCD_SIG) // empty zip
        || checksig(signature, ZipArchiveOutputStream.DD_SIG) // split zip
        || checksig(signature, ZipLong.SINGLE_SEGMENT_SPLIT_MARKER.getBytes());
}
|
| 658 |
+
|
| 659 |
+
private static boolean checksig(final byte[] signature, final byte[] expected) {
|
| 660 |
+
for (int i = 0; i < expected.length; i++) {
|
| 661 |
+
if (signature[i] != expected[i]) {
|
| 662 |
+
return false;
|
| 663 |
+
}
|
| 664 |
+
}
|
| 665 |
+
return true;
|
| 666 |
+
}
|
| 667 |
+
|
| 668 |
+
/**
 * Closes the current ZIP archive entry and positions the underlying
 * stream to the beginning of the next entry. All per-entry variables
 * and data structures are cleared.
 * <p>
 * If the compressed size of this entry is included in the entry header,
 * then any outstanding bytes are simply skipped from the underlying
 * stream without uncompressing them. This allows an entry to be safely
 * closed even if the compression method is unsupported.
 * <p>
 * In case we don't know the compressed size of this entry or have
 * already buffered too much data from the underlying stream to support
 * uncompression, then the uncompression process is completed and the
 * end position of the stream is adjusted based on the result of that
 * process.
 *
 * @throws IOException if an error occurs
 */
private void closeEntry() throws IOException {
    if (closed) {
        throw new IOException("The stream is closed");
    }
    if (current == null) {
        // no entry open - nothing to do
        return;
    }

    // Ensure all entry bytes are read
    if (currentEntryHasOutstandingBytes()) {
        // compressed size known and nothing over-read: plain skip
        drainCurrentEntryData();
    } else {
        // this is guaranteed to exhaust the stream
        skip(Long.MAX_VALUE); //NOSONAR

        final long inB = current.entry.getMethod() == ZipArchiveOutputStream.DEFLATED
            ? getBytesInflated() : current.bytesRead;

        // this is at most a single read() operation and can't
        // exceed the range of int
        final int diff = (int) (current.bytesReadFromStream - inB);

        // Pushback any required bytes
        if (diff > 0) {
            // over-read bytes sit at the end of the buffer - return them
            pushback(buf.array(), buf.limit() - diff, diff);
            current.bytesReadFromStream -= diff;
        }

        // Drain remainder of entry if not all data bytes were required
        if (currentEntryHasOutstandingBytes()) {
            drainCurrentEntryData();
        }
    }

    if (lastStoredEntry == null && current.hasDataDescriptor) {
        readDataDescriptor();
    }

    // reset per-entry state
    inf.reset();
    buf.clear().flip();
    current = null;
    lastStoredEntry = null;
}
|
| 729 |
+
|
| 730 |
+
/**
|
| 731 |
+
* If the compressed size of the current entry is included in the entry header
|
| 732 |
+
* and there are any outstanding bytes in the underlying stream, then
|
| 733 |
+
* this returns true.
|
| 734 |
+
*
|
| 735 |
+
* @return true, if current entry is determined to have outstanding bytes, false otherwise
|
| 736 |
+
*/
|
| 737 |
+
private boolean currentEntryHasOutstandingBytes() {
|
| 738 |
+
return current.bytesReadFromStream <= current.entry.getCompressedSize()
|
| 739 |
+
&& !current.hasDataDescriptor;
|
| 740 |
+
}
|
| 741 |
+
|
| 742 |
+
/**
|
| 743 |
+
* Read all data of the current entry from the underlying stream
|
| 744 |
+
* that hasn't been read, yet.
|
| 745 |
+
*/
|
| 746 |
+
private void drainCurrentEntryData() throws IOException {
|
| 747 |
+
long remaining = current.entry.getCompressedSize() - current.bytesReadFromStream;
|
| 748 |
+
while (remaining > 0) {
|
| 749 |
+
final long n = in.read(buf.array(), 0, (int) Math.min(buf.capacity(), remaining));
|
| 750 |
+
if (n < 0) {
|
| 751 |
+
throw new EOFException("Truncated ZIP entry: "
|
| 752 |
+
+ ArchiveUtils.sanitize(current.entry.getName()));
|
| 753 |
+
}
|
| 754 |
+
count(n);
|
| 755 |
+
remaining -= n;
|
| 756 |
+
}
|
| 757 |
+
}
|
| 758 |
+
|
| 759 |
+
/**
 * Get the number of bytes Inflater has actually processed.
 *
 * <p>for Java &lt; Java7 the getBytes* methods in
 * Inflater/Deflater seem to return unsigned ints rather than
 * longs that start over with 0 at 2^32.</p>
 *
 * <p>The stream knows how many bytes it has read, but not how
 * many the Inflater actually consumed - it should be between the
 * total number of bytes read for the entry and the total number
 * minus the last read operation. Here we just try to make the
 * value close enough to the bytes we've read by assuming the
 * number of bytes consumed must be smaller than (or equal to) the
 * number of bytes read but not smaller by more than 2^32.</p>
 */
private long getBytesInflated() {
    long inB = inf.getBytesRead();
    if (current.bytesReadFromStream >= TWO_EXP_32) {
        // add back multiples of 2^32 lost to the unsigned-int wraparound
        while (inB + TWO_EXP_32 <= current.bytesReadFromStream) {
            inB += TWO_EXP_32;
        }
    }
    return inB;
}
|
| 783 |
+
|
| 784 |
+
/**
 * Refills the internal buffer from the underlying stream and feeds
 * the bytes to the inflater.
 *
 * @return the number of bytes read, or -1 at end of stream
 */
private int fill() throws IOException {
    if (closed) {
        throw new IOException("The stream is closed");
    }
    final int length = in.read(buf.array());
    if (length > 0) {
        buf.limit(length);
        count(buf.limit());
        inf.setInput(buf.array(), 0, buf.limit());
    }
    return length;
}
|
| 796 |
+
|
| 797 |
+
// Fills b completely from the underlying stream, starting at index 0.
private void readFully(final byte[] b) throws IOException {
    readFully(b, 0);
}
|
| 800 |
+
|
| 801 |
+
private void readFully(final byte[] b, final int off) throws IOException {
|
| 802 |
+
final int len = b.length - off;
|
| 803 |
+
final int count = IOUtils.readFully(in, b, off, len);
|
| 804 |
+
count(count);
|
| 805 |
+
if (count < len) {
|
| 806 |
+
throw new EOFException();
|
| 807 |
+
}
|
| 808 |
+
}
|
| 809 |
+
|
| 810 |
+
// Reads the data descriptor that follows the current entry's data and
// copies CRC and size information into the entry.
private void readDataDescriptor() throws IOException {
    readFully(wordBuf);
    ZipLong val = new ZipLong(wordBuf);
    if (ZipLong.DD_SIG.equals(val)) {
        // data descriptor with signature, skip sig
        readFully(wordBuf);
        val = new ZipLong(wordBuf);
    }
    current.entry.setCrc(val.getValue());

    // if there is a ZIP64 extra field, sizes are eight bytes
    // each, otherwise four bytes each. Unfortunately some
    // implementations - namely Java7 - use eight bytes without
    // using a ZIP64 extra field -
    // https://bugs.sun.com/bugdatabase/view_bug.do?bug_id=7073588

    // just read 16 bytes and check whether bytes nine to twelve
    // look like one of the signatures of what could follow a data
    // descriptor (ignoring archive decryption headers for now).
    // If so, push back eight bytes and assume sizes are four
    // bytes, otherwise sizes are eight bytes each.
    readFully(twoDwordBuf);
    final ZipLong potentialSig = new ZipLong(twoDwordBuf, DWORD);
    if (potentialSig.equals(ZipLong.CFH_SIG) || potentialSig.equals(ZipLong.LFH_SIG)) {
        // 32-bit sizes: bytes nine to twelve were the next header
        pushback(twoDwordBuf, DWORD, DWORD);
        current.entry.setCompressedSize(ZipLong.getValue(twoDwordBuf));
        current.entry.setSize(ZipLong.getValue(twoDwordBuf, WORD));
    } else {
        // 64-bit sizes
        current.entry.setCompressedSize(ZipEightByteInteger.getLongValue(twoDwordBuf));
        current.entry.setSize(ZipEightByteInteger.getLongValue(twoDwordBuf, DWORD));
    }
}
|
| 842 |
+
|
| 843 |
+
/**
|
| 844 |
+
* Whether this entry requires a data descriptor this library can work with.
|
| 845 |
+
*
|
| 846 |
+
* @return true if allowStoredEntriesWithDataDescriptor is true,
|
| 847 |
+
* the entry doesn't require any data descriptor or the method is
|
| 848 |
+
* DEFLATED or ENHANCED_DEFLATED.
|
| 849 |
+
*/
|
| 850 |
+
private boolean supportsDataDescriptorFor(final ZipArchiveEntry entry) {
|
| 851 |
+
return !entry.getGeneralPurposeBit().usesDataDescriptor()
|
| 852 |
+
|
| 853 |
+
|| (allowStoredEntriesWithDataDescriptor && entry.getMethod() == ZipEntry.STORED)
|
| 854 |
+
|| entry.getMethod() == ZipEntry.DEFLATED
|
| 855 |
+
|| entry.getMethod() == ZipMethod.ENHANCED_DEFLATED.getCode();
|
| 856 |
+
}
|
| 857 |
+
|
| 858 |
+
/**
|
| 859 |
+
* Whether the compressed size for the entry is either known or
|
| 860 |
+
* not required by the compression method being used.
|
| 861 |
+
*/
|
| 862 |
+
private boolean supportsCompressedSizeFor(final ZipArchiveEntry entry) {
|
| 863 |
+
return entry.getCompressedSize() != ArchiveEntry.SIZE_UNKNOWN
|
| 864 |
+
|| entry.getMethod() == ZipEntry.DEFLATED
|
| 865 |
+
|| entry.getMethod() == ZipMethod.ENHANCED_DEFLATED.getCode()
|
| 866 |
+
|| (entry.getGeneralPurposeBit().usesDataDescriptor()
|
| 867 |
+
&& allowStoredEntriesWithDataDescriptor
|
| 868 |
+
&& entry.getMethod() == ZipEntry.STORED);
|
| 869 |
+
}
|
| 870 |
+
|
| 871 |
+
// Appended to exception messages when a stored entry cannot be handled
// by the streaming reader; points users at ZipFile instead.
private static final String USE_ZIPFILE_INSTEAD_OF_STREAM_DISCLAIMER =
    " while reading a stored entry using data descriptor. Either the archive is broken"
    + " or it can not be read using ZipArchiveInputStream and you must use ZipFile."
    + " A common cause for this is a ZIP archive containing a ZIP archive."
    + " See http://commons.apache.org/proper/commons-compress/zip.html#ZipArchiveInputStream_vs_ZipFile";
|
| 876 |
+
|
| 877 |
+
/**
 * Caches a stored entry that uses the data descriptor.
 *
 * <ul>
 * <li>Reads a stored entry until the signature of a local file
 * header, central directory header or data descriptor has been
 * found.</li>
 * <li>Stores all entry data in lastStoredEntry.</p>
 * <li>Rewinds the stream to position at the data
 * descriptor.</li>
 * <li>reads the data descriptor</li>
 * </ul>
 *
 * <p>After calling this method the entry should know its size,
 * the entry's data is cached and the stream is positioned at the
 * next local file or central directory header.</p>
 */
private void readStoredEntry() throws IOException {
    final ByteArrayOutputStream bos = new ByteArrayOutputStream();
    int off = 0;
    boolean done = false;

    // length of DD without signature
    final int ddLen = current.usesZip64 ? WORD + 2 * DWORD : 3 * WORD;

    while (!done) {
        final int r = in.read(buf.array(), off, ZipArchiveOutputStream.BUFFER_SIZE - off);
        if (r <= 0) {
            // read the whole archive without ever finding a
            // central directory
            throw new IOException("Truncated ZIP file");
        }
        if (r + off < 4) {
            // buffer too small to check for a signature, loop
            off += r;
            continue;
        }

        done = bufferContainsSignature(bos, off, r, ddLen);
        if (!done) {
            off = cacheBytesRead(bos, off, r, ddLen);
        }
    }
    // sanity checks against the sizes read from the data descriptor -
    // STORED entries must have equal compressed and uncompressed sizes
    if (current.entry.getCompressedSize() != current.entry.getSize()) {
        throw new ZipException("compressed and uncompressed size don't match"
            + USE_ZIPFILE_INSTEAD_OF_STREAM_DISCLAIMER);
    }
    final byte[] b = bos.toByteArray();
    if (b.length != current.entry.getSize()) {
        throw new ZipException("actual and claimed size don't match"
            + USE_ZIPFILE_INSTEAD_OF_STREAM_DISCLAIMER);
    }
    lastStoredEntry = new ByteArrayInputStream(b);
}
|
| 931 |
+
|
| 932 |
+
// Raw four-byte signatures scanned for while caching a stored entry.
private static final byte[] LFH = ZipLong.LFH_SIG.getBytes();
private static final byte[] CFH = ZipLong.CFH_SIG.getBytes();
private static final byte[] DD = ZipLong.DD_SIG.getBytes();
|
| 935 |
+
|
| 936 |
+
/**
|
| 937 |
+
* Checks whether the current buffer contains the signature of a
|
| 938 |
+
* "data descriptor", "local file header" or
|
| 939 |
+
* "central directory entry".
|
| 940 |
+
*
|
| 941 |
+
* <p>If it contains such a signature, reads the data descriptor
|
| 942 |
+
* and positions the stream right after the data descriptor.</p>
|
| 943 |
+
*/
|
| 944 |
+
private boolean bufferContainsSignature(final ByteArrayOutputStream bos, final int offset, final int lastRead, final int expectedDDLen)
|
| 945 |
+
throws IOException {
|
| 946 |
+
|
| 947 |
+
boolean done = false;
|
| 948 |
+
for (int i = 0; !done && i < offset + lastRead - 4; i++) {
|
| 949 |
+
if (buf.array()[i] == LFH[0] && buf.array()[i + 1] == LFH[1]) {
|
| 950 |
+
int expectDDPos = i;
|
| 951 |
+
if (i >= expectedDDLen &&
|
| 952 |
+
(buf.array()[i + 2] == LFH[2] && buf.array()[i + 3] == LFH[3])
|
| 953 |
+
|| (buf.array()[i] == CFH[2] && buf.array()[i + 3] == CFH[3])) {
|
| 954 |
+
// found a LFH or CFH:
|
| 955 |
+
expectDDPos = i - expectedDDLen;
|
| 956 |
+
done = true;
|
| 957 |
+
}
|
| 958 |
+
else if (buf.array()[i + 2] == DD[2] && buf.array()[i + 3] == DD[3]) {
|
| 959 |
+
// found DD:
|
| 960 |
+
done = true;
|
| 961 |
+
}
|
| 962 |
+
if (done) {
|
| 963 |
+
// * push back bytes read in excess as well as the data
|
| 964 |
+
// descriptor
|
| 965 |
+
// * copy the remaining bytes to cache
|
| 966 |
+
// * read data descriptor
|
| 967 |
+
pushback(buf.array(), expectDDPos, offset + lastRead - expectDDPos);
|
| 968 |
+
bos.write(buf.array(), 0, expectDDPos);
|
| 969 |
+
readDataDescriptor();
|
| 970 |
+
}
|
| 971 |
+
}
|
| 972 |
+
}
|
| 973 |
+
return done;
|
| 974 |
+
}
|
| 975 |
+
|
| 976 |
+
/**
 * If the last read bytes could hold a data descriptor and an
 * incomplete signature then save the last bytes to the front of
 * the buffer and cache everything in front of the potential data
 * descriptor into the given ByteArrayOutputStream.
 *
 * <p>Data descriptor plus incomplete signature (3 bytes in the
 * worst case) can be 20 bytes max.</p>
 */
private int cacheBytesRead(final ByteArrayOutputStream bos, int offset, final int lastRead, final int expecteDDLen) {
    // Everything except a potential descriptor + partial signature tail
    // is definitely entry data and can be flushed to the cache.
    final int cacheable = offset + lastRead - expecteDDLen - 3;
    if (cacheable > 0) {
        bos.write(buf.array(), 0, cacheable);
        // move the tail to the front so the next read appends after it
        System.arraycopy(buf.array(), cacheable, buf.array(), 0, expecteDDLen + 3);
        offset = expecteDDLen + 3;
    } else {
        offset += lastRead;
    }
    return offset;
}
|
| 996 |
+
|
| 997 |
+
// Returns bytes to the underlying PushbackInputStream and corrects the
// bytes-read counter accordingly.
private void pushback(final byte[] buf, final int offset, final int length) throws IOException {
    ((PushbackInputStream) in).unread(buf, offset, length);
    pushedBackBytes(length);
}
|
| 1001 |
+
|
| 1002 |
+
// End of Central Directory Record
|
| 1003 |
+
// end of central dir signature WORD
|
| 1004 |
+
// number of this disk SHORT
|
| 1005 |
+
// number of the disk with the
|
| 1006 |
+
// start of the central directory SHORT
|
| 1007 |
+
// total number of entries in the
|
| 1008 |
+
// central directory on this disk SHORT
|
| 1009 |
+
// total number of entries in
|
| 1010 |
+
// the central directory SHORT
|
| 1011 |
+
// size of the central directory WORD
|
| 1012 |
+
// offset of start of central
|
| 1013 |
+
// directory with respect to
|
| 1014 |
+
// the starting disk number WORD
|
| 1015 |
+
// .ZIP file comment length SHORT
|
| 1016 |
+
// .ZIP file comment up to 64KB
|
| 1017 |
+
//
|
| 1018 |
+
|
| 1019 |
+
/**
 * Reads the stream until it find the "End of central directory
 * record" and consumes it as well.
 */
private void skipRemainderOfArchive() throws IOException {
    // skip over central directory. One LFH has been read too much
    // already. The calculation discounts file names and extra
    // data so it will be too short.
    realSkip((long) entriesRead * CFH_LEN - LFH_LEN);
    findEocdRecord();
    // skip the fixed-size remainder of the EOCD record up to the
    // comment-length field
    realSkip((long) ZipFile.MIN_EOCD_SIZE - WORD /* signature */ - SHORT /* comment len */);
    readFully(shortBuf);
    // file comment
    realSkip(ZipShort.getValue(shortBuf));
}
|
| 1034 |
+
|
| 1035 |
+
/**
 * Reads forward until the signature of the "End of central
 * directory" record is found.
 */
private void findEocdRecord() throws IOException {
    int currentByte = -1;
    // set when a partial match fails on a byte that could itself start
    // a new signature - that byte must be re-examined without reading
    boolean skipReadCall = false;
    while (skipReadCall || (currentByte = readOneByte()) > -1) {
        skipReadCall = false;
        if (!isFirstByteOfEocdSig(currentByte)) {
            continue;
        }
        currentByte = readOneByte();
        if (currentByte != ZipArchiveOutputStream.EOCD_SIG[1]) {
            if (currentByte == -1) {
                break;
            }
            skipReadCall = isFirstByteOfEocdSig(currentByte);
            continue;
        }
        currentByte = readOneByte();
        if (currentByte != ZipArchiveOutputStream.EOCD_SIG[2]) {
            if (currentByte == -1) {
                break;
            }
            skipReadCall = isFirstByteOfEocdSig(currentByte);
            continue;
        }
        currentByte = readOneByte();
        if (currentByte == -1
            || currentByte == ZipArchiveOutputStream.EOCD_SIG[3]) {
            // full signature matched (or stream ended) - stop scanning
            break;
        }
        skipReadCall = isFirstByteOfEocdSig(currentByte);
    }
}
|
| 1071 |
+
|
| 1072 |
+
/**
|
| 1073 |
+
* Skips bytes by reading from the underlying stream rather than
|
| 1074 |
+
* the (potentially inflating) archive stream - which {@link
|
| 1075 |
+
* #skip} would do.
|
| 1076 |
+
*
|
| 1077 |
+
* Also updates bytes-read counter.
|
| 1078 |
+
*/
|
| 1079 |
+
private void realSkip(final long value) throws IOException {
|
| 1080 |
+
if (value >= 0) {
|
| 1081 |
+
long skipped = 0;
|
| 1082 |
+
while (skipped < value) {
|
| 1083 |
+
final long rem = value - skipped;
|
| 1084 |
+
final int x = in.read(skipBuf, 0, (int) (skipBuf.length > rem ? rem : skipBuf.length));
|
| 1085 |
+
if (x == -1) {
|
| 1086 |
+
return;
|
| 1087 |
+
}
|
| 1088 |
+
count(x);
|
| 1089 |
+
skipped += x;
|
| 1090 |
+
}
|
| 1091 |
+
return;
|
| 1092 |
+
}
|
| 1093 |
+
throw new IllegalArgumentException();
|
| 1094 |
+
}
|
| 1095 |
+
|
| 1096 |
+
/**
|
| 1097 |
+
* Reads bytes by reading from the underlying stream rather than
|
| 1098 |
+
* the (potentially inflating) archive stream - which {@link #read} would do.
|
| 1099 |
+
*
|
| 1100 |
+
* Also updates bytes-read counter.
|
| 1101 |
+
*/
|
| 1102 |
+
private int readOneByte() throws IOException {
|
| 1103 |
+
final int b = in.read();
|
| 1104 |
+
if (b != -1) {
|
| 1105 |
+
count(1);
|
| 1106 |
+
}
|
| 1107 |
+
return b;
|
| 1108 |
+
}
|
| 1109 |
+
|
| 1110 |
+
// True if b matches the first byte of the end-of-central-directory signature.
private boolean isFirstByteOfEocdSig(final int b) {
    return b == ZipArchiveOutputStream.EOCD_SIG[0];
}
|
| 1113 |
+
|
| 1114 |
+
// Magic string that terminates an APK Signing Block.
private static final byte[] APK_SIGNING_BLOCK_MAGIC = new byte[] {
    'A', 'P', 'K', ' ', 'S', 'i', 'g', ' ', 'B', 'l', 'o', 'c', 'k', ' ', '4', '2',
};
// Used to skip in Long.MAX_VALUE-sized steps when a suspect block length
// exceeds the long range.
private static final BigInteger LONG_MAX = BigInteger.valueOf(Long.MAX_VALUE);
|
| 1118 |
+
|
| 1119 |
+
/**
 * Checks whether this might be an APK Signing Block.
 *
 * <p>Unfortunately the APK signing block does not start with some kind of signature, it rather ends with one. It
 * starts with a length, so what we do is parse the suspect length, skip ahead far enough, look for the signature
 * and if we've found it, return true.</p>
 *
 * @param suspectLocalFileHeader the bytes read from the underlying stream in the expectation that they would hold
 * the local file header of the next entry.
 *
 * @return true if this looks like a APK signing block
 *
 * @see <a href="https://source.android.com/security/apksigning/v2">https://source.android.com/security/apksigning/v2</a>
 */
private boolean isApkSigningBlock(byte[] suspectLocalFileHeader) throws IOException {
    // length of block excluding the size field itself
    BigInteger len = ZipEightByteInteger.getValue(suspectLocalFileHeader);
    // LFH has already been read and all but the first eight bytes contain (part of) the APK signing block,
    // also subtract 16 bytes in order to position us at the magic string
    BigInteger toSkip = len.add(BigInteger.valueOf(DWORD - suspectLocalFileHeader.length
        - (long) APK_SIGNING_BLOCK_MAGIC.length));
    byte[] magic = new byte[APK_SIGNING_BLOCK_MAGIC.length];

    try {
        if (toSkip.signum() < 0) {
            // suspectLocalFileHeader contains the start of suspect magic string
            int off = suspectLocalFileHeader.length + toSkip.intValue();
            // length was shorter than magic length
            if (off < DWORD) {
                return false;
            }
            int bytesInBuffer = Math.abs(toSkip.intValue());
            // copy what we already have, read the rest from the stream
            System.arraycopy(suspectLocalFileHeader, off, magic, 0, Math.min(bytesInBuffer, magic.length));
            if (bytesInBuffer < magic.length) {
                readFully(magic, bytesInBuffer);
            }
        } else {
            // skip forward to the magic string; skip in long-sized chunks
            // in case the (attacker-controlled) length exceeds long range
            while (toSkip.compareTo(LONG_MAX) > 0) {
                realSkip(Long.MAX_VALUE);
                toSkip = toSkip.add(LONG_MAX.negate());
            }
            realSkip(toSkip.longValue());
            readFully(magic);
        }
    } catch (EOFException ex) { //NOSONAR
        // length was invalid
        return false;
    }
    return Arrays.equals(magic, APK_SIGNING_BLOCK_MAGIC);
}
|
| 1169 |
+
|
| 1170 |
+
/**
|
| 1171 |
+
* Structure collecting information for the entry that is
|
| 1172 |
+
* currently being read.
|
| 1173 |
+
*/
|
| 1174 |
+
private static final class CurrentEntry {
|
| 1175 |
+
|
| 1176 |
+
/**
|
| 1177 |
+
* Current ZIP entry.
|
| 1178 |
+
*/
|
| 1179 |
+
private final ZipArchiveEntry entry = new ZipArchiveEntry();
|
| 1180 |
+
|
| 1181 |
+
/**
|
| 1182 |
+
* Does the entry use a data descriptor?
|
| 1183 |
+
*/
|
| 1184 |
+
private boolean hasDataDescriptor;
|
| 1185 |
+
|
| 1186 |
+
/**
|
| 1187 |
+
* Does the entry have a ZIP64 extended information extra field.
|
| 1188 |
+
*/
|
| 1189 |
+
private boolean usesZip64;
|
| 1190 |
+
|
| 1191 |
+
/**
|
| 1192 |
+
* Number of bytes of entry content read by the client if the
|
| 1193 |
+
* entry is STORED.
|
| 1194 |
+
*/
|
| 1195 |
+
private long bytesRead;
|
| 1196 |
+
|
| 1197 |
+
/**
|
| 1198 |
+
* Number of bytes of entry content read from the stream.
|
| 1199 |
+
*
|
| 1200 |
+
* <p>This may be more than the actual entry's length as some
|
| 1201 |
+
* stuff gets buffered up and needs to be pushed back when the
|
| 1202 |
+
* end of the entry has been reached.</p>
|
| 1203 |
+
*/
|
| 1204 |
+
private long bytesReadFromStream;
|
| 1205 |
+
|
| 1206 |
+
/**
|
| 1207 |
+
* The checksum calculated as the current entry is read.
|
| 1208 |
+
*/
|
| 1209 |
+
private final CRC32 crc = new CRC32();
|
| 1210 |
+
|
| 1211 |
+
/**
|
| 1212 |
+
* The input stream decompressing the data for shrunk and imploded entries.
|
| 1213 |
+
*/
|
| 1214 |
+
private InputStream in;
|
| 1215 |
+
}
|
| 1216 |
+
|
| 1217 |
+
/**
|
| 1218 |
+
* Bounded input stream adapted from commons-io
|
| 1219 |
+
*/
|
| 1220 |
+
private class BoundedInputStream extends InputStream {
|
| 1221 |
+
|
| 1222 |
+
/** the wrapped input stream */
|
| 1223 |
+
private final InputStream in;
|
| 1224 |
+
|
| 1225 |
+
/** the max length to provide */
|
| 1226 |
+
private final long max;
|
| 1227 |
+
|
| 1228 |
+
/** the number of bytes already returned */
|
| 1229 |
+
private long pos = 0;
|
| 1230 |
+
|
| 1231 |
+
/**
|
| 1232 |
+
* Creates a new <code>BoundedInputStream</code> that wraps the given input
|
| 1233 |
+
* stream and limits it to a certain size.
|
| 1234 |
+
*
|
| 1235 |
+
* @param in The wrapped input stream
|
| 1236 |
+
* @param size The maximum number of bytes to return
|
| 1237 |
+
*/
|
| 1238 |
+
public BoundedInputStream(final InputStream in, final long size) {
|
| 1239 |
+
this.max = size;
|
| 1240 |
+
this.in = in;
|
| 1241 |
+
}
|
| 1242 |
+
|
| 1243 |
+
@Override
|
| 1244 |
+
public int read() throws IOException {
|
| 1245 |
+
if (max >= 0 && pos >= max) {
|
| 1246 |
+
return -1;
|
| 1247 |
+
}
|
| 1248 |
+
final int result = in.read();
|
| 1249 |
+
pos++;
|
| 1250 |
+
count(1);
|
| 1251 |
+
current.bytesReadFromStream++;
|
| 1252 |
+
return result;
|
| 1253 |
+
}
|
| 1254 |
+
|
| 1255 |
+
@Override
|
| 1256 |
+
public int read(final byte[] b) throws IOException {
|
| 1257 |
+
return this.read(b, 0, b.length);
|
| 1258 |
+
}
|
| 1259 |
+
|
| 1260 |
+
@Override
|
| 1261 |
+
public int read(final byte[] b, final int off, final int len) throws IOException {
|
| 1262 |
+
if (len == 0) {
|
| 1263 |
+
return 0;
|
| 1264 |
+
}
|
| 1265 |
+
if (max >= 0 && pos >= max) {
|
| 1266 |
+
return -1;
|
| 1267 |
+
}
|
| 1268 |
+
final long maxRead = max >= 0 ? Math.min(len, max - pos) : len;
|
| 1269 |
+
final int bytesRead = in.read(b, off, (int) maxRead);
|
| 1270 |
+
|
| 1271 |
+
if (bytesRead == -1) {
|
| 1272 |
+
return -1;
|
| 1273 |
+
}
|
| 1274 |
+
|
| 1275 |
+
pos += bytesRead;
|
| 1276 |
+
count(bytesRead);
|
| 1277 |
+
current.bytesReadFromStream += bytesRead;
|
| 1278 |
+
return bytesRead;
|
| 1279 |
+
}
|
| 1280 |
+
|
| 1281 |
+
@Override
|
| 1282 |
+
public long skip(final long n) throws IOException {
|
| 1283 |
+
final long toSkip = max >= 0 ? Math.min(n, max - pos) : n;
|
| 1284 |
+
final long skippedBytes = IOUtils.skip(in, toSkip);
|
| 1285 |
+
pos += skippedBytes;
|
| 1286 |
+
return skippedBytes;
|
| 1287 |
+
}
|
| 1288 |
+
|
| 1289 |
+
@Override
|
| 1290 |
+
public int available() throws IOException {
|
| 1291 |
+
if (max >= 0 && pos >= max) {
|
| 1292 |
+
return 0;
|
| 1293 |
+
}
|
| 1294 |
+
return in.available();
|
| 1295 |
+
}
|
| 1296 |
+
}
|
| 1297 |
+
}
|
local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/COMPRESS-477/split_zip_created_by_zip/file_to_compare_1
ADDED
|
@@ -0,0 +1,38 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
/*
|
| 2 |
+
* Licensed to the Apache Software Foundation (ASF) under one
|
| 3 |
+
* or more contributor license agreements. See the NOTICE file
|
| 4 |
+
* distributed with this work for additional information
|
| 5 |
+
* regarding copyright ownership. The ASF licenses this file
|
| 6 |
+
* to you under the Apache License, Version 2.0 (the
|
| 7 |
+
* "License"); you may not use this file except in compliance
|
| 8 |
+
* with the License. You may obtain a copy of the License at
|
| 9 |
+
*
|
| 10 |
+
* http://www.apache.org/licenses/LICENSE-2.0
|
| 11 |
+
*
|
| 12 |
+
* Unless required by applicable law or agreed to in writing,
|
| 13 |
+
* software distributed under the License is distributed on an
|
| 14 |
+
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
| 15 |
+
* KIND, either express or implied. See the License for the
|
| 16 |
+
* specific language governing permissions and limitations
|
| 17 |
+
* under the License.
|
| 18 |
+
*/
|
| 19 |
+
package org.apache.commons.compress.archivers.dump;
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
/**
|
| 23 |
+
* Unsupported compression algorithm. The dump archive uses an unsupported
|
| 24 |
+
* compression algorithm (BZLIB2 or LZO).
|
| 25 |
+
*/
|
| 26 |
+
public class UnsupportedCompressionAlgorithmException
|
| 27 |
+
extends DumpArchiveException {
|
| 28 |
+
private static final long serialVersionUID = 1L;
|
| 29 |
+
|
| 30 |
+
public UnsupportedCompressionAlgorithmException() {
|
| 31 |
+
super("this file uses an unsupported compression algorithm.");
|
| 32 |
+
}
|
| 33 |
+
|
| 34 |
+
public UnsupportedCompressionAlgorithmException(final String alg) {
|
| 35 |
+
super("this file uses an unsupported compression algorithm: " + alg +
|
| 36 |
+
".");
|
| 37 |
+
}
|
| 38 |
+
}
|
local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/COMPRESS-477/split_zip_created_by_zip/file_to_compare_2
ADDED
|
@@ -0,0 +1,79 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
/*
|
| 2 |
+
* Licensed to the Apache Software Foundation (ASF) under one
|
| 3 |
+
* or more contributor license agreements. See the NOTICE file
|
| 4 |
+
* distributed with this work for additional information
|
| 5 |
+
* regarding copyright ownership. The ASF licenses this file
|
| 6 |
+
* to you under the Apache License, Version 2.0 (the
|
| 7 |
+
* "License"); you may not use this file except in compliance
|
| 8 |
+
* with the License. You may obtain a copy of the License at
|
| 9 |
+
*
|
| 10 |
+
* http://www.apache.org/licenses/LICENSE-2.0
|
| 11 |
+
*
|
| 12 |
+
* Unless required by applicable law or agreed to in writing,
|
| 13 |
+
* software distributed under the License is distributed on an
|
| 14 |
+
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
| 15 |
+
* KIND, either express or implied. See the License for the
|
| 16 |
+
* specific language governing permissions and limitations
|
| 17 |
+
* under the License.
|
| 18 |
+
*/
|
| 19 |
+
|
| 20 |
+
package org.apache.commons.compress.compressors.deflate;
|
| 21 |
+
|
| 22 |
+
import java.util.zip.Deflater;
|
| 23 |
+
|
| 24 |
+
/**
|
| 25 |
+
* Parameters for the Deflate compressor.
|
| 26 |
+
* @since 1.9
|
| 27 |
+
*/
|
| 28 |
+
public class DeflateParameters {
|
| 29 |
+
|
| 30 |
+
private boolean zlibHeader = true;
|
| 31 |
+
private int compressionLevel = Deflater.DEFAULT_COMPRESSION;
|
| 32 |
+
|
| 33 |
+
/**
|
| 34 |
+
* Whether or not the zlib header shall be written (when
|
| 35 |
+
* compressing) or expected (when decompressing).
|
| 36 |
+
* @return true if zlib header shall be written
|
| 37 |
+
*/
|
| 38 |
+
public boolean withZlibHeader() {
|
| 39 |
+
return zlibHeader;
|
| 40 |
+
}
|
| 41 |
+
|
| 42 |
+
/**
|
| 43 |
+
* Sets the zlib header presence parameter.
|
| 44 |
+
*
|
| 45 |
+
* <p>This affects whether or not the zlib header will be written
|
| 46 |
+
* (when compressing) or expected (when decompressing).</p>
|
| 47 |
+
*
|
| 48 |
+
* @param zlibHeader true if zlib header shall be written
|
| 49 |
+
*/
|
| 50 |
+
public void setWithZlibHeader(final boolean zlibHeader) {
|
| 51 |
+
this.zlibHeader = zlibHeader;
|
| 52 |
+
}
|
| 53 |
+
|
| 54 |
+
/**
|
| 55 |
+
* The compression level.
|
| 56 |
+
* @see #setCompressionLevel
|
| 57 |
+
* @return the compression level
|
| 58 |
+
*/
|
| 59 |
+
public int getCompressionLevel() {
|
| 60 |
+
return compressionLevel;
|
| 61 |
+
}
|
| 62 |
+
|
| 63 |
+
/**
|
| 64 |
+
* Sets the compression level.
|
| 65 |
+
*
|
| 66 |
+
* @param compressionLevel the compression level (between 0 and 9)
|
| 67 |
+
* @see Deflater#NO_COMPRESSION
|
| 68 |
+
* @see Deflater#BEST_SPEED
|
| 69 |
+
* @see Deflater#DEFAULT_COMPRESSION
|
| 70 |
+
* @see Deflater#BEST_COMPRESSION
|
| 71 |
+
*/
|
| 72 |
+
public void setCompressionLevel(final int compressionLevel) {
|
| 73 |
+
if (compressionLevel < -1 || compressionLevel > 9) {
|
| 74 |
+
throw new IllegalArgumentException("Invalid Deflate compression level: " + compressionLevel);
|
| 75 |
+
}
|
| 76 |
+
this.compressionLevel = compressionLevel;
|
| 77 |
+
}
|
| 78 |
+
|
| 79 |
+
}
|
local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/org/apache/commons/compress/COMPRESS-626/compress-626-pack200.jar
ADDED
|
Binary file (781 Bytes). View file
|
|
|
local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/org/apache/commons/compress/COMPRESS-644/ARW05UP.ICO
ADDED
|
|
local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/org/apache/commons/compress/COMPRESS-661/testARofText.ar
ADDED
|
@@ -0,0 +1,5 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
!<arch>
|
| 2 |
+
testTXT.txt/ 1262968202 500 500 100644 47 `
|
| 3 |
+
Test d'indexation de Txt
|
| 4 |
+
http://www.apache.org
|
| 5 |
+
|
local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/org/apache/commons/compress/ar/number_parsing/bad_group-fail.ar
ADDED
|
@@ -0,0 +1,8 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
!<arch>
|
| 2 |
+
// 68 `
|
| 3 |
+
this_is_a_long_file_name.txt/
|
| 4 |
+
this_is_a_long_file_name_as_well.txt/
|
| 5 |
+
/0 1454693980 1000 1.23 100664 14 `
|
| 6 |
+
Hello, world!
|
| 7 |
+
/30 1454694016 1000 1000 100664 4 `
|
| 8 |
+
Bye
|
local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/org/apache/commons/compress/ar/number_parsing/bad_length-fail.ar
ADDED
|
@@ -0,0 +1,8 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
!<arch>
|
| 2 |
+
// 68 `
|
| 3 |
+
this_is_a_long_file_name.txt/
|
| 4 |
+
this_is_a_long_file_name_as_well.txt/
|
| 5 |
+
/0 1454693980 1000 1000 100664 1.23 `
|
| 6 |
+
Hello, world!
|
| 7 |
+
/30 1454694016 1000 1000 100664 4 `
|
| 8 |
+
Bye
|
local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/org/apache/commons/compress/ar/number_parsing/bad_long_namelen_bsd-fail.ar
ADDED
|
@@ -0,0 +1,5 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
!<arch>
|
| 2 |
+
#1/123456789012 1311256511 1000 1000 100644 42 `
|
| 3 |
+
this_is_a_long_file_name.txtHello, world!
|
| 4 |
+
#1/36 1454694016 1000 1000 100664 40 `
|
| 5 |
+
this_is_a_long_file_name_as_well.txtBye
|
local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/org/apache/commons/compress/ar/number_parsing/bad_long_namelen_gnu1-fail.ar
ADDED
|
@@ -0,0 +1,8 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
!<arch>
|
| 2 |
+
// 68 `
|
| 3 |
+
this_is_a_long_file_name.txt/
|
| 4 |
+
this_is_a_long_file_name_as_well.txt/
|
| 5 |
+
/9999999999 1454693980 1000 1000 100664 14 `
|
| 6 |
+
Hello, world!
|
| 7 |
+
/30 1454694016 1000 1000 100664 4 `
|
| 8 |
+
Bye
|
local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/org/apache/commons/compress/ar/number_parsing/bad_long_namelen_gnu2-fail.ar
ADDED
|
@@ -0,0 +1,6 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
!<arch>
|
| 2 |
+
// 68 `
|
| 3 |
+
this_is_a_long_file_name.txt/
|
| 4 |
+
this_is_a_long_file_name_as_well.txt/
|
| 5 |
+
/29 1454694016 1000 1000 100664 4 `
|
| 6 |
+
Bye
|
local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/org/apache/commons/compress/ar/number_parsing/bad_long_namelen_gnu3-fail.ar
ADDED
|
Binary file (274 Bytes). View file
|
|
|
local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/org/apache/commons/compress/ar/number_parsing/bad_modified-fail.ar
ADDED
|
@@ -0,0 +1,8 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
!<arch>
|
| 2 |
+
// 68 `
|
| 3 |
+
this_is_a_long_file_name.txt/
|
| 4 |
+
this_is_a_long_file_name_as_well.txt/
|
| 5 |
+
/0 9e99999999 1000 1000 100664 14 `
|
| 6 |
+
Hello, world!
|
| 7 |
+
/30 1454694016 1000 1000 100664 4 `
|
| 8 |
+
Bye
|
local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/org/apache/commons/compress/ar/number_parsing/bad_table_length_gnu-fail.ar
ADDED
|
@@ -0,0 +1,8 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
!<arch>
|
| 2 |
+
// 1.23 `
|
| 3 |
+
this_is_a_long_file_name.txt/
|
| 4 |
+
this_is_a_long_file_name_as_well.txt/
|
| 5 |
+
/0 1454693980 1000 1000 100664 14 `
|
| 6 |
+
Hello, world!
|
| 7 |
+
/30 1454694016 1000 1000 100664 4 `
|
| 8 |
+
Bye
|
local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/org/apache/commons/compress/ar/number_parsing/bad_user-fail.ar
ADDED
|
@@ -0,0 +1,8 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
!<arch>
|
| 2 |
+
// 68 `
|
| 3 |
+
this_is_a_long_file_name.txt/
|
| 4 |
+
this_is_a_long_file_name_as_well.txt/
|
| 5 |
+
/0 1454693980 9e99 1000 100664 14 `
|
| 6 |
+
Hello, world!
|
| 7 |
+
/30 1454694016 1000 1000 100664 4 `
|
| 8 |
+
Bye
|
local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/org/apache/commons/compress/arj/zero_sized_headers-fail.arj
ADDED
|
Binary file (3.14 kB). View file
|
|
|
local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/org/apache/commons/compress/cpio/bad_long_value.cpio
ADDED
|
Binary file (1.54 kB). View file
|
|
|
local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/org/apache/commons/compress/dump/directory_null_bytes-fail.dump
ADDED
|
Binary file (78.9 kB). View file
|
|
|
local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/org/apache/commons/compress/dump/invalid_compression_type-fail.dump
ADDED
|
Binary file (11.9 kB). View file
|
|
|
local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/org/apache/commons/compress/fuzz/crash-f2efd9eaeb86cda597d07b5e3c3d81363633c2da
ADDED
|
Binary file (8.94 kB). View file
|
|
|
local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/org/apache/commons/compress/ico/commons-daemon/procrunr.ico
ADDED
|
|
local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/org/apache/commons/compress/ico/commons-daemon/procruns.ico
ADDED
|
|
local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/org/apache/commons/compress/ico/commons-daemon/procrunw.ico
ADDED
|
|
local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/org/apache/commons/compress/ico/commons-imaging/OutOfMemory_epine.ico
ADDED
|
|
local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/org/apache/commons/compress/pack/cpfloat_oom.pack
ADDED
|
Binary file (178 Bytes). View file
|
|
|
local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/org/apache/commons/compress/pack/cputf8_oom.pack
ADDED
|
Binary file (427 Bytes). View file
|
|
|
local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/org/apache/commons/compress/pack/favoured_oom.pack
ADDED
|
Binary file (84 Bytes). View file
|
|
|
local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/org/apache/commons/compress/pack/filebits_oom.pack
ADDED
|
Binary file (309 Bytes). View file
|
|
|
local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/org/apache/commons/compress/pack/flags_oom.pack
ADDED
|
Binary file (1.62 kB). View file
|
|
|
local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/org/apache/commons/compress/pack/segment_header_oom.pack
ADDED
|
Binary file (41 Bytes). View file
|
|
|
local-test-commons-compress-full-01-vuln_2/afc-commons-compress/src/test/resources/org/apache/commons/compress/pack/signatures_oom.pack
ADDED
|
Binary file (121 Bytes). View file
|
|
|
local-test-commons-compress-full-01-vuln_2/fuzz-tooling/docs/.gitignore
ADDED
|
@@ -0,0 +1,5 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
_site
|
| 2 |
+
.bundle
|
| 3 |
+
.sass-cache
|
| 4 |
+
.jekyll-metadata
|
| 5 |
+
vendor
|
local-test-commons-compress-full-01-vuln_2/fuzz-tooling/docs/_config.yml
ADDED
|
@@ -0,0 +1,40 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Welcome to Jekyll!
|
| 2 |
+
#
|
| 3 |
+
# This config file is meant for settings that affect your whole blog, values
|
| 4 |
+
# which you are expected to set up once and rarely edit after that. If you find
|
| 5 |
+
# yourself editing this file very often, consider using Jekyll's data files
|
| 6 |
+
# feature for the data you need to update frequently.
|
| 7 |
+
#
|
| 8 |
+
# For technical reasons, this file is *NOT* reloaded automatically when you use
|
| 9 |
+
# 'bundle exec jekyll serve'. If you change this file, please restart the server process.
|
| 10 |
+
|
| 11 |
+
# Site settings
|
| 12 |
+
# These are used to personalize your new site. If you look in the HTML files,
|
| 13 |
+
# you will see them accessed via {{ site.title }}, {{ site.email }}, and so on.
|
| 14 |
+
# You can create any custom variable you would like, and they will be accessible
|
| 15 |
+
# in the templates via {{ site.myvariable }}.
|
| 16 |
+
title: OSS-Fuzz
|
| 17 |
+
description: Documentation for OSS-Fuzz
|
| 18 |
+
baseurl: "/oss-fuzz" # the subpath of your site, e.g. /blog
|
| 19 |
+
url: "" # the base hostname & protocol for your site, e.g. http://example.com
|
| 20 |
+
|
| 21 |
+
# Build settings
|
| 22 |
+
markdown: kramdown
|
| 23 |
+
remote_theme: pmarsceill/just-the-docs
|
| 24 |
+
search_enabled: true
|
| 25 |
+
|
| 26 |
+
ga_tracking: G-LRX1V3S5P
|
| 27 |
+
|
| 28 |
+
aux_links:
|
| 29 |
+
"OSS-Fuzz on GitHub":
|
| 30 |
+
- https://github.com/google/oss-fuzz
|
| 31 |
+
|
| 32 |
+
# Exclude from processing.
|
| 33 |
+
exclude:
|
| 34 |
+
- Gemfile
|
| 35 |
+
- Gemfile.lock
|
| 36 |
+
- node_modules
|
| 37 |
+
- vendor/bundle/
|
| 38 |
+
- vendor/cache/
|
| 39 |
+
- vendor/gems/
|
| 40 |
+
- vendor/ruby/
|
local-test-commons-compress-full-01-vuln_2/fuzz-tooling/docs/glossary.md
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
This page has moved [here](https://google.github.io/oss-fuzz/reference/glossary/)
|
local-test-commons-compress-full-01-vuln_2/fuzz-tooling/docs/index.md
ADDED
|
@@ -0,0 +1,88 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
---
|
| 2 |
+
layout: default
|
| 3 |
+
title: OSS-Fuzz
|
| 4 |
+
permalink: /
|
| 5 |
+
nav_order: 1
|
| 6 |
+
has_children: true
|
| 7 |
+
has_toc: false
|
| 8 |
+
---
|
| 9 |
+
|
| 10 |
+
# OSS-Fuzz
|
| 11 |
+
|
| 12 |
+
[Fuzz testing] is a well-known technique for uncovering programming errors in
|
| 13 |
+
software. Many of these detectable errors, like [buffer overflow], can have
|
| 14 |
+
serious security implications. Google has found [thousands] of security
|
| 15 |
+
vulnerabilities and stability bugs by deploying [guided in-process fuzzing of
|
| 16 |
+
Chrome components], and we now want to share that service with the open source
|
| 17 |
+
community.
|
| 18 |
+
|
| 19 |
+
[Fuzz testing]: https://en.wikipedia.org/wiki/Fuzz_testing
|
| 20 |
+
[buffer overflow]: https://en.wikipedia.org/wiki/Buffer_overflow
|
| 21 |
+
[thousands]: https://bugs.chromium.org/p/chromium/issues/list?q=label%3AStability-LibFuzzer%2CStability-AFL%20-status%3ADuplicate%2CWontFix&can=1
|
| 22 |
+
[guided in-process fuzzing of Chrome components]: https://security.googleblog.com/2016/08/guided-in-process-fuzzing-of-chrome.html
|
| 23 |
+
|
| 24 |
+
In cooperation with the [Core Infrastructure Initiative] and the [OpenSSF],
|
| 25 |
+
OSS-Fuzz aims to make common open source software more secure and stable by
|
| 26 |
+
combining modern fuzzing techniques with scalable, distributed execution.
|
| 27 |
+
Projects that do not qualify for OSS-Fuzz (e.g. closed source) can run their own
|
| 28 |
+
instances of [ClusterFuzz] or [ClusterFuzzLite].
|
| 29 |
+
|
| 30 |
+
[Core Infrastructure Initiative]: https://www.coreinfrastructure.org/
|
| 31 |
+
[OpenSSF]: https://www.openssf.org/
|
| 32 |
+
|
| 33 |
+
We support the [libFuzzer], [AFL++], [Honggfuzz], and [Centipede] fuzzing engines in
|
| 34 |
+
combination with [Sanitizers], as well as [ClusterFuzz], a distributed fuzzer
|
| 35 |
+
execution environment and reporting tool.
|
| 36 |
+
|
| 37 |
+
[libFuzzer]: https://llvm.org/docs/LibFuzzer.html
|
| 38 |
+
[AFL++]: https://github.com/AFLplusplus/AFLplusplus
|
| 39 |
+
[Honggfuzz]: https://github.com/google/honggfuzz
|
| 40 |
+
[Centipede]: https://github.com/google/centipede
|
| 41 |
+
[Sanitizers]: https://github.com/google/sanitizers
|
| 42 |
+
[ClusterFuzz]: https://github.com/google/clusterfuzz
|
| 43 |
+
[ClusterFuzzLite]: https://google.github.io/clusterfuzzlite/
|
| 44 |
+
|
| 45 |
+
Currently, OSS-Fuzz supports C/C++, Rust, Go, Python and Java/JVM code. Other
|
| 46 |
+
languages supported by [LLVM] may work too. OSS-Fuzz supports fuzzing x86_64
|
| 47 |
+
and i386 builds.
|
| 48 |
+
|
| 49 |
+
[LLVM]: https://llvm.org
|
| 50 |
+
|
| 51 |
+
|
| 52 |
+
## Project history
|
| 53 |
+
OSS-Fuzz was launched in 2016 in response to the
|
| 54 |
+
[Heartbleed] vulnerability, discovered in [OpenSSL], one of the
|
| 55 |
+
most popular open source projects for encrypting web traffic. The vulnerability
|
| 56 |
+
had the potential to affect almost every internet user, yet was caused by a
|
| 57 |
+
relatively simple memory buffer overflow bug that could have been detected by
|
| 58 |
+
fuzzing—that is, by running the code on randomized inputs to intentionally cause
|
| 59 |
+
unexpected behaviors or crashes. At the time, though, fuzzing
|
| 60 |
+
was not widely used and was cumbersome for developers, requiring extensive
|
| 61 |
+
manual effort.
|
| 62 |
+
|
| 63 |
+
Google created OSS-Fuzz to fill this gap: it's a free service that runs fuzzers
|
| 64 |
+
for open source projects and privately alerts developers to the bugs detected.
|
| 65 |
+
Since its launch, OSS-Fuzz has become a critical service for the open source
|
| 66 |
+
community, growing beyond C/C++ to
|
| 67 |
+
detect problems in memory-safe languages such as Go, Rust, and Python.
|
| 68 |
+
|
| 69 |
+
[Heartbleed]: https://heartbleed.com/
|
| 70 |
+
[OpenSSL]: https://www.openssl.org/
|
| 71 |
+
|
| 72 |
+
## Learn more about fuzzing
|
| 73 |
+
|
| 74 |
+
This documentation describes how to use OSS-Fuzz service for your open source
|
| 75 |
+
project. To learn more about fuzzing in general, we recommend reading [libFuzzer
|
| 76 |
+
tutorial] and the other docs in [google/fuzzing] repository. These and some
|
| 77 |
+
other resources are listed on the [useful links] page.
|
| 78 |
+
|
| 79 |
+
[google/fuzzing]: https://github.com/google/fuzzing/tree/master/docs
|
| 80 |
+
[libFuzzer tutorial]: https://github.com/google/fuzzing/blob/master/tutorial/libFuzzerTutorial.md
|
| 81 |
+
[useful links]: {{ site.baseurl }}/reference/useful-links/#tutorials
|
| 82 |
+
|
| 83 |
+
## Trophies
|
| 84 |
+
As of August 2023, OSS-Fuzz has helped identify and fix over [10,000] vulnerabilities and [36,000] bugs across [1,000] projects.
|
| 85 |
+
|
| 86 |
+
[10,000]: https://bugs.chromium.org/p/oss-fuzz/issues/list?q=Type%3DBug-Security%20label%3Aclusterfuzz%20-status%3ADuplicate%2CWontFix&can=1
|
| 87 |
+
[36,000]: https://bugs.chromium.org/p/oss-fuzz/issues/list?q=Type%3DBug%20label%3Aclusterfuzz%20-status%3ADuplicate%2CWontFix&can=1
|
| 88 |
+
[1,000]: https://github.com/google/oss-fuzz/tree/master/projects
|
local-test-commons-compress-full-01-vuln_2/fuzz-tooling/infra/MAINTAINERS.csv
ADDED
|
@@ -0,0 +1,7 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Name,Email,Github Username
|
| 2 |
+
Adam Korcz,adam@adalogics.com,AdamKorcz
|
| 3 |
+
David Korczynski,david@adalogics.com,DavidKorczynski
|
| 4 |
+
Dongge Liu,donggeliu@google.com,Alan32Liu
|
| 5 |
+
Holly Gong,gongh@google.com,hogo6002
|
| 6 |
+
Jonathan Metzman,metzman@google.com,jonathanmetzman
|
| 7 |
+
Oliver Chang,ochang@google.com,oliverchang
|
local-test-commons-compress-full-01-vuln_2/fuzz-tooling/infra/README.md
ADDED
|
@@ -0,0 +1,23 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# infra
|
| 2 |
+
> OSS-Fuzz project infrastructure
|
| 3 |
+
|
| 4 |
+
Core infrastructure:
|
| 5 |
+
* [`base-images`](base-images/) - docker images for building fuzz targets & corresponding jenkins
|
| 6 |
+
pipeline.
|
| 7 |
+
|
| 8 |
+
Continuous Integration infrastructure:
|
| 9 |
+
|
| 10 |
+
* [`ci`](ci/) - script to build projects in CI.
|
| 11 |
+
|
| 12 |
+
## helper.py
|
| 13 |
+
> script to automate common docker operations
|
| 14 |
+
|
| 15 |
+
| Command | Description |
|
| 16 |
+
|---------|-------------
|
| 17 |
+
| `generate` | Generates skeleton files for a new project |
|
| 18 |
+
| `build_image` | Builds a docker image for a given project |
|
| 19 |
+
| `build_fuzzers` | Builds fuzz targets for a given project |
|
| 20 |
+
| `run_fuzzer` | Runs a fuzz target in a docker container |
|
| 21 |
+
| `coverage` | Runs fuzz target(s) in a docker container and generates a code coverage report. See [Code Coverage doc](https://google.github.io/oss-fuzz/advanced-topics/code-coverage/) |
|
| 22 |
+
| `reproduce` | Runs a testcase to reproduce a crash |
|
| 23 |
+
| `shell` | Starts a shell inside the docker image for a project |
|
local-test-commons-compress-full-01-vuln_2/fuzz-tooling/infra/build_specified_commit.py
ADDED
|
@@ -0,0 +1,410 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2019 Google LLC
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
"""Module to build a image from a specific commit, branch or pull request.
|
| 15 |
+
|
| 16 |
+
This module is allows each of the OSS Fuzz projects fuzzers to be built
|
| 17 |
+
from a specific point in time. This feature can be used for implementations
|
| 18 |
+
like continuious integration fuzzing and bisection to find errors
|
| 19 |
+
"""
|
| 20 |
+
import argparse
|
| 21 |
+
import bisect
|
| 22 |
+
import datetime
|
| 23 |
+
import os
|
| 24 |
+
import collections
|
| 25 |
+
import json
|
| 26 |
+
import logging
|
| 27 |
+
import re
|
| 28 |
+
import shutil
|
| 29 |
+
import tempfile
|
| 30 |
+
|
| 31 |
+
import helper
|
| 32 |
+
import repo_manager
|
| 33 |
+
import retry
|
| 34 |
+
import utils
|
| 35 |
+
|
| 36 |
+
BuildData = collections.namedtuple(
|
| 37 |
+
'BuildData', ['project_name', 'engine', 'sanitizer', 'architecture'])
|
| 38 |
+
|
| 39 |
+
_GIT_DIR_MARKER = 'gitdir: '
|
| 40 |
+
_IMAGE_BUILD_TRIES = 3
|
| 41 |
+
|
| 42 |
+
|
| 43 |
+
class BaseBuilderRepo:
  """Time-ordered collection of base-builder image digests.

  Digests must be added in ascending timestamp order; `find_digest` then
  answers "what was the newest base-builder at time T?" via binary search.
  """

  def __init__(self):
    # Parallel lists: timestamps[i] is when digests[i] was published.
    self.timestamps = []
    self.digests = []

  def add_digest(self, timestamp, digest):
    """Record one image digest and its publication timestamp."""
    self.timestamps.append(timestamp)
    self.digests.append(digest)

  def find_digest(self, timestamp):
    """Return the newest digest at or before `timestamp`, or None."""
    # bisect_right - 1 gives the rightmost entry whose timestamp <= target.
    pos = bisect.bisect_right(self.timestamps, timestamp) - 1
    if pos < 0:
      logging.error('Failed to find suitable base-builder.')
      return None
    return self.digests[pos]
|
| 63 |
+
|
| 64 |
+
|
| 65 |
+
def _replace_gitdir(src_dir, file_path):
  """Replace an absolute `gitdir:` pointer in a submodule .git file.

  Submodule `.git` files written inside the container point at absolute
  container paths. Rewrites the `gitdir:` line to a path relative to the
  file's location (rebased onto /src) so the checkout is usable on the host.

  Args:
    src_dir: The host directory that corresponds to /src in the container.
    file_path: Path to a submodule `.git` file.
  """
  with open(file_path) as handle:
    lines = handle.readlines()

  new_lines = []
  for line in lines:
    if line.startswith(_GIT_DIR_MARKER):
      absolute_path = line[len(_GIT_DIR_MARKER):].strip()
      if not os.path.isabs(absolute_path):
        # Already relative.
        return

      current_dir = os.path.dirname(file_path)
      # Rebase to /src rather than the host src dir.
      base_dir = current_dir.replace(src_dir, '/src')
      relative_path = os.path.relpath(absolute_path, base_dir)
      logging.info('Replacing absolute submodule gitdir from %s to %s',
                   absolute_path, relative_path)

      # BUG FIX: keep the trailing newline. readlines() preserved it on the
      # original line; dropping it would merge any following line into the
      # rewritten gitdir line when the file is written back.
      line = _GIT_DIR_MARKER + relative_path + '\n'

    new_lines.append(line)

  with open(file_path, 'w') as handle:
    handle.write(''.join(new_lines))
|
| 91 |
+
|
| 92 |
+
|
| 93 |
+
def _make_gitdirs_relative(src_dir):
  """Rewrite every submodule `.git` file under `src_dir` to use a relative
  gitdir pointer (see _replace_gitdir)."""
  for parent_dir, _, filenames in os.walk(src_dir):
    for name in filenames:
      # Submodules store their gitdir pointer in a plain file named '.git'.
      if name == '.git':
        _replace_gitdir(src_dir, os.path.join(parent_dir, name))
|
| 102 |
+
|
| 103 |
+
|
| 104 |
+
def _replace_base_builder_digest(dockerfile_path, digest):
|
| 105 |
+
"""Replace the base-builder digest in a Dockerfile."""
|
| 106 |
+
with open(dockerfile_path) as handle:
|
| 107 |
+
lines = handle.readlines()
|
| 108 |
+
|
| 109 |
+
new_lines = []
|
| 110 |
+
for line in lines:
|
| 111 |
+
if line.strip().startswith('FROM'):
|
| 112 |
+
line = 'FROM ghcr.io/aixcc-finals/base-builder@' + digest + '\n'
|
| 113 |
+
|
| 114 |
+
new_lines.append(line)
|
| 115 |
+
|
| 116 |
+
with open(dockerfile_path, 'w') as handle:
|
| 117 |
+
handle.write(''.join(new_lines))
|
| 118 |
+
|
| 119 |
+
|
| 120 |
+
def copy_src_from_docker(project_name, host_dir):
  """Copy /src out of the project's docker image onto the host.

  Args:
    project_name: OSS-Fuzz project whose gcr.io/oss-fuzz image is used.
    host_dir: Host directory to copy into (mounted as /out in the container).

  Returns:
    Path to the copied src directory on the host.
  """
  image_name = 'gcr.io/oss-fuzz/' + project_name
  src_dir = os.path.join(host_dir, 'src')
  # Start from a clean slate so stale files never leak between runs.
  if os.path.exists(src_dir):
    shutil.rmtree(src_dir, ignore_errors=True)

  helper.docker_run([
      '-v',
      host_dir + ':/out',
      image_name,
      'cp',
      '-r',
      '-p',
      '/src',
      '/out',
  ])

  # Submodules can have gitdir entries which point to absolute paths. Make them
  # relative, as otherwise we can't do operations on the checkout on the host.
  _make_gitdirs_relative(src_dir)
  return src_dir
|
| 144 |
+
|
| 145 |
+
|
| 146 |
+
# Image builds can fail transiently (network, registry flakes), so retry up
# to _IMAGE_BUILD_TRIES times with a delay between attempts (retry.wrap's
# second argument — presumably seconds; confirm against retry module).
@retry.wrap(_IMAGE_BUILD_TRIES, 2)
def _build_image_with_retries(project_name):
  """Build the project's docker image, retrying on failure.

  Args:
    project_name: Name of the OSS-Fuzz project to build.

  Returns:
    The result of helper.build_image_impl (truthy on success).
  """
  return helper.build_image_impl(helper.Project(project_name))
|
| 150 |
+
|
| 151 |
+
|
| 152 |
+
def get_required_post_checkout_steps(dockerfile_path):
  """Get required post checkout steps (best effort).

  Scans the project Dockerfile for RUN commands that touch build.sh under
  $SRC or /src. Only commands appearing *after* the last source-checkout
  RUN (git/svn/hg) are kept: their effect depends on the checked-out
  revision, so they must be re-run after we change it.

  Args:
    dockerfile_path: Path to the project's Dockerfile.

  Returns:
    A list of (workdir, command) tuples to re-run after changing revision.
  """

  checkout_pattern = re.compile(r'\s*RUN\s*(git|svn|hg)')

  # If the build.sh is copied from upstream, we need to copy it again after
  # changing the revision to ensure correct building.
  post_run_pattern = re.compile(r'\s*RUN\s*(.*build\.sh.*(\$SRC|/src).*)')

  with open(dockerfile_path) as handle:
    lines = handle.readlines()

  subsequent_run_cmds = []
  for i, line in enumerate(lines):
    if checkout_pattern.match(line):
      # A new checkout line invalidates any build.sh steps collected so far;
      # only steps after the *final* checkout are required.
      subsequent_run_cmds = []
      continue

    match = post_run_pattern.match(line)
    if match:
      # Resolve the WORKDIR in effect at this point in the Dockerfile so the
      # command can later be replayed in the same directory.
      workdir = helper.workdir_from_lines(lines[:i])
      command = match.group(1)
      subsequent_run_cmds.append((workdir, command))

  return subsequent_run_cmds
|
| 177 |
+
|
| 178 |
+
|
| 179 |
+
# pylint: disable=too-many-locals
def build_fuzzers_from_commit(commit,
                              build_repo_manager,
                              host_src_path,
                              build_data,
                              base_builder_repo=None):
  """Builds a OSS-Fuzz fuzzer at a specific commit SHA.

  Checks out `commit` in the project's source tree, replays any required
  post-checkout Dockerfile steps, then builds the fuzzers. If the build
  fails, retries once against the OSS-Fuzz project config (and base-builder
  image) closest in time to the upstream commit date.

  Args:
    commit: The commit SHA to build the fuzzers at.
    build_repo_manager: The OSS-Fuzz project's repo manager to be built at.
    host_src_path: Host path of the copied /src checkout (see
        copy_src_from_docker).
    build_data: A struct containing project build information.
    base_builder_repo: A BaseBuilderRepo.

  Returns:
    The result of helper.build_fuzzers_impl (truthy on success), or False on
    unrecoverable failure. (NOTE(review): the original docstring claimed
    "0 on success or error code" — the code never returns 0/error codes.)
  """
  oss_fuzz_repo_manager = repo_manager.RepoManager(helper.OSS_FUZZ_DIR)
  num_retry = 1

  def cleanup():
    # Re-copy /src for a clean checkout every time.
    copy_src_from_docker(build_data.project_name,
                         os.path.dirname(host_src_path))
    build_repo_manager.fetch_all_remotes()

  projects_dir = os.path.join('projects', build_data.project_name)
  dockerfile_path = os.path.join(projects_dir, 'Dockerfile')

  # First iteration builds at the requested commit as-is; the second (if
  # needed) retries with an older OSS-Fuzz project config / base-builder.
  for i in range(num_retry + 1):
    build_repo_manager.checkout_commit(commit, clean=False)

    # Replay Dockerfile RUN steps whose effect depends on the checked-out
    # revision (e.g. copying build.sh out of the source tree).
    post_checkout_steps = get_required_post_checkout_steps(dockerfile_path)
    for workdir, post_checkout_step in post_checkout_steps:
      logging.info('Running post-checkout step `%s` in %s.', post_checkout_step,
                   workdir)
      helper.docker_run([
          '-w',
          workdir,
          '-v',
          host_src_path + ':' + '/src',
          'gcr.io/oss-fuzz/' + build_data.project_name,
          '/bin/bash',
          '-c',
          post_checkout_step,
      ])

    project = helper.Project(build_data.project_name)
    result = helper.build_fuzzers_impl(project=project,
                                       clean=True,
                                       engine=build_data.engine,
                                       sanitizer=build_data.sanitizer,
                                       architecture=build_data.architecture,
                                       env_to_add=None,
                                       source_path=host_src_path,
                                       mount_path='/src')
    # Stop on success, or once the retry budget is exhausted.
    if result or i == num_retry:
      break

    # Retry with an OSS-Fuzz builder container that's closer to the project
    # commit date.
    commit_date = build_repo_manager.commit_date(commit)

    # Find first change in the projects/<PROJECT> directory before the project
    # commit date.
    oss_fuzz_commit, _, _ = oss_fuzz_repo_manager.git([
        'log', '--before=' + commit_date.isoformat(), '-n1', '--format=%H',
        projects_dir
    ],
                                                      check_result=True)
    oss_fuzz_commit = oss_fuzz_commit.strip()
    if not oss_fuzz_commit:
      logging.info(
          'Could not find first OSS-Fuzz commit prior to upstream commit. '
          'Falling back to oldest integration commit.')

      # Find the oldest commit.
      oss_fuzz_commit, _, _ = oss_fuzz_repo_manager.git(
          ['log', '--reverse', '--format=%H', projects_dir], check_result=True)

      oss_fuzz_commit = oss_fuzz_commit.splitlines()[0].strip()

    if not oss_fuzz_commit:
      logging.error('Failed to get oldest integration commit.')
      break

    logging.info('Build failed. Retrying on earlier OSS-Fuzz commit %s.',
                 oss_fuzz_commit)

    # Check out projects/<PROJECT> dir to the commit that was found.
    oss_fuzz_repo_manager.git(['checkout', oss_fuzz_commit, projects_dir],
                              check_result=True)

    # Also use the closest base-builder we can find.
    if base_builder_repo:
      base_builder_digest = base_builder_repo.find_digest(commit_date)
      if not base_builder_digest:
        return False

      logging.info('Using base-builder with digest %s.', base_builder_digest)
      _replace_base_builder_digest(dockerfile_path, base_builder_digest)

    # Rebuild image and re-copy src dir since things in /src could have changed.
    if not _build_image_with_retries(build_data.project_name):
      logging.error('Failed to rebuild image.')
      return False

    cleanup()

  # Leave the checkout clean for subsequent callers regardless of outcome.
  cleanup()
  return result
|
| 289 |
+
|
| 290 |
+
|
| 291 |
+
def detect_main_repo(project_name, repo_name=None, commit=None):
  """Checks a docker image for the main repo of an OSS-Fuzz project.

  Builds the project image, then runs /opt/cifuzz/detect_repo.py inside it
  and parses its "Detected repo: <origin> <path>" output.

  Note: The default is to use the repo name to detect the main repo.

  Args:
    project_name: The name of the oss-fuzz project.
    repo_name: The name of the main repo in an OSS-Fuzz project.
    commit: A commit SHA that is associated with the main repo.

  Returns:
    A tuple containing (the repo's origin, the repo's path), or (None, None)
    on any failure (missing arguments, image build failure, or no match in
    the detector's output).
  """

  if not repo_name and not commit:
    logging.error(
        'Error: can not detect main repo without a repo_name or a commit.')
    return None, None
  if repo_name and commit:
    logging.info(
        'Both repo name and commit specific. Using repo name for detection.')

  # Change to oss-fuzz main directory so helper.py runs correctly.
  utils.chdir_to_root()
  if not _build_image_with_retries(project_name):
    logging.error('Error: building %s image failed.', project_name)
    return None, None
  docker_image_name = 'gcr.io/oss-fuzz/' + project_name
  command_to_run = [
      'docker', 'run', '--rm', '-t', docker_image_name, 'python3',
      os.path.join('/opt', 'cifuzz', 'detect_repo.py')
  ]
  # repo_name takes precedence; otherwise fall back to commit-based detection.
  if repo_name:
    command_to_run.extend(['--repo_name', repo_name])
  else:
    command_to_run.extend(['--example_commit', commit])
  out, _, _ = utils.execute(command_to_run)
  # detect_repo.py prints 'Detected repo: <origin> <path>' on success.
  match = re.search(r'\bDetected repo: ([^ ]+) ([^ ]+)', out.rstrip())
  if match and match.group(1) and match.group(2):
    return match.group(1), match.group(2)

  logging.error('Failed to detect repo:\n%s', out)
  return None, None
|
| 334 |
+
|
| 335 |
+
|
| 336 |
+
def load_base_builder_repo():
  """Get base-image digests.

  Lists base-builder image tags via gcloud (sorted by timestamp) and loads
  them into a BaseBuilderRepo for time-based digest lookup.

  Returns:
    A populated BaseBuilderRepo, or None if gcloud is not installed.
  """
  gcloud_path = shutil.which('gcloud')
  if not gcloud_path:
    logging.warning('gcloud not found in PATH.')
    return None

  # NOTE(review): `gcloud container images list-tags` against a ghcr.io
  # repository is unusual (the command targets Google registries) — confirm
  # this works in the deployment environment.
  result, _, _ = utils.execute([
      gcloud_path,
      'container',
      'images',
      'list-tags',
      'ghcr.io/aixcc-finals/base-builder',
      '--format=json',
      '--sort-by=timestamp',
  ],
                               check_result=True)
  result = json.loads(result)

  repo = BaseBuilderRepo()
  # --sort-by=timestamp ensures digests are added in ascending time order,
  # which BaseBuilderRepo.find_digest's bisect relies on.
  for image in result:
    # Normalize to UTC so comparisons against git commit dates are aware.
    timestamp = datetime.datetime.fromisoformat(
        image['timestamp']['datetime']).astimezone(datetime.timezone.utc)
    repo.add_digest(timestamp, image['digest'])

  return repo
|
| 362 |
+
|
| 363 |
+
|
| 364 |
+
def main():
  """Main function.

  Parses CLI arguments, detects the project's main repo from the given
  commit, copies /src out of the project image, and builds the fuzzers at
  that commit. Raises ValueError if the main repo cannot be determined and
  RuntimeError if the build fails.
  """
  logging.getLogger().setLevel(logging.INFO)

  parser = argparse.ArgumentParser(
      description='Build fuzzers at a specific commit')
  parser.add_argument('--project_name',
                      help='The name of the project where the bug occurred.',
                      required=True)
  parser.add_argument('--commit',
                      help='The newest commit SHA to be bisected.',
                      required=True)
  parser.add_argument('--engine',
                      help='The default is "libfuzzer".',
                      default='libfuzzer')
  parser.add_argument('--sanitizer',
                      default='address',
                      help='The default is "address".')
  parser.add_argument('--architecture', default='x86_64')

  args = parser.parse_args()

  # The commit identifies which repo inside the image is the "main" one.
  repo_url, repo_path = detect_main_repo(args.project_name, commit=args.commit)

  if not repo_url or not repo_path:
    raise ValueError('Main git repo can not be determined.')

  with tempfile.TemporaryDirectory() as tmp_dir:
    host_src_dir = copy_src_from_docker(args.project_name, tmp_dir)
    build_repo_manager = repo_manager.RepoManager(
        os.path.join(host_src_dir, os.path.basename(repo_path)))
    # May be None if gcloud is unavailable; build_fuzzers_from_commit
    # tolerates that (base_builder_repo is optional).
    base_builder_repo = load_base_builder_repo()

    build_data = BuildData(project_name=args.project_name,
                           engine=args.engine,
                           sanitizer=args.sanitizer,
                           architecture=args.architecture)
    if not build_fuzzers_from_commit(args.commit,
                                     build_repo_manager,
                                     host_src_dir,
                                     build_data,
                                     base_builder_repo=base_builder_repo):
      raise RuntimeError('Failed to build.')


if __name__ == '__main__':
  main()
|
local-test-commons-compress-full-01-vuln_2/fuzz-tooling/infra/build_specified_commit_test.py
ADDED
|
@@ -0,0 +1,126 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2019 Google LLC
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
"""Test the functionality of the build image from commit module.
|
| 15 |
+
The will consist of the following functional tests:
|
| 16 |
+
1. The inference of the main repo for a specific project.
|
| 17 |
+
2. The building of a projects fuzzers from a specific commit.
|
| 18 |
+
|
| 19 |
+
"""
|
| 20 |
+
import os
|
| 21 |
+
import tempfile
|
| 22 |
+
import unittest
|
| 23 |
+
|
| 24 |
+
import build_specified_commit
|
| 25 |
+
import helper
|
| 26 |
+
import repo_manager
|
| 27 |
+
import test_repos
|
| 28 |
+
|
| 29 |
+
# necessary because __file__ changes with os.chdir
|
| 30 |
+
TEST_DIR_PATH = os.path.dirname(os.path.realpath(__file__))
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
@unittest.skipIf(not os.getenv('INTEGRATION_TESTS'),
                 'INTEGRATION_TESTS=1 not set')
class BuildImageIntegrationTest(unittest.TestCase):
  """Tests if an image can be built from different states e.g. a commit."""

  @unittest.skip('Test is failing (spuriously?).')
  def test_build_fuzzers_from_commit(self):
    """Tests if the fuzzers can build at a specified commit.

    This is done by using a known regression range for a specific test case.
    The old commit should show the error when its fuzzers run and the new one
    should not.
    """
    with tempfile.TemporaryDirectory() as tmp_dir:
      test_repo = test_repos.TEST_REPOS[1]
      self.assertTrue(helper.build_image_impl(test_repo.project_name))
      host_src_dir = build_specified_commit.copy_src_from_docker(
          test_repo.project_name, tmp_dir)

      test_repo_manager = repo_manager.clone_repo_and_get_manager(
          test_repo.git_url, host_src_dir, test_repo.oss_repo_name)
      build_data = build_specified_commit.BuildData(
          sanitizer='address',
          architecture='x86_64',
          engine='libfuzzer',
          project_name=test_repo.project_name)

      # Build at the old (buggy) commit and reproduce the crash.
      build_specified_commit.build_fuzzers_from_commit(test_repo.old_commit,
                                                       test_repo_manager,
                                                       host_src_dir, build_data)
      project = helper.Project(test_repo.project_name)
      old_result = helper.reproduce_impl(project=project,
                                         fuzzer_name=test_repo.fuzz_target,
                                         valgrind=False,
                                         env_to_add=[],
                                         fuzzer_args=[],
                                         testcase_path=test_repo.testcase_path)
      # BUG FIX: this previously passed test_repo.project_name as the commit
      # argument, so the "new" build was never built at the new commit. Build
      # at the fixed commit, as the docstring describes.
      build_specified_commit.build_fuzzers_from_commit(test_repo.new_commit,
                                                       test_repo_manager,
                                                       host_src_dir, build_data)
      new_result = helper.reproduce_impl(project=project,
                                         fuzzer_name=test_repo.fuzz_target,
                                         valgrind=False,
                                         env_to_add=[],
                                         fuzzer_args=[],
                                         testcase_path=test_repo.testcase_path)
      # The crash must reproduce only on the old commit.
      self.assertNotEqual(new_result, old_result)

  def test_detect_main_repo_from_commit(self):
    """Test the detect main repo function from build specific commit module."""
    # TODO(metzman): Fix these tests so they don't randomly break because of
    # changes in the outside world.
    for example_repo in test_repos.TEST_REPOS:
      if example_repo.new_commit:
        # TODO(metzman): This function calls _build_image_with_retries which
        # has a long delay (30 seconds). Figure out how to make this quicker.
        repo_origin, repo_name = build_specified_commit.detect_main_repo(
            example_repo.project_name, commit=example_repo.new_commit)
        self.assertEqual(repo_origin, example_repo.git_url)
        self.assertEqual(repo_name,
                         os.path.join('/src', example_repo.oss_repo_name))

    # BUG FIX: pass the commit via the `commit` keyword. Passed positionally,
    # it was bound to `repo_name`, so this exercised name-based (not
    # commit-based) detection, contrary to this test's purpose.
    repo_origin, repo_name = build_specified_commit.detect_main_repo(
        test_repos.INVALID_REPO.project_name,
        commit=test_repos.INVALID_REPO.new_commit)
    self.assertIsNone(repo_origin)
    self.assertIsNone(repo_name)

  def test_detect_main_repo_from_name(self):
    """Test the detect main repo function from build specific commit module."""
    for example_repo in test_repos.TEST_REPOS:
      if example_repo.project_name == 'gonids':
        # It's unclear how this test ever passed, but we can't infer the repo
        # because gonids doesn't really check it out, it uses "go get".
        continue
      repo_origin, repo_name = build_specified_commit.detect_main_repo(
          example_repo.project_name, repo_name=example_repo.git_repo_name)
      self.assertEqual(repo_origin, example_repo.git_url)
      self.assertEqual(
          repo_name,
          os.path.join(example_repo.image_location, example_repo.oss_repo_name))

    repo_origin, repo_name = build_specified_commit.detect_main_repo(
        test_repos.INVALID_REPO.project_name,
        test_repos.INVALID_REPO.oss_repo_name)
    self.assertIsNone(repo_origin)
    self.assertIsNone(repo_name)
|
| 120 |
+
|
| 121 |
+
|
| 122 |
+
if __name__ == '__main__':
  # Change to oss-fuzz main directory so helper.py runs correctly.
  # TEST_DIR_PATH was resolved at import time because __file__ changes with
  # os.chdir (see the module-level comment where it is defined).
  if os.getcwd() != os.path.dirname(TEST_DIR_PATH):
    os.chdir(os.path.dirname(TEST_DIR_PATH))
  unittest.main()
|
local-test-commons-compress-full-01-vuln_2/fuzz-tooling/infra/constants.py
ADDED
|
@@ -0,0 +1,49 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2021 Google LLC
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
#
|
| 15 |
+
################################################################################
|
| 16 |
+
"""Constants for OSS-Fuzz."""
|
| 17 |
+
|
| 18 |
+
# Directory (inside a project repo) holding external / ClusterFuzzLite build
# integration files.
DEFAULT_EXTERNAL_BUILD_INTEGRATION_PATH = '.clusterfuzzlite'

# Defaults applied when a project's configuration omits a value.
DEFAULT_LANGUAGE = 'c++'
DEFAULT_SANITIZER = 'address'
DEFAULT_ARCHITECTURE = 'x86_64'
DEFAULT_ENGINE = 'libfuzzer'

# Accepted values for a project's language setting.
LANGUAGES = [
    'c',
    'c++',
    'go',
    'javascript',
    'jvm',
    'python',
    'rust',
    'swift',
    'ruby',
]
# Subset of LANGUAGES for which coverage builds are available.
LANGUAGES_WITH_COVERAGE_SUPPORT = [
    'c', 'c++', 'go', 'jvm', 'python', 'rust', 'swift', 'javascript', 'ruby'
]
# Accepted sanitizer names ('none' builds without a sanitizer).
SANITIZERS = [
    'address',
    'none',
    'memory',
    'undefined',
    'thread',
    'coverage',
    'introspector',
    'hwaddress',
]
# Accepted CPU architectures.
ARCHITECTURES = ['i386', 'x86_64', 'aarch64']
# Accepted fuzzing engine names.
ENGINES = ['libfuzzer', 'afl', 'honggfuzz', 'centipede', 'none', 'wycheproof']
|
local-test-commons-compress-full-01-vuln_2/fuzz-tooling/infra/helper.py
ADDED
|
@@ -0,0 +1,1810 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/usr/bin/env python
|
| 2 |
+
# Copyright 2016 Google Inc.
|
| 3 |
+
#
|
| 4 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 5 |
+
# you may not use this file except in compliance with the License.
|
| 6 |
+
# You may obtain a copy of the License at
|
| 7 |
+
#
|
| 8 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 9 |
+
#
|
| 10 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 11 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 12 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 13 |
+
# See the License for the specific language governing permissions and
|
| 14 |
+
# limitations under the License.
|
| 15 |
+
#
|
| 16 |
+
################################################################################
|
| 17 |
+
"""Helper script for OSS-Fuzz users. Can do common tasks like building
|
| 18 |
+
projects/fuzzers, running them etc."""
|
| 19 |
+
|
| 20 |
+
from __future__ import print_function
|
| 21 |
+
from multiprocessing.dummy import Pool as ThreadPool
|
| 22 |
+
import argparse
|
| 23 |
+
import datetime
|
| 24 |
+
import errno
|
| 25 |
+
import logging
|
| 26 |
+
import os
|
| 27 |
+
import re
|
| 28 |
+
import shlex
|
| 29 |
+
import shutil
|
| 30 |
+
import subprocess
|
| 31 |
+
import sys
|
| 32 |
+
import tempfile
|
| 33 |
+
|
| 34 |
+
import constants
|
| 35 |
+
import templates
|
| 36 |
+
|
| 37 |
+
OSS_FUZZ_DIR = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
|
| 38 |
+
BUILD_DIR = os.path.join(OSS_FUZZ_DIR, 'build')
|
| 39 |
+
|
| 40 |
+
BASE_IMAGE_TAG = ':v1.2.1' # no tag for latest
|
| 41 |
+
|
| 42 |
+
BASE_RUNNER_IMAGE = f'ghcr.io/aixcc-finals/base-runner{BASE_IMAGE_TAG}'
|
| 43 |
+
|
| 44 |
+
BASE_IMAGES = {
|
| 45 |
+
'generic': [
|
| 46 |
+
f'ghcr.io/aixcc-finals/base-image{BASE_IMAGE_TAG}',
|
| 47 |
+
f'ghcr.io/aixcc-finals/base-clang{BASE_IMAGE_TAG}',
|
| 48 |
+
f'ghcr.io/aixcc-finals/base-builder{BASE_IMAGE_TAG}',
|
| 49 |
+
BASE_RUNNER_IMAGE,
|
| 50 |
+
f'ghcr.io/aixcc-finals/base-runner-debug{BASE_IMAGE_TAG}',
|
| 51 |
+
],
|
| 52 |
+
'go': [f'ghcr.io/aixcc-finals/base-builder-go{BASE_IMAGE_TAG}'],
|
| 53 |
+
'javascript': [f'ghcr.io/aixcc-finals/base-builder-javascript{BASE_IMAGE_TAG}'],
|
| 54 |
+
'jvm': [f'ghcr.io/aixcc-finals/base-builder-jvm{BASE_IMAGE_TAG}'],
|
| 55 |
+
'python': [f'ghcr.io/aixcc-finals/base-builder-python{BASE_IMAGE_TAG}'],
|
| 56 |
+
'rust': [f'ghcr.io/aixcc-finals/base-builder-rust{BASE_IMAGE_TAG}'],
|
| 57 |
+
'ruby': [f'ghcr.io/aixcc-finals/base-builder-ruby{BASE_IMAGE_TAG}'],
|
| 58 |
+
'swift': [f'ghcr.io/aixcc-finals/base-builder-swift{BASE_IMAGE_TAG}'],
|
| 59 |
+
}
|
| 60 |
+
|
| 61 |
+
VALID_PROJECT_NAME_REGEX = re.compile(r'^[a-zA-Z0-9_-]+$')
|
| 62 |
+
MAX_PROJECT_NAME_LENGTH = 26
|
| 63 |
+
|
| 64 |
+
CORPUS_URL_FORMAT = (
|
| 65 |
+
'gs://{project_name}-corpus.clusterfuzz-external.appspot.com/libFuzzer/'
|
| 66 |
+
'{fuzz_target}/')
|
| 67 |
+
CORPUS_BACKUP_URL_FORMAT = (
|
| 68 |
+
'gs://{project_name}-backup.clusterfuzz-external.appspot.com/corpus/'
|
| 69 |
+
'libFuzzer/{fuzz_target}/')
|
| 70 |
+
|
| 71 |
+
HTTPS_CORPUS_BACKUP_URL_FORMAT = (
|
| 72 |
+
'https://storage.googleapis.com/{project_name}-backup.clusterfuzz-external'
|
| 73 |
+
'.appspot.com/corpus/libFuzzer/{fuzz_target}/public.zip')
|
| 74 |
+
|
| 75 |
+
LANGUAGE_REGEX = re.compile(r'[^\s]+')
|
| 76 |
+
PROJECT_LANGUAGE_REGEX = re.compile(r'\s*language\s*:\s*([^\s]+)')
|
| 77 |
+
|
| 78 |
+
WORKDIR_REGEX = re.compile(r'\s*WORKDIR\s*([^\s]+)')
|
| 79 |
+
|
| 80 |
+
# Regex to match special chars in project name.
|
| 81 |
+
SPECIAL_CHARS_REGEX = re.compile('[^a-zA-Z0-9_-]')
|
| 82 |
+
|
| 83 |
+
LANGUAGE_TO_BASE_BUILDER_IMAGE = {
|
| 84 |
+
'c': 'base-builder',
|
| 85 |
+
'c++': 'base-builder',
|
| 86 |
+
'go': 'base-builder-go',
|
| 87 |
+
'javascript': 'base-builder-javascript',
|
| 88 |
+
'jvm': 'base-builder-jvm',
|
| 89 |
+
'python': 'base-builder-python',
|
| 90 |
+
'ruby': 'base-builder-ruby',
|
| 91 |
+
'rust': 'base-builder-rust',
|
| 92 |
+
'swift': 'base-builder-swift'
|
| 93 |
+
}
|
| 94 |
+
ARM_BUILDER_NAME = 'oss-fuzz-buildx-builder'
|
| 95 |
+
|
| 96 |
+
CLUSTERFUZZLITE_ENGINE = 'libfuzzer'
|
| 97 |
+
CLUSTERFUZZLITE_ARCHITECTURE = 'x86_64'
|
| 98 |
+
CLUSTERFUZZLITE_FILESTORE_DIR = 'filestore'
|
| 99 |
+
CLUSTERFUZZLITE_DOCKER_IMAGE = 'ghcr.io/aixcc-finals/cifuzz-run-fuzzers'
|
| 100 |
+
|
| 101 |
+
logger = logging.getLogger(__name__)
|
| 102 |
+
|
| 103 |
+
if sys.version_info[0] >= 3:
|
| 104 |
+
raw_input = input # pylint: disable=invalid-name
|
| 105 |
+
|
| 106 |
+
# pylint: disable=too-many-lines
|
| 107 |
+
|
| 108 |
+
|
| 109 |
+
class Project:
  """Class representing a project that is in OSS-Fuzz or an external project
  (ClusterFuzzLite user)."""

  def __init__(
      self,
      project_name_or_path,
      is_external=False,
      build_integration_path=constants.DEFAULT_EXTERNAL_BUILD_INTEGRATION_PATH):
    # is_external: True for ClusterFuzzLite projects living outside the
    # OSS-Fuzz repository; such projects are identified by a filesystem path
    # rather than by a name under projects/.
    self.is_external = is_external
    if self.is_external:
      self.path = os.path.abspath(project_name_or_path)
      self.name = os.path.basename(self.path)
      # Build files (Dockerfile, project.yaml) live in a subdirectory of the
      # external repo, e.g. ".clusterfuzzlite".
      self.build_integration_path = os.path.join(self.path,
                                                 build_integration_path)
    else:
      self.name = project_name_or_path
      self.path = os.path.join(OSS_FUZZ_DIR, 'projects', self.name)
      self.build_integration_path = self.path

  @property
  def dockerfile_path(self):
    """Returns path to the project Dockerfile."""
    return os.path.join(self.build_integration_path, 'Dockerfile')

  @property
  def language(self):
    """Returns project language, falling back to the default (c++) when
    project.yaml is missing or has no language entry."""
    project_yaml_path = os.path.join(self.build_integration_path,
                                     'project.yaml')
    if not os.path.exists(project_yaml_path):
      logger.warning('No project.yaml. Assuming c++.')
      return constants.DEFAULT_LANGUAGE

    with open(project_yaml_path) as file_handle:
      content = file_handle.read()
    # Naive line-based scan instead of a real YAML parser (avoids a yaml
    # dependency); the first "language: X" line wins.
    for line in content.splitlines():
      match = PROJECT_LANGUAGE_REGEX.match(line)
      if match:
        return match.group(1)

    logger.warning('Language not specified in project.yaml. Assuming c++.')
    return constants.DEFAULT_LANGUAGE

  @property
  def coverage_extra_args(self):
    """Returns project coverage extra args."""
    project_yaml_path = os.path.join(self.build_integration_path,
                                     'project.yaml')
    if not os.path.exists(project_yaml_path):
      logger.warning('project.yaml not found: %s.', project_yaml_path)
      return ''

    with open(project_yaml_path) as file_handle:
      content = file_handle.read()

    coverage_flags = ''
    read_coverage_extra_args = False
    # Pass the yaml file and extract the value of the coverage_extra_args key.
    # This is naive yaml parsing and we do not handle comments at this point.
    for line in content.splitlines():
      if read_coverage_extra_args:
        # Break reading coverage args if a new yaml key is defined.
        if len(line) > 0 and line[0] != ' ':
          break
        coverage_flags += line
      if 'coverage_extra_args' in line:
        read_coverage_extra_args = True
        # Include the first line only if it's not a multi-line value.
        if 'coverage_extra_args: >' not in line:
          coverage_flags += line.replace('coverage_extra_args: ', '')
    return coverage_flags

  @property
  def out(self):
    """Returns the out dir for the project. Creates it if needed."""
    return _get_out_dir(self.name)

  @property
  def work(self):
    """Returns the work dir for the project. Creates it if needed."""
    return _get_project_build_subdir(self.name, 'work')

  @property
  def corpus(self):
    """Returns the corpus dir for the project. Creates it if needed."""
    return _get_project_build_subdir(self.name, 'corpus')
| 197 |
+
|
| 198 |
+
def main():  # pylint: disable=too-many-branches,too-many-return-statements
  """Gets subcommand from program arguments and does it. Returns 0 on success 1
  on error."""
  logging.basicConfig(level=logging.INFO)
  parser = get_parser()
  args = parse_args(parser)

  # Need to do this before chdir.
  # TODO(https://github.com/google/oss-fuzz/issues/6758): Get rid of chdir.
  if hasattr(args, 'testcase_path'):
    args.testcase_path = _get_absolute_path(args.testcase_path)
  # Note: this has to happen after parse_args above as parse_args needs to know
  # the original CWD for external projects.
  os.chdir(OSS_FUZZ_DIR)
  if not os.path.exists(BUILD_DIR):
    os.mkdir(BUILD_DIR)

  # We have different default values for `sanitizer` depending on the `engine`.
  # Some commands do not have `sanitizer` argument, so `hasattr` is necessary.
  if hasattr(args, 'sanitizer') and not args.sanitizer:
    if args.project.language == 'javascript':
      args.sanitizer = 'none'
    else:
      args.sanitizer = constants.DEFAULT_SANITIZER

  # Dispatch to the chosen subcommand. Each handler returns a truthy value on
  # success, converted to a 0/1 process exit code below.
  if args.command == 'generate':
    result = generate(args)
  elif args.command == 'build_image':
    result = build_image(args)
  elif args.command == 'build_fuzzers':
    result = build_fuzzers(args)
  elif args.command == 'fuzzbench_build_fuzzers':
    result = fuzzbench_build_fuzzers(args)
  elif args.command == 'fuzzbench_run_fuzzer':
    result = fuzzbench_run_fuzzer(args)
  elif args.command == 'fuzzbench_measure':
    result = fuzzbench_measure(args)
  elif args.command == 'check_build':
    result = check_build(args)
  elif args.command == 'download_corpora':
    result = download_corpora(args)
  elif args.command == 'run_fuzzer':
    result = run_fuzzer(args)
  elif args.command == 'coverage':
    result = coverage(args)
  elif args.command == 'introspector':
    result = introspector(args)
  elif args.command == 'reproduce':
    result = reproduce(args)
    # reproduce is special: with --propagate_exit_codes the raw exit code of
    # the reproduction is returned verbatim instead of being collapsed to 0/1.
    if args.propagate_exit_codes:
      return result
  elif args.command == 'shell':
    result = shell(args)
  elif args.command == 'pull_images':
    result = pull_images()
  elif args.command == 'run_clusterfuzzlite':
    result = run_clusterfuzzlite(args)
  else:
    # Print help string if no arguments provided.
    parser.print_help()
    result = False
  return bool_to_retcode(result)
| 261 |
+
|
| 262 |
+
def bool_to_retcode(boolean):
  """Converts a truthiness value into a process exit status.

  Returns 0 (the conventional success code) when |boolean| is truthy,
  and 1 (failure) otherwise.
  """
  if boolean:
    return 0
  return 1
| 268 |
+
|
| 269 |
+
def parse_args(parser, args=None):
  """Parses |args| with |parser| and wraps the project argument in a Project.

  Passing args=None makes argparse read sys.argv (normal production
  behavior) while letting unit tests supply an explicit argument list.
  """
  parsed = parser.parse_args(args)
  if not getattr(parsed, 'project', None):
    # Subcommands without a project argument need no further processing.
    return parsed

  # Use hacky method for extracting attributes so that ShellTest works.
  # TODO(metzman): Fix this.
  parsed.project = Project(parsed.project, getattr(parsed, 'external', False))
  return parsed
| 285 |
+
|
| 286 |
+
def _add_external_project_args(parser):
|
| 287 |
+
parser.add_argument(
|
| 288 |
+
'--external',
|
| 289 |
+
help='Is project external?',
|
| 290 |
+
default=False,
|
| 291 |
+
action='store_true',
|
| 292 |
+
)
|
| 293 |
+
|
| 294 |
+
|
| 295 |
+
def get_parser():  # pylint: disable=too-many-statements,too-many-locals
  """Returns an argparse parser with one subparser per helper subcommand."""
  parser = argparse.ArgumentParser('helper.py', description='oss-fuzz helpers')
  subparsers = parser.add_subparsers(dest='command')

  # generate: scaffold a new project directory.
  generate_parser = subparsers.add_parser(
      'generate', help='Generate files for new project.')
  generate_parser.add_argument('project')
  generate_parser.add_argument('--language',
                               default=constants.DEFAULT_LANGUAGE,
                               choices=LANGUAGE_TO_BASE_BUILDER_IMAGE.keys(),
                               help='Project language.')
  _add_external_project_args(generate_parser)

  # build_image: build the project's docker image.
  build_image_parser = subparsers.add_parser('build_image',
                                             help='Build an image.')
  build_image_parser.add_argument('project')
  build_image_parser.add_argument('--pull',
                                  action='store_true',
                                  help='Pull latest base image.')
  _add_architecture_args(build_image_parser)
  build_image_parser.add_argument('--cache',
                                  action='store_true',
                                  default=False,
                                  help='Use docker cache when building image.')
  build_image_parser.add_argument('--no-pull',
                                  action='store_true',
                                  help='Do not pull latest base image.')
  # NOTE(review): the two adjacent literals concatenate without a separator
  # ('...build tagdefault: latest'); kept as-is since help text is
  # user-visible output.
  build_image_parser.add_argument('--docker_image_tag',
                                  dest='docker_image_tag',
                                  default='latest',
                                  help='docker image build tag'
                                  'default: latest')
  _add_external_project_args(build_image_parser)

  # build_fuzzers: compile the fuzz targets inside the project image.
  build_fuzzers_parser = subparsers.add_parser(
      'build_fuzzers', help='Build fuzzers for a project.')
  _add_architecture_args(build_fuzzers_parser)
  _add_engine_args(build_fuzzers_parser)
  _add_sanitizer_args(build_fuzzers_parser)
  _add_environment_args(build_fuzzers_parser)
  _add_external_project_args(build_fuzzers_parser)
  build_fuzzers_parser.add_argument('project')
  build_fuzzers_parser.add_argument('source_path',
                                    help='path of local source',
                                    nargs='?')
  build_fuzzers_parser.add_argument('--mount_path',
                                    dest='mount_path',
                                    help='path to mount local source in '
                                    '(defaults to WORKDIR)')
  build_fuzzers_parser.add_argument('--clean',
                                    dest='clean',
                                    action='store_true',
                                    help='clean existing artifacts.')
  build_fuzzers_parser.add_argument('--no-clean',
                                    dest='clean',
                                    action='store_false',
                                    help='do not clean existing artifacts '
                                    '(default).')
  build_fuzzers_parser.add_argument('--docker_image_tag',
                                    dest='docker_image_tag',
                                    default='latest',
                                    help='docker image build tag'
                                    'default: latest')
  build_fuzzers_parser.set_defaults(clean=False)

  # fuzzbench_*: FuzzBench integration commands (note: free-form --engine,
  # unlike the constrained _add_engine_args used elsewhere).
  fuzzbench_build_fuzzers_parser = subparsers.add_parser(
      'fuzzbench_build_fuzzers')
  _add_architecture_args(fuzzbench_build_fuzzers_parser)
  fuzzbench_build_fuzzers_parser.add_argument('--engine')
  _add_sanitizer_args(fuzzbench_build_fuzzers_parser)
  _add_environment_args(fuzzbench_build_fuzzers_parser)
  _add_external_project_args(fuzzbench_build_fuzzers_parser)
  fuzzbench_build_fuzzers_parser.add_argument('project')
  check_build_parser = subparsers.add_parser(
      'check_build', help='Checks that fuzzers execute without errors.')
  _add_architecture_args(check_build_parser)
  _add_engine_args(check_build_parser, choices=constants.ENGINES)
  _add_sanitizer_args(check_build_parser, choices=constants.SANITIZERS)
  _add_environment_args(check_build_parser)
  check_build_parser.add_argument('project',
                                  help='name of the project or path (external)')
  check_build_parser.add_argument('fuzzer_name',
                                  help='name of the fuzzer',
                                  nargs='?')
  _add_external_project_args(check_build_parser)

  # run_fuzzer: execute one fuzz target locally.
  run_fuzzer_parser = subparsers.add_parser(
      'run_fuzzer', help='Run a fuzzer in the emulated fuzzing environment.')
  _add_architecture_args(run_fuzzer_parser)
  _add_engine_args(run_fuzzer_parser)
  _add_sanitizer_args(run_fuzzer_parser)
  _add_environment_args(run_fuzzer_parser)
  _add_external_project_args(run_fuzzer_parser)
  run_fuzzer_parser.add_argument(
      '--corpus-dir', help='directory to store corpus for the fuzz target')
  run_fuzzer_parser.add_argument('project',
                                 help='name of the project or path (external)')
  run_fuzzer_parser.add_argument('fuzzer_name', help='name of the fuzzer')
  run_fuzzer_parser.add_argument('fuzzer_args',
                                 help='arguments to pass to the fuzzer',
                                 nargs='*')

  fuzzbench_run_fuzzer_parser = subparsers.add_parser('fuzzbench_run_fuzzer')
  _add_architecture_args(fuzzbench_run_fuzzer_parser)
  fuzzbench_run_fuzzer_parser.add_argument('--engine')
  _add_sanitizer_args(fuzzbench_run_fuzzer_parser)
  _add_environment_args(fuzzbench_run_fuzzer_parser)
  _add_external_project_args(fuzzbench_run_fuzzer_parser)
  fuzzbench_run_fuzzer_parser.add_argument(
      '--corpus-dir', help='directory to store corpus for the fuzz target')
  fuzzbench_run_fuzzer_parser.add_argument(
      'project', help='name of the project or path (external)')
  fuzzbench_run_fuzzer_parser.add_argument('fuzzer_name',
                                           help='name of the fuzzer')
  fuzzbench_run_fuzzer_parser.add_argument(
      'fuzzer_args', help='arguments to pass to the fuzzer', nargs='*')

  fuzzbench_measure_parser = subparsers.add_parser('fuzzbench_measure')
  fuzzbench_measure_parser.add_argument(
      'project', help='name of the project or path (external)')
  fuzzbench_measure_parser.add_argument('engine_name',
                                        help='name of the fuzzer')
  fuzzbench_measure_parser.add_argument('fuzz_target_name',
                                        help='name of the fuzzer')

  # coverage: build/serve an HTML coverage report.
  coverage_parser = subparsers.add_parser(
      'coverage', help='Generate code coverage report for the project.')
  coverage_parser.add_argument('--no-corpus-download',
                               action='store_true',
                               help='do not download corpus backup from '
                               'OSS-Fuzz; use corpus located in '
                               'build/corpus/<project>/<fuzz_target>/')
  coverage_parser.add_argument('--no-serve',
                               action='store_true',
                               help='do not serve a local HTTP server.')
  coverage_parser.add_argument('--port',
                               default='8008',
                               help='specify port for'
                               ' a local HTTP server rendering coverage report')
  coverage_parser.add_argument('--fuzz-target',
                               help='specify name of a fuzz '
                               'target to be run for generating coverage '
                               'report')
  coverage_parser.add_argument('--corpus-dir',
                               help='specify location of corpus'
                               ' to be used (requires --fuzz-target argument)')
  coverage_parser.add_argument('--public',
                               action='store_true',
                               help='if set, will download public '
                               'corpus using wget')
  coverage_parser.add_argument('project',
                               help='name of the project or path (external)')
  coverage_parser.add_argument('extra_args',
                               help='additional arguments to '
                               'pass to llvm-cov utility.',
                               nargs='*')
  _add_external_project_args(coverage_parser)
  _add_architecture_args(coverage_parser)

  # introspector: full fuzz-introspector pipeline.
  introspector_parser = subparsers.add_parser(
      'introspector',
      help='Run a complete end-to-end run of '
      'fuzz introspector. This involves (1) '
      'building the fuzzers with ASAN; (2) '
      'running all fuzzers; (3) building '
      'fuzzers with coverge; (4) extracting '
      'coverage; (5) building fuzzers using '
      'introspector')
  introspector_parser.add_argument('project', help='name of the project')
  introspector_parser.add_argument('--seconds',
                                   help='number of seconds to run fuzzers',
                                   default=10)
  introspector_parser.add_argument('source_path',
                                   help='path of local source',
                                   nargs='?')
  introspector_parser.add_argument(
      '--public-corpora',
      help='if specified, will use public corpora for code coverage',
      default=False,
      action='store_true')
  introspector_parser.add_argument(
      '--private-corpora',
      help='if specified, will use private corpora',
      default=False,
      action='store_true')

  download_corpora_parser = subparsers.add_parser(
      'download_corpora', help='Download all corpora for a project.')
  download_corpora_parser.add_argument('--fuzz-target',
                                       nargs='+',
                                       help='specify name of a fuzz target')
  download_corpora_parser.add_argument('--public',
                                       action='store_true',
                                       help='if set, will download public '
                                       'corpus using wget')
  download_corpora_parser.add_argument(
      'project', help='name of the project or path (external)')

  # reproduce: run a fuzz target against a local testcase.
  reproduce_parser = subparsers.add_parser('reproduce',
                                           help='Reproduce a crash.')
  reproduce_parser.add_argument('--valgrind',
                                action='store_true',
                                help='run with valgrind')
  reproduce_parser.add_argument('--propagate_exit_codes',
                                action='store_true',
                                default=False,
                                help='return underlying exit codes instead of True/False.')
  reproduce_parser.add_argument('--not_privileged',
                                dest='privileged',
                                action='store_false',
                                default=True,
                                help='reproduce without running docker in privileged mode.')
  reproduce_parser.add_argument('--err_result',
                                help='exit code override for missing harness / fuzz targets '
                                '(default err_result = 1).',
                                type=int)
  reproduce_parser.add_argument('--timeout',
                                help='timeout for reproduce subprocess '
                                '(default: None).',
                                default=None,
                                type=int)
  reproduce_parser.add_argument('project',
                                help='name of the project or path (external)')
  reproduce_parser.add_argument('fuzzer_name', help='name of the fuzzer')
  reproduce_parser.add_argument('testcase_path', help='path of local testcase')
  reproduce_parser.add_argument('fuzzer_args',
                                help='arguments to pass to the fuzzer',
                                nargs='*')
  _add_environment_args(reproduce_parser)
  _add_external_project_args(reproduce_parser)
  _add_architecture_args(reproduce_parser)

  # shell: interactive bash inside the builder container.
  shell_parser = subparsers.add_parser(
      'shell', help='Run /bin/bash within the builder container.')
  shell_parser.add_argument('project',
                            help='name of the project or path (external)')
  shell_parser.add_argument('source_path',
                            help='path of local source',
                            nargs='?')
  shell_parser.add_argument('--docker_image_tag',
                            dest='docker_image_tag',
                            default='latest',
                            help='docker image build tag'
                            'default: latest')
  _add_architecture_args(shell_parser)
  _add_engine_args(shell_parser)
  _add_sanitizer_args(shell_parser)
  _add_environment_args(shell_parser)
  _add_external_project_args(shell_parser)

  run_clusterfuzzlite_parser = subparsers.add_parser(
      'run_clusterfuzzlite', help='Run ClusterFuzzLite on a project.')
  _add_sanitizer_args(run_clusterfuzzlite_parser)
  _add_environment_args(run_clusterfuzzlite_parser)
  run_clusterfuzzlite_parser.add_argument('project')
  run_clusterfuzzlite_parser.add_argument('--clean',
                                          dest='clean',
                                          action='store_true',
                                          help='clean existing artifacts.')
  run_clusterfuzzlite_parser.add_argument(
      '--no-clean',
      dest='clean',
      action='store_false',
      help='do not clean existing artifacts '
      '(default).')
  # NOTE(review): default='master' is dead since required=True forces the
  # caller to pass --branch explicitly — confirm which was intended.
  run_clusterfuzzlite_parser.add_argument('--branch',
                                          default='master',
                                          required=True)
  _add_external_project_args(run_clusterfuzzlite_parser)
  run_clusterfuzzlite_parser.set_defaults(clean=False)

  subparsers.add_parser('pull_images', help='Pull base images.')
  return parser
| 570 |
+
|
| 571 |
+
def is_base_image(image_name):
  """Returns True when |image_name| names a directory under infra/base-images.

  The check is relative to the current working directory (the helper chdirs
  to the OSS-Fuzz root before dispatching commands).
  """
  base_image_dir = os.path.join('infra', 'base-images', image_name)
  return os.path.exists(base_image_dir)
| 575 |
+
|
| 576 |
+
def check_project_exists(project):
  """Returns True when |project|'s directory exists; logs an error otherwise."""
  if os.path.exists(project.path):
    return True

  # External projects are identified by their full path; OSS-Fuzz projects
  # by their short name.
  if project.is_external:
    descriptive_project_name = project.path
  else:
    descriptive_project_name = project.name
  logger.error('"%s" does not exist.', descriptive_project_name)
  return False
| 589 |
+
|
| 590 |
+
def _check_fuzzer_exists(project, fuzzer_name, architecture='x86_64'):
  """Returns True when |fuzzer_name| exists in |project|'s out directory.

  Runs the check inside a throwaway base-runner container (mounting the
  project's out dir at /out) so the result matches what run_fuzzer will see.
  """
  if architecture == 'aarch64':
    platform = 'linux/arm64'
  else:
    platform = 'linux/amd64'
  command = [
      'docker', 'run', '--rm', '--platform', platform, '-v',
      '%s:/out' % project.out, BASE_RUNNER_IMAGE, '/bin/bash', '-c',
      'test -f /out/%s' % fuzzer_name
  ]

  try:
    subprocess.check_call(command)
  except subprocess.CalledProcessError:
    logger.error('%s does not seem to exist. Please run build_fuzzers first.',
                 fuzzer_name)
    return False

  return True
| 608 |
+
|
| 609 |
+
def _normalized_name(name):
  """Maps special characters in |name| (slash, colon, etc.) to hyphens.

  This is important as otherwise these chars break local and cloud storage
  paths. Leading/trailing hyphens are trimmed from the result.
  """
  hyphenated = SPECIAL_CHARS_REGEX.sub('-', name)
  return hyphenated.strip('-')
| 615 |
+
|
| 616 |
+
def _get_absolute_path(path):
|
| 617 |
+
"""Returns absolute path with user expansion."""
|
| 618 |
+
return os.path.abspath(os.path.expanduser(path))
|
| 619 |
+
|
| 620 |
+
|
| 621 |
+
def _get_command_string(command):
|
| 622 |
+
"""Returns a shell escaped command string."""
|
| 623 |
+
return ' '.join(shlex.quote(part) for part in command)
|
| 624 |
+
|
| 625 |
+
|
| 626 |
+
def _get_project_build_subdir(project, subdir_name):
  """Returns BUILD_DIR/<subdir_name>/<project>, creating it when missing."""
  subdir = os.path.join(BUILD_DIR, subdir_name, project)
  # exist_ok avoids a race-prone exists() check before creation.
  os.makedirs(subdir, exist_ok=True)
  return subdir
| 634 |
+
|
| 635 |
+
def _get_out_dir(project=''):
  """Returns (creating if needed) the /out build directory for |project|.

  With no project given, returns the shared top-level out directory.
  """
  return _get_project_build_subdir(project, 'out')
def _add_architecture_args(parser, choices=None):
  """Registers the shared --architecture flag on |parser|."""
  if choices is None:
    choices = constants.ARCHITECTURES
  parser.add_argument(
      '--architecture',
      default=constants.DEFAULT_ARCHITECTURE,
      choices=choices)
def _add_engine_args(parser, choices=None):
  """Registers the shared --engine flag on |parser|."""
  if choices is None:
    choices = constants.ENGINES
  parser.add_argument(
      '--engine',
      default=constants.DEFAULT_ENGINE,
      choices=choices)
def _add_sanitizer_args(parser, choices=None):
  """Registers the shared --sanitizer flag on |parser|.

  The default is left as None so callers can distinguish "not specified";
  the effective default documented to users is "address".
  """
  if choices is None:
    choices = constants.SANITIZERS
  parser.add_argument(
      '--sanitizer',
      default=None,
      choices=choices,
      help='the default is "address"')
def _add_environment_args(parser):
|
| 670 |
+
"""Adds common environment args."""
|
| 671 |
+
parser.add_argument('-e',
|
| 672 |
+
action='append',
|
| 673 |
+
help="set environment variable e.g. VAR=value")
|
| 674 |
+
|
| 675 |
+
|
| 676 |
+
def build_image_impl(project, cache=True, pull=False,
                     architecture='x86_64',
                     docker_image_tag='latest'):
  """Builds the docker image for |project| (or a base image).

  Args:
    project: project object; project.name may also be a base-image name.
    cache: when False, passes --no-cache to docker.
    pull: when True, pulls the language's base images first.
    architecture: 'x86_64' or 'aarch64'; aarch64 goes through `docker buildx`.
    docker_image_tag: tag applied to non-base project images.

  Returns:
    True on success, False on failure.
  """
  image_name = project.name

  if is_base_image(image_name):
    # Base images are built from the in-repo infra/base-images tree and
    # tagged under the ghcr.io registry namespace.
    image_project = 'aixcc-finals'
    docker_build_dir = os.path.join(OSS_FUZZ_DIR, 'infra', 'base-images',
                                    image_name)
    dockerfile_path = os.path.join(docker_build_dir, 'Dockerfile')
    image_name = 'ghcr.io/%s/%s%s' % (image_project, image_name, BASE_IMAGE_TAG)
  else:
    # Project images are built from the project's own directory/Dockerfile.
    if not check_project_exists(project):
      return False
    dockerfile_path = project.dockerfile_path
    docker_build_dir = project.path
    image_project = 'aixcc-afc'
    image_name = '%s/%s:%s' % (image_project, image_name, docker_image_tag)

  if pull and not pull_images(project.language):
    return False

  build_args = []
  # aarch64 builds go through buildx so QEMU emulation can be used; --load
  # imports the result into the local image store.
  if architecture == 'aarch64':
    build_args += [
        'buildx',
        'build',
        '--platform',
        'linux/arm64',
        '--progress',
        'plain',
        '--load',
    ]
  if not cache:
    build_args.append('--no-cache')

  build_args += ['-t', image_name, '--file', dockerfile_path]
  build_args.append(docker_build_dir)

  if architecture == 'aarch64':
    # buildx is invoked directly; docker_build() would prepend 'docker build'.
    # NOTE(review): unlike docker_build(), a failure here raises
    # CalledProcessError instead of returning False — confirm callers expect
    # that.
    command = ['docker'] + build_args
    subprocess.check_call(command)
    return True
  return docker_build(build_args)
def _env_to_docker_args(env_list):
|
| 724 |
+
"""Turns envirnoment variable list into docker arguments."""
|
| 725 |
+
return sum([['-e', v] for v in env_list], [])
|
| 726 |
+
|
| 727 |
+
|
| 728 |
+
def workdir_from_lines(lines, default='/src'):
  """Returns the workdir set by the last WORKDIR directive in |lines|.

  '$SRC' is rewritten to '/src' and relative paths are resolved under /src.
  Returns |default| when no WORKDIR directive is found.
  """
  # Scan from the end so the last WORKDIR wins.
  for line in reversed(lines):
    match = re.match(WORKDIR_REGEX, line)
    if not match:
      continue
    workdir = match.group(1).replace('$SRC', '/src')
    if not os.path.isabs(workdir):
      workdir = os.path.join('/src', workdir)
    return os.path.normpath(workdir)
  return default
def _workdir_from_dockerfile(project):
  """Returns the WORKDIR declared in |project|'s Dockerfile.

  Falls back to /src/<project name> when the Dockerfile sets no WORKDIR.
  """
  with open(project.dockerfile_path) as dockerfile:
    dockerfile_lines = dockerfile.readlines()
  fallback = os.path.join('/src', project.name)
  return workdir_from_lines(dockerfile_lines, default=fallback)
def prepare_aarch64_emulation():
  """Creates and selects a docker buildx builder so AArch64 targets can be
  built via QEMU emulation on an x86_64 host."""
  for buildx_subcommand in (['create', '--name', ARM_BUILDER_NAME],
                            ['use', ARM_BUILDER_NAME]):
    subprocess.check_call(['docker', 'buildx'] + buildx_subcommand)
def docker_run(run_args, print_output=True, architecture='x86_64',
               propagate_exit_codes=False, privileged=True, timeout=None):
  """Calls `docker run`.

  Args:
    run_args: arguments appended after the docker options (volumes, image,
      command, ...).
    print_output: when False, the child's stdout/stderr are discarded.
    architecture: 'x86_64' or 'aarch64'; selects the container platform.
    propagate_exit_codes: when True, returns the raw exit code instead of a
      boolean success flag.
    privileged: when True, runs the container with --privileged.
    timeout: optional timeout in seconds; a timed-out run is reported with
      exit code 124 (mirroring coreutils `timeout`).

  Returns:
    The exit code when |propagate_exit_codes|, otherwise True on success.
  """
  platform = 'linux/arm64' if architecture == 'aarch64' else 'linux/amd64'

  command = ['docker', 'run']
  if privileged:
    command.append('--privileged')
  command.extend(['--shm-size=2g', '--platform', platform])

  # Keep the container around for debugging when the caller asks for it by
  # name; otherwise remove it on exit.
  if os.getenv('OSS_FUZZ_SAVE_CONTAINERS_NAME'):
    command.append('--name')
    command.append(os.getenv('OSS_FUZZ_SAVE_CONTAINERS_NAME'))
  else:
    command.append('--rm')

  # Support environments with a TTY.
  if sys.stdin.isatty():
    command.append('-i')

  command.extend(run_args)

  logger.info('Running: %s.', _get_command_string(command))
  # Fix: use subprocess.DEVNULL instead of open(os.devnull, 'w'), which was
  # never closed and leaked a file handle on every silenced run.
  stdout = None if print_output else subprocess.DEVNULL

  exit_code = 0

  try:
    subprocess.check_call(command, stdout=stdout, stderr=subprocess.STDOUT,
                          timeout=timeout)
  except subprocess.CalledProcessError as e:
    print(f'subprocess command returned a non-zero exit status: {e.returncode}')
    exit_code = e.returncode
  except subprocess.TimeoutExpired:
    print(f'subprocess command timed out: {timeout=}')
    exit_code = 124

  return exit_code if propagate_exit_codes else exit_code == 0
def docker_build(build_args):
  """Runs `docker build` with |build_args|; returns True on success."""
  command = ['docker', 'build'] + build_args
  logger.info('Running: %s.', _get_command_string(command))
  try:
    subprocess.check_call(command)
  except subprocess.CalledProcessError:
    logger.error('Docker build failed.')
    return False
  return True
def docker_pull(image):
  """Runs `docker pull` for |image|; returns True on success."""
  command = ['docker', 'pull', image]
  logger.info('Running: %s', _get_command_string(command))
  try:
    subprocess.check_call(command)
  except subprocess.CalledProcessError:
    logger.error('Docker pull failed.')
    return False
  return True
def build_image(args):
  """Builds the docker image for |args.project|.

  Honors --pull/--no-pull; when neither is given, asks interactively whether
  to pull the latest base images first.

  Returns:
    True on success, False on failure or incompatible arguments.
  """
  if args.pull and args.no_pull:
    logger.error('Incompatible arguments --pull and --no-pull.')
    return False

  if args.pull:
    pull = True
  elif args.no_pull:
    pull = False
  else:
    # Fix: `raw_input` does not exist in Python 3 and raised NameError on the
    # interactive path; the builtin `input` is its Python 3 equivalent.
    y_or_n = input('Pull latest base images (compiler/runtime)? (y/N): ')
    pull = y_or_n.lower() == 'y'

  if pull:
    logger.info('Pulling latest base images...')
  else:
    logger.info('Using cached base images...')

  # If build_image is called explicitly, don't use cache.
  if build_image_impl(args.project,
                      cache=args.cache,
                      pull=pull,
                      architecture=args.architecture,
                      docker_image_tag=args.docker_image_tag):
    return True

  return False
def build_fuzzers_impl(  # pylint: disable=too-many-arguments,too-many-locals,too-many-branches
    project,
    clean,
    engine,
    sanitizer,
    architecture,
    env_to_add,
    source_path,
    mount_path=None,
    child_dir='',
    build_project_image=True,
    docker_image_tag='latest'):
  """Builds fuzzers by running the project image's `compile` step.

  Args:
    project: project object (provides name, out, work, language, ...).
    clean: when True, wipes the project's /out and /work before building.
    engine: value exported as FUZZING_ENGINE.
    sanitizer: value exported as SANITIZER.
    architecture: value exported as ARCHITECTURE; also selects the platform.
    env_to_add: extra VAR=value strings appended to the build environment.
    source_path: optional host path with sources that replace the in-image
      checkout before compiling.
    mount_path: in-container path the local sources replace; defaults to the
      Dockerfile's WORKDIR.
    child_dir: subdirectory of the project's /out receiving the artifacts.
    build_project_image: when False, assumes the image was already built.
    docker_image_tag: tag of the project docker image to build/run.

  Returns:
    True on success, False on failure.
  """
  if build_project_image and not build_image_impl(project,
                                                  architecture=architecture,
                                                  docker_image_tag=docker_image_tag):
    return False

  docker_image = f'aixcc-afc/{project.name}:{docker_image_tag}'

  project_out = os.path.join(project.out, child_dir)
  if clean:
    logger.info('Cleaning existing build artifacts.')

    # Clean old and possibly conflicting artifacts in project's out directory.
    docker_run([
        '-v', f'{project_out}:/out', '-t', f'{docker_image}',
        '/bin/bash', '-c', 'rm -rf /out/*'
    ],
               architecture=architecture)

    # Also clean the /work scratch directory.
    docker_run([
        '-v',
        '%s:/work' % project.work, '-t',
        f'{docker_image}', '/bin/bash', '-c', 'rm -rf /work/*'
    ],
               architecture=architecture)

  else:
    logger.info('Keeping existing build artifacts as-is (if any).')
  env = [
      'FUZZING_ENGINE=' + engine,
      'SANITIZER=' + sanitizer,
      'ARCHITECTURE=' + architecture,
      'PROJECT_NAME=' + project.name,
      'HELPER=True',
  ]

  _add_oss_fuzz_ci_if_needed(env)

  if project.language:
    env.append('FUZZING_LANGUAGE=' + project.language)

  if env_to_add:
    env += env_to_add

  command = _env_to_docker_args(env)
  if source_path:
    workdir = _workdir_from_dockerfile(project)
    # Where inside the container the local sources will be copied to.
    stateless_path = mount_path if mount_path else workdir

    # Refusing '/src' keeps the copy step below from wiping the whole source
    # tree inside the container.
    if stateless_path == '/src':
      logger.error('Cannot mount local source targeting "/src".')
      return False

    # The sources are mounted read-only and copied into place at build time.
    command += [
        '-v',
        '%s:%s:ro' % (_get_absolute_path(source_path), '/local-source-mount'),
    ]

  command += [
      '-v', f'{project_out}:/out', '-v', f'{project.work}:/work',
      f'{docker_image}'
  ]

  if sys.stdin.isatty():
    # Insert before the image name so docker treats it as a run option.
    command.insert(-1, '-t')

  if source_path:
    default_cmd = 'compile'
    # Replace the in-image checkout with the mounted local sources, then build.
    command += [
        '/bin/bash',
        '-c',
        f'pushd $SRC && rm -rf {stateless_path} && cp -r /local-source-mount {stateless_path} && popd && {default_cmd}'
    ]

  result = docker_run(command, architecture=architecture)
  if not result:
    logger.error('Building fuzzers failed.')
    return False

  return True
def run_clusterfuzzlite(args):
  """Runs ClusterFuzzLite on a local repo.

  Builds the cifuzz-run-fuzzers image and launches it with the host docker
  socket mounted so it can spawn sibling containers. Results are persisted
  in CLUSTERFUZZLITE_FILESTORE_DIR across runs.

  Returns:
    Whatever docker_run returns for the CFL container, or False on failure.
  """
  if not os.path.exists(CLUSTERFUZZLITE_FILESTORE_DIR):
    os.mkdir(CLUSTERFUZZLITE_FILESTORE_DIR)

  try:
    with tempfile.TemporaryDirectory() as workspace:

      if args.external:
        # External projects: copy the repo into the workspace so the
        # container can see it at a stable path.
        project_src_path = os.path.join(workspace, args.project.name)
        shutil.copytree(args.project.path, project_src_path)

      build_command = [
          '--tag', 'ghcr.io/aixcc-finals/cifuzz-run-fuzzers', '--file',
          'infra/run_fuzzers.Dockerfile', 'infra'
      ]
      if not docker_build(build_command):
        return False
      filestore_path = os.path.abspath(CLUSTERFUZZLITE_FILESTORE_DIR)
      docker_run_command = []
      if args.external:
        docker_run_command += [
            '-e',
            f'PROJECT_SRC_PATH={project_src_path}',
        ]
      else:
        docker_run_command += [
            '-e',
            f'OSS_FUZZ_PROJECT_NAME={args.project.name}',
        ]
      # Mount the filestore and workspace at identical host/container paths
      # so paths passed between sibling containers stay valid.
      docker_run_command += [
          '-v',
          f'{filestore_path}:{filestore_path}',
          '-v',
          f'{workspace}:{workspace}',
          '-e',
          f'FILESTORE_ROOT_DIR={filestore_path}',
          '-e',
          f'WORKSPACE={workspace}',
          '-e',
          f'REPOSITORY={args.project.name}',
          '-e',
          'CFL_PLATFORM=standalone',
          '--entrypoint',
          '',
          '-v',
          '/var/run/docker.sock:/var/run/docker.sock',
          CLUSTERFUZZLITE_DOCKER_IMAGE,
          'python3',
          '/opt/oss-fuzz/infra/cifuzz/cifuzz_combined_entrypoint.py',
      ]
      return docker_run(docker_run_command)

  except PermissionError as error:
    logger.error('PermissionError: %s.', error)
    # Tempfile can't delete the workspace because of a permissions issue. This
    # is because docker creates files in the workspace that are owned by root
    # but this process is probably being run as another user. Use a docker image
    # to delete the temp directory (workspace) so that we have permission.
    # NOTE(review): `workspace` is still bound here because the PermissionError
    # is raised during TemporaryDirectory cleanup, after the name was assigned.
    docker_run([
        '-v', f'{workspace}:{workspace}', '--entrypoint', '',
        CLUSTERFUZZLITE_DOCKER_IMAGE, 'rm', '-rf',
        os.path.join(workspace, '*')
    ])
    return False
def build_fuzzers(args):
  """Builds fuzzers for every sanitizer/output-directory pair required.

  Centipede with a real sanitizer needs two binaries per target: an
  unsanitized one (which Centipede requires for fuzzing) plus a sanitized
  one in a dedicated child directory. Every other configuration needs a
  single sanitized binary in the default directory.
  """
  if args.engine == 'centipede' and args.sanitizer != 'none':
    build_plan = (
        ('none', ''),
        (args.sanitizer, f'__centipede_{args.sanitizer}'),
    )
  else:
    build_plan = ((args.sanitizer, ''),)
  return all(
      build_fuzzers_impl(args.project,
                         args.clean,
                         args.engine,
                         sanitizer,
                         args.architecture,
                         args.e,
                         args.source_path,
                         mount_path=args.mount_path,
                         child_dir=child_dir,
                         docker_image_tag=args.docker_image_tag)
      for sanitizer, child_dir in build_plan)
def fuzzbench_build_fuzzers(args):
  """Builds fuzz targets with an arbitrary fuzzer from FuzzBench.

  Clones FuzzBench into a temp dir, overlays the FuzzBench base-builder onto
  the project's base image, builds the engine-specific builder image, and
  finally runs the regular fuzzer build with FuzzBench environment exported.

  Returns:
    The result of build_fuzzers_impl (True on success).
  """
  with tempfile.TemporaryDirectory() as tmp_dir:
    tmp_dir = os.path.abspath(tmp_dir)
    fuzzbench_path = os.path.join(tmp_dir, 'fuzzbench')
    # Shallow clone: only the latest snapshot of FuzzBench is needed.
    subprocess.run([
        'git', 'clone', 'https://github.com/google/fuzzbench', '--depth', '1',
        fuzzbench_path
    ],
                   check=True)
    env = [
        f'FUZZBENCH_PATH={fuzzbench_path}', 'OSS_FUZZ_ON_DEMAND=1',
        f'PROJECT={args.project.name}'
    ]
    tag = f'aixcc-afc/{args.project.name}'
    # Make the FuzzBench-flavored base-builder masquerade as the regular
    # base-builder so the project image builds on top of it.
    subprocess.run([
        'docker', 'tag', 'ghcr.io/aixcc-finals/base-builder-fuzzbench',
        f'ghcr.io/aixcc-finals/base-builder{BASE_IMAGE_TAG}'
    ],
                   check=True)
    build_image_impl(args.project)
    assert docker_build([
        '--tag', tag, '--build-arg', f'parent_image={tag}', '--file',
        os.path.join(fuzzbench_path, 'fuzzers', args.engine,
                     'builder.Dockerfile'),
        os.path.join(fuzzbench_path, 'fuzzers', args.engine)
    ])

    # Image already built above, so skip rebuilding it here.
    return build_fuzzers_impl(args.project,
                              False,
                              args.engine,
                              args.sanitizer,
                              args.architecture,
                              env,
                              source_path=fuzzbench_path,
                              mount_path=fuzzbench_path,
                              build_project_image=False)
def _add_oss_fuzz_ci_if_needed(env):
|
| 1092 |
+
"""Adds value of |OSS_FUZZ_CI| environment variable to |env| if it is set."""
|
| 1093 |
+
oss_fuzz_ci = os.getenv('OSS_FUZZ_CI')
|
| 1094 |
+
if oss_fuzz_ci:
|
| 1095 |
+
env.append('OSS_FUZZ_CI=' + oss_fuzz_ci)
|
| 1096 |
+
|
| 1097 |
+
|
| 1098 |
+
def check_build(args):
  """Checks that fuzzers in the container execute without errors.

  Runs test_one.py for a single fuzzer (when args.fuzzer_name is given) or
  test_all.py for every built target, inside the base runner image.

  Returns:
    True when the check passes, False otherwise.
  """
  if not check_project_exists(args.project):
    return False

  # Only validate the named fuzzer's existence when one was requested.
  if (args.fuzzer_name and not _check_fuzzer_exists(
      args.project, args.fuzzer_name, args.architecture)):
    return False

  env = [
      'FUZZING_ENGINE=' + args.engine,
      'SANITIZER=' + args.sanitizer,
      'ARCHITECTURE=' + args.architecture,
      'FUZZING_LANGUAGE=' + args.project.language,
      'HELPER=True',
  ]
  _add_oss_fuzz_ci_if_needed(env)
  if args.e:
    env += args.e

  run_args = _env_to_docker_args(env) + [
      '-v', f'{args.project.out}:/out', '-t', BASE_RUNNER_IMAGE
  ]

  if args.fuzzer_name:
    run_args += ['test_one.py', args.fuzzer_name]
  else:
    run_args.append('test_all.py')

  result = docker_run(run_args, architecture=args.architecture)
  if result:
    logger.info('Check build passed.')
  else:
    logger.error('Check build failed.')

  return result
def _get_fuzz_targets(project):
|
| 1137 |
+
"""Returns names of fuzz targest build in the project's /out directory."""
|
| 1138 |
+
fuzz_targets = []
|
| 1139 |
+
for name in os.listdir(project.out):
|
| 1140 |
+
if name.startswith('afl-'):
|
| 1141 |
+
continue
|
| 1142 |
+
if name == 'centipede':
|
| 1143 |
+
continue
|
| 1144 |
+
if name.startswith('jazzer_'):
|
| 1145 |
+
continue
|
| 1146 |
+
if name == 'llvm-symbolizer':
|
| 1147 |
+
continue
|
| 1148 |
+
|
| 1149 |
+
path = os.path.join(project.out, name)
|
| 1150 |
+
# Python and JVM fuzz targets are only executable for the root user, so
|
| 1151 |
+
# we can't use os.access.
|
| 1152 |
+
if os.path.isfile(path) and (os.stat(path).st_mode & 0o111):
|
| 1153 |
+
fuzz_targets.append(name)
|
| 1154 |
+
|
| 1155 |
+
return fuzz_targets
|
| 1156 |
+
|
| 1157 |
+
|
| 1158 |
+
def _get_latest_corpus(project, fuzz_target, base_corpus_dir):
  """Downloads the latest corpus for the given fuzz target.

  Prefers the most recent minimized corpus backup (a zip in GCS); falls back
  to rsyncing the live working corpus when no backup exists. Missing corpora
  are logged and skipped rather than treated as errors.

  Args:
    project: project object whose name namespaces the GCS paths.
    fuzz_target: target name; qualified with the project prefix if needed.
    base_corpus_dir: local directory under which the corpus is placed.
  """
  corpus_dir = os.path.join(base_corpus_dir, fuzz_target)
  os.makedirs(corpus_dir, exist_ok=True)

  # GCS layout uses project-qualified target names.
  if not fuzz_target.startswith(project.name + '_'):
    fuzz_target = '%s_%s' % (project.name, fuzz_target)

  # Normalise fuzz target name.
  fuzz_target = _normalized_name(fuzz_target)

  corpus_backup_url = CORPUS_BACKUP_URL_FORMAT.format(project_name=project.name,
                                                      fuzz_target=fuzz_target)
  command = ['gsutil', 'ls', corpus_backup_url]

  # Don't capture stderr. We want it to print in real time, in case gsutil is
  # asking for two-factor authentication.
  corpus_listing = subprocess.Popen(command, stdout=subprocess.PIPE)
  output, _ = corpus_listing.communicate()

  # Some fuzz targets (e.g. new ones) may not have corpus yet, just skip those.
  if corpus_listing.returncode:
    logger.warning('Corpus for %s not found:\n', fuzz_target)
    return

  if output:
    # Listing is lexicographically sorted, so the last entry is the newest.
    latest_backup_url = output.splitlines()[-1]
    archive_path = corpus_dir + '.zip'
    command = ['gsutil', '-q', 'cp', latest_backup_url, archive_path]
    subprocess.check_call(command)

    command = ['unzip', '-q', '-o', archive_path, '-d', corpus_dir]
    subprocess.check_call(command)
    os.remove(archive_path)
  else:
    # Sync the working corpus copy if a minimized backup is not available.
    corpus_url = CORPUS_URL_FORMAT.format(project_name=project.name,
                                          fuzz_target=fuzz_target)
    command = ['gsutil', '-m', '-q', 'rsync', '-R', corpus_url, corpus_dir]
    subprocess.check_call(command)
def _get_latest_public_corpus(args, fuzzer):
  """Downloads the public corpus backup for |fuzzer| via HTTPS (wget).

  The zip is fetched into build/corpus/<project>/, extracted into a per-fuzzer
  subdirectory and then deleted. Download/extract failures are logged but
  deliberately do not abort the function (best-effort behavior).

  Returns:
    True (always; failures are only logged).
  """
  target_corpus_dir = "build/corpus/%s" % args.project.name
  if not os.path.isdir(target_corpus_dir):
    os.makedirs(target_corpus_dir)

  target_zip = os.path.join(target_corpus_dir, fuzzer + ".zip")

  # The public backup URL uses the project-qualified target name.
  project_qualified_fuzz_target_name = fuzzer
  qualified_name_prefix = args.project.name + '_'
  if not fuzzer.startswith(qualified_name_prefix):
    project_qualified_fuzz_target_name = qualified_name_prefix + fuzzer

  download_url = HTTPS_CORPUS_BACKUP_URL_FORMAT.format(
      project_name=args.project.name,
      fuzz_target=project_qualified_fuzz_target_name)

  cmd = ['wget', download_url, '-O', target_zip]
  try:
    with open(os.devnull, 'w') as stdout:
      subprocess.check_call(cmd, stdout=stdout)
  except OSError:
    # NOTE(review): only OSError (e.g. wget missing) is caught; a failed
    # download raises CalledProcessError and propagates — confirm intended.
    logger.error('Failed to download corpus')

  target_fuzzer_dir = os.path.join(target_corpus_dir, fuzzer)
  if not os.path.isdir(target_fuzzer_dir):
    os.mkdir(target_fuzzer_dir)

  target_corpus_dir = os.path.join(target_corpus_dir, fuzzer)
  try:
    with open(os.devnull, 'w') as stdout:
      subprocess.check_call(
          ['unzip', '-q', '-o', target_zip, '-d', target_fuzzer_dir],
          stdout=stdout)
  except OSError:
    logger.error('Failed to unzip corpus')

  # Remove the downloaded zip
  os.remove(target_zip)
  return True
def download_corpora(args):
  """Downloads most recent corpora from GCS for the given project.

  With --public, uses wget against the public HTTPS backups; otherwise uses
  gsutil against the private GCS buckets. Targets come from --fuzz-target or
  are auto-discovered from the project's built /out directory. Downloads run
  in parallel, one thread per target.

  Returns:
    True when every per-target download succeeded, False otherwise.
  """
  if not check_project_exists(args.project):
    return False

  # Verify the required download tool is installed before doing any work.
  if args.public:
    logger.info("Downloading public corpus")
    try:
      with open(os.devnull, 'w') as stdout:
        subprocess.check_call(['wget', '--version'], stdout=stdout)
    except OSError:
      logger.error('wget not found')
      return False
  else:
    try:
      with open(os.devnull, 'w') as stdout:
        subprocess.check_call(['gsutil', '--version'], stdout=stdout)
    except OSError:
      logger.error('gsutil not found. Please install it from '
                   'https://cloud.google.com/storage/docs/gsutil_install')
      return False

  if args.fuzz_target:
    fuzz_targets = args.fuzz_target
  else:
    fuzz_targets = _get_fuzz_targets(args.project)

  if not fuzz_targets:
    logger.error(
        'Fuzz targets not found. Please build project first '
        '(python3 infra/helper.py build_fuzzers %s) so that download_corpora '
        'can automatically identify targets.', args.project.name)
    return False

  corpus_dir = args.project.corpus

  def _download_for_single_target(fuzz_target):
    # Worker run per target on the thread pool; closes over args/corpus_dir.
    try:
      if args.public:
        _get_latest_public_corpus(args, fuzz_target)
      else:
        _get_latest_corpus(args.project, fuzz_target, corpus_dir)
      return True
    except Exception as error:  # pylint:disable=broad-except
      logger.error('Corpus download for %s failed: %s.', fuzz_target,
                   str(error))
      return False

  logger.info('Downloading corpora for %s project to %s.', args.project.name,
              corpus_dir)
  thread_pool = ThreadPool()
  return all(thread_pool.map(_download_for_single_target, fuzz_targets))
def coverage(args):  # pylint: disable=too-many-branches
  """Generates code coverage using clang source based code coverage.

  Validates arguments and project support, optionally downloads corpora,
  then runs the base runner image's `coverage` helper with the corpus and
  /out directories mounted.

  Returns:
    True when the coverage report was generated successfully.
  """
  # --corpus-dir only makes sense for a single, named target.
  if args.corpus_dir and not args.fuzz_target:
    logger.error(
        '--corpus-dir requires specifying a particular fuzz target using '
        '--fuzz-target')
    return False

  if not check_project_exists(args.project):
    return False

  if args.project.language not in constants.LANGUAGES_WITH_COVERAGE_SUPPORT:
    logger.error(
        'Project is written in %s, coverage for it is not supported yet.',
        args.project.language)
    return False

  # Fetch corpora unless the caller opted out or supplied their own.
  if (not args.no_corpus_download and not args.corpus_dir and
      not args.project.is_external):
    if not download_corpora(args):
      return False

  extra_cov_args = (
      f'{args.project.coverage_extra_args.strip()} {" ".join(args.extra_args)}')
  env = [
      'FUZZING_ENGINE=libfuzzer',
      'HELPER=True',
      'FUZZING_LANGUAGE=%s' % args.project.language,
      'PROJECT=%s' % args.project.name,
      'SANITIZER=coverage',
      'COVERAGE_EXTRA_ARGS=%s' % extra_cov_args,
      'ARCHITECTURE=' + args.architecture,
  ]

  if not args.no_serve:
    env.append(f'HTTP_PORT={args.port}')

  run_args = _env_to_docker_args(env)

  if args.port:
    run_args.extend([
        '-p',
        '%s:%s' % (args.port, args.port),
    ])

  if args.corpus_dir:
    if not os.path.exists(args.corpus_dir):
      logger.error('The path provided in --corpus-dir argument does not '
                   'exist.')
      return False
    corpus_dir = os.path.realpath(args.corpus_dir)
    run_args.extend(['-v', '%s:/corpus/%s' % (corpus_dir, args.fuzz_target)])
  else:
    run_args.extend(['-v', '%s:/corpus' % args.project.corpus])

  run_args.extend([
      '-v',
      '%s:/out' % args.project.out,
      '-t',
      BASE_RUNNER_IMAGE,
  ])

  run_args.append('coverage')
  if args.fuzz_target:
    run_args.append(args.fuzz_target)

  result = docker_run(run_args, architecture=args.architecture)
  if result:
    logger.info('Successfully generated clang code coverage report.')
  else:
    logger.error('Failed to generate clang code coverage report.')

  return result
def _introspector_prepare_corpus(args):
  """Helper function for introspector runs to generate corpora.

  Either downloads existing corpora (--private-corpora / --public-corpora)
  or generates fresh ones by running each detected fuzz target briefly.

  Returns:
    True on success (corpus-generation failures per target are tolerated),
    False when corpus download fails.
  """
  parser = get_parser()
  # Generate corpus, either by downloading or running fuzzers.
  if args.private_corpora or args.public_corpora:
    corpora_command = ['download_corpora']
    if args.public_corpora:
      corpora_command.append('--public')
    corpora_command.append(args.project.name)
    if not download_corpora(parse_args(parser, corpora_command)):
      logger.error('Failed to download corpora')
      return False
  else:
    fuzzer_targets = _get_fuzz_targets(args.project)
    for fuzzer_name in fuzzer_targets:
      # Make a corpus directory.
      fuzzer_corpus_dir = args.project.corpus + f'/{fuzzer_name}'
      if not os.path.isdir(fuzzer_corpus_dir):
        os.makedirs(fuzzer_corpus_dir)
      run_fuzzer_command = [
          'run_fuzzer', '--sanitizer', 'address', '--corpus-dir',
          fuzzer_corpus_dir, args.project.name, fuzzer_name
      ]

      parsed_args = parse_args(parser, run_fuzzer_command)
      # Time-bound each run; leak detection off to avoid spurious aborts.
      parsed_args.fuzzer_args = [
          f'-max_total_time={args.seconds}', '-detect_leaks=0'
      ]
      # Continue even if run command fails, because we do not have 100%
      # accuracy in fuzz target detection, i.e. we might try to run something
      # that is not a target.
      run_fuzzer(parsed_args)
  return True
def introspector(args):
  """Runs a complete end-to-end run of introspector.

  Pipeline: build with ASAN -> prepare corpora -> build with coverage ->
  collect coverage -> build with introspector -> assemble the combined
  report under <out>/introspector-report/inspector.

  Returns:
    True on success, False when any stage fails.
  """
  parser = get_parser()

  args_to_append = []
  if args.source_path:
    args_to_append.append(_get_absolute_path(args.source_path))

  # Build fuzzers with ASAN.
  build_fuzzers_command = [
      'build_fuzzers', '--sanitizer=address', args.project.name
  ] + args_to_append
  if not build_fuzzers(parse_args(parser, build_fuzzers_command)):
    logger.error('Failed to build project with ASAN')
    return False

  if not _introspector_prepare_corpus(args):
    return False

  # Build code coverage.
  build_fuzzers_command = [
      'build_fuzzers', '--sanitizer=coverage', args.project.name
  ] + args_to_append
  if not build_fuzzers(parse_args(parser, build_fuzzers_command)):
    logger.error('Failed to build project with coverage instrumentation')
    return False

  # Collect coverage.
  coverage_command = [
      'coverage', '--no-corpus-download', '--port', '', args.project.name
  ]
  if not coverage(parse_args(parser, coverage_command)):
    logger.error('Failed to extract coverage')
    return False

  # Build introspector.
  build_fuzzers_command = [
      'build_fuzzers', '--sanitizer=introspector', args.project.name
  ] + args_to_append
  if not build_fuzzers(parse_args(parser, build_fuzzers_command)):
    logger.error('Failed to build project with introspector')
    return False

  # Move the raw inspector output into the final report location.
  introspector_dst = os.path.join(args.project.out,
                                  "introspector-report/inspector")
  shutil.rmtree(introspector_dst, ignore_errors=True)
  shutil.copytree(os.path.join(args.project.out, "inspector"), introspector_dst)

  # Copy the coverage reports into the introspector report.
  dst_cov_report = os.path.join(introspector_dst, "covreport")
  shutil.copytree(os.path.join(args.project.out, "report"), dst_cov_report)

  # Copy per-target coverage reports
  src_target_cov_report = os.path.join(args.project.out, "report_target")
  for target_cov_dir in os.listdir(src_target_cov_report):
    dst_target_cov_report = os.path.join(dst_cov_report, target_cov_dir)
    shutil.copytree(os.path.join(src_target_cov_report, target_cov_dir),
                    dst_target_cov_report)

  logger.info('Introspector run complete. Report in %s', introspector_dst)
  logger.info(
      'To browse the report, run: `python3 -m http.server 8008 --directory %s`'
      'and navigate to localhost:8008/fuzz_report.html in your browser',
      introspector_dst)
  return True
def run_fuzzer(args):
  """Runs a fuzzer in the container.

  Args:
    args: Parsed CLI namespace; uses project, fuzzer_name, architecture,
      engine, sanitizer, e (extra env), corpus_dir and fuzzer_args.

  Returns:
    The result of docker_run, or False if the project/fuzzer checks or the
    corpus-dir check fail.
  """
  if not check_project_exists(args.project):
    return False

  if not _check_fuzzer_exists(args.project, args.fuzzer_name,
                              args.architecture):
    return False

  # Environment handed to the runner image; HELPER=True marks helper.py use.
  env = [
      'FUZZING_ENGINE=' + args.engine,
      'SANITIZER=' + args.sanitizer,
      'RUN_FUZZER_MODE=interactive',
      'HELPER=True',
  ]

  # User-supplied extra environment variables are appended last so they win.
  if args.e:
    env += args.e

  run_args = _env_to_docker_args(env)

  if args.corpus_dir:
    if not os.path.exists(args.corpus_dir):
      logger.error('The path provided in --corpus-dir argument does not exist')
      return False
    # realpath so the docker bind mount gets a resolved host path.
    corpus_dir = os.path.realpath(args.corpus_dir)
    run_args.extend([
        '-v',
        '{corpus_dir}:/tmp/{fuzzer}_corpus'.format(corpus_dir=corpus_dir,
                                                   fuzzer=args.fuzzer_name)
    ])

  # Mount the build output and invoke the image's run_fuzzer entry point,
  # forwarding any extra fuzzer arguments verbatim.
  run_args.extend([
      '-v',
      '%s:/out' % args.project.out,
      '-t',
      BASE_RUNNER_IMAGE,
      'run_fuzzer',
      args.fuzzer_name,
  ] + args.fuzzer_args)

  return docker_run(run_args, architecture=args.architecture)
|
| 1515 |
+
|
| 1516 |
+
|
| 1517 |
+
def fuzzbench_run_fuzzer(args):
  """Runs a fuzz target built by fuzzbench in the container.

  Clones a fresh shallow checkout of fuzzbench into a temporary directory,
  mounts it into the project container at the same path, and invokes the
  image's fuzzbench_run_fuzzer entry point.

  Returns:
    The result of docker_run, or False if the project or corpus-dir checks
    fail.
  """
  if not check_project_exists(args.project):
    return False

  env = [
      'FUZZING_ENGINE=' + args.engine,
      'SANITIZER=' + args.sanitizer,
      'RUN_FUZZER_MODE=interactive',
      'HELPER=True',
      f'FUZZ_TARGET={args.fuzzer_name}',
      f'BENCHMARK={args.project.name}',
      'TRIAL_ID=1',
      'EXPERIMENT_TYPE=bug',
  ]

  # Extra user-supplied env vars come last so they can override the above.
  if args.e:
    env += args.e

  run_args = _env_to_docker_args(env)

  if args.corpus_dir:
    if not os.path.exists(args.corpus_dir):
      logger.error('The path provided in --corpus-dir argument does not exist')
      return False
    corpus_dir = os.path.realpath(args.corpus_dir)
    run_args.extend([
        '-v',
        '{corpus_dir}:/tmp/{fuzzer}_corpus'.format(corpus_dir=corpus_dir,
                                                   fuzzer=args.fuzzer_name)
    ])

  with tempfile.TemporaryDirectory() as tmp_dir:
    tmp_dir = os.path.abspath(tmp_dir)
    fuzzbench_path = os.path.join(tmp_dir, 'fuzzbench')
    # Shallow clone; check=True aborts with CalledProcessError on failure.
    subprocess.run([
        'git', 'clone', 'https://github.com/google/fuzzbench', '--depth', '1',
        fuzzbench_path
    ],
                   check=True)
    # The checkout is mounted at the identical path inside the container so
    # FUZZBENCH_PATH is valid both on the host and in the container.
    run_args.extend([
        '-v',
        f'{args.project.out}:/out',
        '-v',
        f'{fuzzbench_path}:{fuzzbench_path}',
        '-e',
        f'FUZZBENCH_PATH={fuzzbench_path}',
        f'aixcc-afc/{args.project.name}',
        'fuzzbench_run_fuzzer',
        args.fuzzer_name,
    ] + args.fuzzer_args)

    # Must stay inside the with-block: the mount source is deleted on exit.
    return docker_run(run_args, architecture=args.architecture)
|
| 1570 |
+
|
| 1571 |
+
|
| 1572 |
+
def fuzzbench_measure(args):
  """Measure results from fuzzing with fuzzbench.

  Returns:
    The result of docker_run, or False if the project does not exist.
  """
  if not check_project_exists(args.project):
    return False

  with tempfile.TemporaryDirectory() as tmp_dir:
    tmp_dir = os.path.abspath(tmp_dir)
    fuzzbench_path = os.path.join(tmp_dir, 'fuzzbench')
    # Fresh shallow fuzzbench checkout, mounted into the container at the
    # same path so FUZZBENCH_PATH resolves inside the container too.
    subprocess.run([
        'git', 'clone', 'https://github.com/google/fuzzbench', '--depth', '1',
        fuzzbench_path
    ],
                   check=True)
    run_args = [
        '-v', f'{args.project.out}:/out', '-v',
        f'{fuzzbench_path}:{fuzzbench_path}', '-e',
        f'FUZZBENCH_PATH={fuzzbench_path}', '-e', 'EXPERIMENT_TYPE=bug', '-e',
        f'FUZZ_TARGET={args.fuzz_target_name}', '-e',
        f'FUZZER={args.engine_name}', '-e', f'BENCHMARK={args.project.name}',
        f'aixcc-afc/{args.project.name}', 'fuzzbench_measure'
    ]

    # Measurement is x86_64-only here; run before the tmp dir is removed.
    return docker_run(run_args, 'x86_64')
|
| 1595 |
+
|
| 1596 |
+
|
| 1597 |
+
def reproduce(args):
  """Reproduces a specific test case from a specific project."""
  # Thin CLI adapter: unpack the parsed namespace and delegate everything
  # to reproduce_impl.
  return reproduce_impl(args.project, args.fuzzer_name, args.valgrind, args.e,
                        args.fuzzer_args, args.testcase_path, args.architecture,
                        args.propagate_exit_codes, args.err_result,
                        privileged=args.privileged, timeout=args.timeout)
|
| 1603 |
+
|
| 1604 |
+
|
| 1605 |
+
def reproduce_impl(  # pylint: disable=too-many-arguments
    project,
    fuzzer_name,
    valgrind,
    env_to_add,
    fuzzer_args,
    testcase_path,
    architecture='x86_64',
    propagate_exit_codes=False,
    err_result=1,
    run_function=docker_run,
    privileged=True,
    timeout=None):
  """Reproduces a testcase in the container.

  Args:
    project: The Project whose build output contains |fuzzer_name|.
    fuzzer_name: Name of the fuzz target to run.
    valgrind: If truthy, run the target under valgrind memcheck.
    env_to_add: Optional extra 'KEY=VALUE' environment entries.
    fuzzer_args: Extra arguments forwarded to the fuzz target.
    testcase_path: Host path of the testcase, mounted at /testcase.
    architecture: Target architecture (default 'x86_64').
    propagate_exit_codes: If True, failures return |err_result| instead of
      False.
    err_result: Value returned on failure when propagating exit codes.
    run_function: Injectable runner (defaults to docker_run) — eases testing.
    privileged: Passed through to the run function.
    timeout: Passed through to the run function.
  """

  if not check_project_exists(project):
    return err_result if propagate_exit_codes else False

  if not _check_fuzzer_exists(project, fuzzer_name, architecture):
    return err_result if propagate_exit_codes else False

  debugger = ''
  env = ['HELPER=True', 'ARCHITECTURE=' + architecture]
  image_name = 'base-runner'

  if valgrind:
    debugger = 'valgrind --tool=memcheck --track-origins=yes --leak-check=full'

  # Any debugger forces the debug image, which understands DEBUGGER=...
  if debugger:
    image_name = 'base-runner-debug'
    env += ['DEBUGGER=' + debugger]

  if env_to_add:
    env += env_to_add

  # Mount build output and the testcase, then invoke the image's
  # 'reproduce' entry point; -runs=100 repeats the input 100 times.
  run_args = _env_to_docker_args(env) + [
      '-v',
      '%s:/out' % project.out,
      '-v',
      '%s:/testcase' % _get_absolute_path(testcase_path),
      '-t',
      'ghcr.io/aixcc-finals/%s%s' % (image_name, BASE_IMAGE_TAG),
      'reproduce',
      fuzzer_name,
      '-runs=100',
  ] + fuzzer_args

  return run_function(run_args, architecture=architecture, propagate_exit_codes=propagate_exit_codes, privileged=privileged, timeout=timeout)
|
| 1653 |
+
|
| 1654 |
+
|
| 1655 |
+
def _validate_project_name(project_name):
  """Validates |project_name| is a valid OSS-Fuzz project name."""
  # Length gate first: overly long names are rejected with an error.
  if len(project_name) > MAX_PROJECT_NAME_LENGTH:
    logger.error(
        'Project name needs to be less than or equal to %d characters.',
        MAX_PROJECT_NAME_LENGTH)
    return False

  # Character gate: the name must fully match the allowed-name pattern.
  if VALID_PROJECT_NAME_REGEX.match(project_name):
    return True

  logger.info('Invalid project name: %s.', project_name)
  return False
|
| 1668 |
+
|
| 1669 |
+
|
| 1670 |
+
def _validate_language(language):
  """Returns True when |language| matches the supported-language pattern."""
  if LANGUAGE_REGEX.match(language):
    return True

  logger.error('Invalid project language %s.', language)
  return False
|
| 1676 |
+
|
| 1677 |
+
|
| 1678 |
+
def _create_build_integration_directory(directory):
|
| 1679 |
+
"""Returns True on successful creation of a build integration directory.
|
| 1680 |
+
Suitable for OSS-Fuzz and external projects."""
|
| 1681 |
+
try:
|
| 1682 |
+
os.makedirs(directory)
|
| 1683 |
+
except OSError as error:
|
| 1684 |
+
if error.errno != errno.EEXIST:
|
| 1685 |
+
raise
|
| 1686 |
+
logger.error('%s already exists.', directory)
|
| 1687 |
+
return False
|
| 1688 |
+
return True
|
| 1689 |
+
|
| 1690 |
+
|
| 1691 |
+
def _template_project_file(filename, template, template_args, directory):
|
| 1692 |
+
"""Templates |template| using |template_args| and writes the result to
|
| 1693 |
+
|directory|/|filename|. Sets the file to executable if |filename| is
|
| 1694 |
+
build.sh."""
|
| 1695 |
+
file_path = os.path.join(directory, filename)
|
| 1696 |
+
with open(file_path, 'w') as file_handle:
|
| 1697 |
+
file_handle.write(template % template_args)
|
| 1698 |
+
|
| 1699 |
+
if filename == 'build.sh':
|
| 1700 |
+
os.chmod(file_path, 0o755)
|
| 1701 |
+
|
| 1702 |
+
|
| 1703 |
+
def generate(args):
  """Generates empty project files."""
  # CLI wrapper; the actual work happens in _generate_impl.
  return _generate_impl(args.project, args.language)
|
| 1706 |
+
|
| 1707 |
+
|
| 1708 |
+
def _get_current_datetime():
|
| 1709 |
+
"""Returns this year. Needed for mocking."""
|
| 1710 |
+
return datetime.datetime.now()
|
| 1711 |
+
|
| 1712 |
+
|
| 1713 |
+
def _base_builder_from_language(language):
  """Returns the base builder for the specified language."""
  # Direct dict indexing: raises KeyError for an unmapped language.
  return LANGUAGE_TO_BASE_BUILDER_IMAGE[language]
|
| 1716 |
+
|
| 1717 |
+
|
| 1718 |
+
def _generate_impl(project, language):
  """Implementation of generate(). Useful for testing.

  Validates the project name (internal projects only) and language, creates
  the build integration directory, and writes each template file into it.

  Returns:
    True on success, False if any validation or directory creation fails.
  """
  if project.is_external:
    # External project.
    project_templates = templates.EXTERNAL_TEMPLATES
  else:
    # Internal project: only these get their name validated.
    if not _validate_project_name(project.name):
      return False
    project_templates = templates.TEMPLATES

  if not _validate_language(language):
    return False

  directory = project.build_integration_path
  if not _create_build_integration_directory(directory):
    return False

  logger.info('Writing new files to: %s.', directory)

  # Substitution values shared by every template file.
  template_args = {
      'project_name': project.name,
      'base_builder': _base_builder_from_language(language),
      'language': language,
      'year': _get_current_datetime().year
  }
  for filename, template in project_templates.items():
    _template_project_file(filename, template, template_args, directory)
  return True
|
| 1747 |
+
|
| 1748 |
+
|
| 1749 |
+
def shell(args):
  """Runs a shell within a docker image.

  Builds the project image, assembles the docker environment/mount
  arguments, and drops into /bin/bash inside the chosen image.

  Returns:
    True unless the image build fails (docker_run's result is ignored).
  """
  if not build_image_impl(args.project):
    return False

  env = [
      'FUZZING_ENGINE=' + args.engine,
      'SANITIZER=' + args.sanitizer,
      'ARCHITECTURE=' + args.architecture,
      'HELPER=True',
  ]

  # base-runner-debug has no project language to report.
  if args.project.name != 'base-runner-debug':
    env.append('FUZZING_LANGUAGE=' + args.project.language)

  if args.e:
    env += args.e

  # Base images come from the ghcr.io registry; project images are local
  # aixcc-afc images tagged via --docker-image-tag.
  if is_base_image(args.project.name):
    image_project = 'aixcc-finals'
    project_full = 'ghcr.io/%s/%s%s' % (image_project, args.project.name, BASE_IMAGE_TAG)
    out_dir = _get_out_dir()
  else:
    image_project = 'aixcc-afc'
    project_full = '%s/%s:%s' % (image_project, args.project.name, args.docker_image_tag)
    out_dir = args.project.out

  run_args = _env_to_docker_args(env)
  if args.source_path:
    # Mount the local source checkout over the image's WORKDIR.
    workdir = _workdir_from_dockerfile(args.project)
    run_args.extend([
        '-v',
        '%s:%s' % (_get_absolute_path(args.source_path), workdir),
    ])


  run_args.extend([
      '-v',
      '%s:/out' % out_dir, '-v',
      '%s:/work' % args.project.work, '-t',
      '%s' % (project_full), '/bin/bash'
  ])

  docker_run(run_args, architecture=args.architecture)
  return True
|
| 1794 |
+
|
| 1795 |
+
|
| 1796 |
+
def pull_images(language=None):
  """Pulls base images used to build projects in language lang (or all if lang
  is None)."""
  for base_image_lang, base_images in BASE_IMAGES.items():
    # 'generic' images are always pulled; language-specific ones only when
    # no filter is set or the filter matches.
    relevant = (language is None or base_image_lang == 'generic' or
                base_image_lang == language)
    if not relevant:
      continue
    for base_image in base_images:
      if not docker_pull(base_image):
        # Stop at the first failed pull, mirroring a fatal error.
        return False

  return True
|
| 1807 |
+
|
| 1808 |
+
|
| 1809 |
+
# Script entry point: exit with whatever status main() returns.
if __name__ == '__main__':
  sys.exit(main())
|
local-test-commons-compress-full-01-vuln_2/fuzz-tooling/infra/helper_test.py
ADDED
|
@@ -0,0 +1,239 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2021 Google LLC
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
"""Tests for helper.py"""
|
| 15 |
+
|
| 16 |
+
import datetime
|
| 17 |
+
import os
|
| 18 |
+
import tempfile
|
| 19 |
+
import unittest
|
| 20 |
+
from unittest import mock
|
| 21 |
+
|
| 22 |
+
from pyfakefs import fake_filesystem_unittest
|
| 23 |
+
|
| 24 |
+
import constants
|
| 25 |
+
import helper
|
| 26 |
+
import templates
|
| 27 |
+
|
| 28 |
+
# pylint: disable=no-self-use,protected-access
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
class ShellTest(unittest.TestCase):
  """Tests 'shell' command."""

  # docker_run and build_image_impl are mocked out so no docker daemon is
  # needed; their return values are unused by this test.
  @mock.patch('helper.docker_run')
  @mock.patch('helper.build_image_impl')
  def test_base_runner_debug(self, _, __):
    """Tests that shell base-runner-debug works as intended."""
    image_name = 'base-runner-debug'
    unparsed_args = ['shell', image_name]
    parser = helper.get_parser()
    args = helper.parse_args(parser, unparsed_args)
    # Sanitizer is set manually since it is not passed on the command line.
    args.sanitizer = 'address'
    result = helper.shell(args)
    self.assertTrue(result)
|
| 45 |
+
|
| 46 |
+
|
| 47 |
+
class BuildImageImplTest(unittest.TestCase):
  """Tests for build_image_impl."""

  @mock.patch('helper.docker_build')
  def test_no_cache(self, mock_docker_build):
    """Tests that cache=False is handled properly."""
    image_name = 'base-image'
    helper.build_image_impl(helper.Project(image_name), cache=False)
    # First positional arg of the first docker_build call is the arg list.
    self.assertIn('--no-cache', mock_docker_build.call_args_list[0][0][0])

  @mock.patch('helper.docker_build')
  @mock.patch('helper.pull_images')
  def test_pull(self, mock_pull_images, _):
    """Tests that pull=True is handled properly."""
    image_name = 'base-image'
    project = helper.Project(image_name, is_external=True)
    self.assertTrue(helper.build_image_impl(project, pull=True))
    # External projects default to the c++ language for pulls.
    mock_pull_images.assert_called_with('c++')

  @mock.patch('helper.docker_build')
  def test_base_image(self, mock_docker_build):
    """Tests that build_image_impl works as intended with a base-image."""
    image_name = 'base-image'
    self.assertTrue(helper.build_image_impl(helper.Project(image_name)))
    build_dir = os.path.join(helper.OSS_FUZZ_DIR,
                             'infra/base-images/base-image')
    mock_docker_build.assert_called_with([
        '-t', 'ghcr.io/aixcc-finals/base-image', '--file',
        os.path.join(build_dir, 'Dockerfile'), build_dir
    ])

  @mock.patch('helper.docker_build')
  def test_oss_fuzz_project(self, mock_docker_build):
    """Tests that build_image_impl works as intended with an OSS-Fuzz
    project."""
    project_name = 'example'
    self.assertTrue(helper.build_image_impl(helper.Project(project_name)))
    build_dir = os.path.join(helper.OSS_FUZZ_DIR, 'projects', project_name)
    mock_docker_build.assert_called_with([
        '-t', 'gcr.io/oss-fuzz/example', '--file',
        os.path.join(build_dir, 'Dockerfile'), build_dir
    ])

  @mock.patch('helper.docker_build')
  def test_external_project(self, mock_docker_build):
    """Tests that build_image_impl works as intended with a non-OSS-Fuzz
    project."""
    with tempfile.TemporaryDirectory() as temp_dir:
      project_src_path = os.path.join(temp_dir, 'example')
      os.mkdir(project_src_path)
      build_integration_path = 'build-integration'
      project = helper.Project(project_src_path,
                               is_external=True,
                               build_integration_path=build_integration_path)
      self.assertTrue(helper.build_image_impl(project))
      # External builds use the project source dir as the docker context.
      mock_docker_build.assert_called_with([
          '-t', 'gcr.io/oss-fuzz/example', '--file',
          os.path.join(project_src_path, build_integration_path, 'Dockerfile'),
          project_src_path
      ])
|
| 107 |
+
|
| 108 |
+
|
| 109 |
+
class GenerateImplTest(fake_filesystem_unittest.TestCase):
  """Tests for _generate_impl."""
  # Project name/language used by every test in this class.
  PROJECT_NAME = 'newfakeproject'
  PROJECT_LANGUAGE = 'python'

  def setUp(self):
    self.maxDiff = None  # pylint: disable=invalid-name
    # Fake filesystem with the real OSS-Fuzz tree mapped in read-only.
    self.setUpPyfakefs()
    self.fs.add_real_directory(helper.OSS_FUZZ_DIR)

  def _verify_templated_files(self, template_dict, directory, language):
    # Re-render each template with the same args the code under test used
    # and compare against the file that was written.
    template_args = {
        'project_name': self.PROJECT_NAME,
        'year': 2021,
        'base_builder': helper._base_builder_from_language(language),
        'language': language,
    }
    for filename, template in template_dict.items():
      file_path = os.path.join(directory, filename)
      with open(file_path, 'r') as file_handle:
        contents = file_handle.read()
      self.assertEqual(contents, template % template_args)

  @mock.patch('helper._get_current_datetime',
              return_value=datetime.datetime(year=2021, month=1, day=1))
  def test_generate_oss_fuzz_project(self, _):
    """Tests that the correct files are generated for an OSS-Fuzz project."""
    helper._generate_impl(helper.Project(self.PROJECT_NAME),
                          self.PROJECT_LANGUAGE)
    self._verify_templated_files(
        templates.TEMPLATES,
        os.path.join(helper.OSS_FUZZ_DIR, 'projects', self.PROJECT_NAME),
        self.PROJECT_LANGUAGE)

  def test_generate_external_project(self):
    """Tests that the correct files are generated for a non-OSS-Fuzz project."""
    build_integration_path = '/newfakeproject/build-integration'
    helper._generate_impl(
        helper.Project('/newfakeproject/',
                       is_external=True,
                       build_integration_path=build_integration_path),
        self.PROJECT_LANGUAGE)
    self._verify_templated_files(templates.EXTERNAL_TEMPLATES,
                                 build_integration_path, self.PROJECT_LANGUAGE)

  @mock.patch('helper._get_current_datetime',
              return_value=datetime.datetime(year=2021, month=1, day=1))
  def test_generate_swift_project(self, _):
    """Tests that the swift project uses the correct base image."""
    helper._generate_impl(helper.Project(self.PROJECT_NAME), 'swift')
    self._verify_templated_files(
        templates.TEMPLATES,
        os.path.join(helper.OSS_FUZZ_DIR, 'projects', self.PROJECT_NAME),
        'swift')
|
| 163 |
+
|
| 164 |
+
|
| 165 |
+
class ProjectTest(fake_filesystem_unittest.TestCase):
  """Tests for Project class."""

  def setUp(self):
    # One internal and one external project shared by all tests.
    self.project_name = 'project'
    self.internal_project = helper.Project(self.project_name)
    self.external_project_path = os.path.join('/path', 'to', self.project_name)
    self.external_project = helper.Project(self.external_project_path,
                                           is_external=True)
    self.setUpPyfakefs()

  def test_init_external_project(self):
    """Tests __init__ method for external projects."""
    self.assertEqual(self.external_project.name, self.project_name)
    self.assertEqual(self.external_project.path, self.external_project_path)
    self.assertEqual(
        self.external_project.build_integration_path,
        os.path.join(self.external_project_path,
                     constants.DEFAULT_EXTERNAL_BUILD_INTEGRATION_PATH))

  def test_init_internal_project(self):
    """Tests __init__ method for internal projects."""
    self.assertEqual(self.internal_project.name, self.project_name)
    path = os.path.join(helper.OSS_FUZZ_DIR, 'projects', self.project_name)
    self.assertEqual(self.internal_project.path, path)
    self.assertEqual(self.internal_project.build_integration_path, path)

  def test_dockerfile_path_internal_project(self):
    """Tests that dockerfile_path works as intended."""
    self.assertEqual(
        self.internal_project.dockerfile_path,
        os.path.join(helper.OSS_FUZZ_DIR, 'projects', self.project_name,
                     'Dockerfile'))

  def test_dockerfile_path_external_project(self):
    """Tests that dockerfile_path works as intended."""
    self.assertEqual(
        self.external_project.dockerfile_path,
        os.path.join(self.external_project_path,
                     constants.DEFAULT_EXTERNAL_BUILD_INTEGRATION_PATH,
                     'Dockerfile'))

  def test_out(self):
    """Tests that out works as intended."""
    # Accessing .out also creates the directory.
    out_dir = self.internal_project.out
    self.assertEqual(
        out_dir,
        os.path.join(helper.OSS_FUZZ_DIR, 'build', 'out', self.project_name))
    self.assertTrue(os.path.exists(out_dir))

  def test_work(self):
    """Tests that work works as intended."""
    work_dir = self.internal_project.work
    self.assertEqual(
        work_dir,
        os.path.join(helper.OSS_FUZZ_DIR, 'build', 'work', self.project_name))
    self.assertTrue(os.path.exists(work_dir))

  def test_corpus(self):
    """Tests that corpus works as intended."""
    corpus_dir = self.internal_project.corpus
    self.assertEqual(
        corpus_dir,
        os.path.join(helper.OSS_FUZZ_DIR, 'build', 'corpus', self.project_name))
    self.assertTrue(os.path.exists(corpus_dir))

  def test_language_internal_project(self):
    """Tests that language works as intended for an internal project."""
    project_yaml_path = os.path.join(self.internal_project.path, 'project.yaml')
    self.fs.create_file(project_yaml_path, contents='language: python')
    self.assertEqual(self.internal_project.language, 'python')

  def test_language_external_project(self):
    """Tests that language works as intended for an external project."""
    # External projects have no project.yaml, so c++ is the default.
    self.assertEqual(self.external_project.language, 'c++')
|
local-test-commons-compress-full-01-vuln_2/fuzz-tooling/infra/presubmit.py
ADDED
|
@@ -0,0 +1,549 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/usr/bin/env python3
|
| 2 |
+
# Copyright 2020 Google LLC
|
| 3 |
+
#
|
| 4 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 5 |
+
# you may not use this file except in compliance with the License.
|
| 6 |
+
# You may obtain a copy of the License at
|
| 7 |
+
#
|
| 8 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 9 |
+
#
|
| 10 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 11 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 12 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 13 |
+
# See the License for the specific language governing permissions and
|
| 14 |
+
# limitations under the License.
|
| 15 |
+
#
|
| 16 |
+
################################################################################
|
| 17 |
+
"""Checks code for common issues before submitting."""
|
| 18 |
+
|
| 19 |
+
import argparse
|
| 20 |
+
import os
|
| 21 |
+
import re
|
| 22 |
+
import subprocess
|
| 23 |
+
import sys
|
| 24 |
+
import unittest
|
| 25 |
+
import yaml
|
| 26 |
+
|
| 27 |
+
import constants
|
| 28 |
+
|
| 29 |
+
_SRC_ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
|
| 30 |
+
VALID_PROJECT_REGEX_STR = '^[a-z0-9_-]+$'
|
| 31 |
+
VALID_PROJECT_REGEX = re.compile(VALID_PROJECT_REGEX_STR)
|
| 32 |
+
|
| 33 |
+
|
| 34 |
+
def _is_project_file(actual_path, expected_filename):
|
| 35 |
+
"""Returns True if actual_path's name is |expected_filename| and is a file
|
| 36 |
+
that exists and is in in projects/."""
|
| 37 |
+
if os.path.basename(actual_path) != expected_filename:
|
| 38 |
+
return False
|
| 39 |
+
|
| 40 |
+
if os.path.basename(os.path.dirname(
|
| 41 |
+
os.path.dirname(actual_path))) != 'projects':
|
| 42 |
+
return False
|
| 43 |
+
|
| 44 |
+
return os.path.exists(actual_path)
|
| 45 |
+
|
| 46 |
+
|
| 47 |
+
# TODO: Check for -fsanitize=fuzzer in files as well.
|
| 48 |
+
|
| 49 |
+
|
| 50 |
+
def _check_one_lib_fuzzing_engine(build_sh_file):
  """Returns False if |build_sh_file| contains -lFuzzingEngine.
  This is deprecated behavior. $LIB_FUZZING_ENGINE should be used instead
  so that -fsanitize=fuzzer is used."""
  # Only project build.sh files are checked; everything else passes.
  if not _is_project_file(build_sh_file, 'build.sh'):
    return True

  with open(build_sh_file) as build_sh:
    build_sh_lines = build_sh.readlines()
  # start=1 so the reported number matches editor line numbering
  # (enumerate's default start of 0 was off by one).
  for line_num, line in enumerate(build_sh_lines, start=1):
    # Ignore anything after a '#': commented-out uses are fine.
    uncommented_code = line.split('#')[0]
    if '-lFuzzingEngine' in uncommented_code:
      print('Error: build.sh contains deprecated "-lFuzzingEngine" on line: '
            f'{line_num}. Please use "$LIB_FUZZING_ENGINE" instead.')
      return False
  return True
|
| 66 |
+
|
| 67 |
+
|
| 68 |
+
def check_lib_fuzzing_engine(paths):
  """Calls _check_one_lib_fuzzing_engine on each path in |paths|. Returns True
  if the result of every call is True."""
  # Explicit short-circuit loop: stop at the first failing path, matching
  # all()'s lazy evaluation over a generator.
  for path in paths:
    if not _check_one_lib_fuzzing_engine(path):
      return False
  return True
|
| 72 |
+
|
| 73 |
+
|
| 74 |
+
class ProjectYamlChecker:
  """Checks for a project.yaml file.

  The YAML is loaded once in the constructor. Each check_* method records
  failures through self.error(), which flips self.success to False; the
  methods themselves return None.
  """

  # Sections in a project.yaml and the constant values that they are allowed
  # to have.
  SECTIONS_AND_CONSTANTS = {
      'sanitizers': constants.SANITIZERS,
      'architectures': constants.ARCHITECTURES,
      'fuzzing_engines': constants.ENGINES,
  }

  # Note: this list must be updated when we allow new sections.
  VALID_SECTION_NAMES = [
      'architectures',
      'auto_ccs',
      'blackbox',
      'builds_per_day',
      'coverage_extra_args',
      'disabled',
      'fuzzing_engines',
      'help_url',
      'homepage',
      'language',
      'labels',  # For internal use only, hard to lint as it uses fuzzer names.
      'main_repo',
      'primary_contact',
      'run_tests',
      'sanitizers',
      'selective_unpack',
      'vendor_ccs',
      'view_restrictions',
      'file_github_issue',
  ]

  # Sections that every enabled project.yaml must define.
  REQUIRED_SECTIONS = ['main_repo']

  def __init__(self, filename):
    # Path of the project.yaml being checked; included in error messages.
    self.filename = filename
    with open(filename) as file_handle:
      self.data = yaml.safe_load(file_handle)

    # Flipped to False by self.error() when any check fails.
    self.success = True

  def do_checks(self):
    """Does all project.yaml checks. Returns True if they pass."""
    # Disabled projects are exempt from all checks.
    if self.is_disabled():
      return True

    checks = [
        self.check_project_yaml_constants,
        self.check_required_sections,
        self.check_valid_section_names,
        self.check_valid_emails,
        self.check_valid_language,
        self.check_valid_project_name,
    ]
    # Run every check (no early exit) so all errors are reported at once.
    for check_function in checks:
      check_function()
    return self.success

  def is_disabled(self):
    """Returns True if this project is disabled."""
    return self.data.get('disabled', False)

  def error(self, message):
    """Prints an error message and sets self.success to False."""
    self.success = False
    print(f'Error in {self.filename}: {message}')

  def check_valid_project_name(self):
    """Checks that the project has a valid name; records errors if not."""
    banned_names = ['google', 'g00gle']
    # The project name is the directory that contains the project.yaml.
    project_name = os.path.basename(os.path.dirname(self.filename))
    for banned_name in banned_names:
      if banned_name in project_name:
        self.error('Projects can\'t have \'google\' in the name.')
    if not VALID_PROJECT_REGEX.match(project_name):
      self.error(f'Projects must conform to regex {VALID_PROJECT_REGEX_STR}')

  def check_project_yaml_constants(self):
    """Checks that certain sections only contain allowed constant values."""
    for section, allowed_constants in self.SECTIONS_AND_CONSTANTS.items():
      if section not in self.data:
        continue
      actual_constants = self.data[section]
      allowed_constants_str = ', '.join(allowed_constants)
      for constant in actual_constants:
        if isinstance(constant, str):
          if constant not in allowed_constants:
            self.error(f'{constant} (in {section} section) is not a valid '
                       f'constant ({allowed_constants_str}).')
        elif isinstance(constant, dict):
          # The only alternative value allowed is the experimental flag, i.e.
          # `constant == {'memory': {'experimental': True}}`. Do not check the
          # experimental flag, but assert that the sanitizer is a valid one.
          if (len(constant.keys()) > 1 or
              list(constant.keys())[0] not in allowed_constants):
            self.error(f'Not allowed value in the project.yaml: {constant}')
        else:
          # Anything that is neither a string nor a dict is malformed.
          self.error(f'Not allowed value in the project.yaml: {constant}')

  def check_valid_section_names(self):
    """Checks that every top-level section name is in VALID_SECTION_NAMES."""
    for name in self.data:
      if name not in self.VALID_SECTION_NAMES:
        self.error(
            f'{name} is not a valid section name ({self.VALID_SECTION_NAMES})')

  def check_required_sections(self):
    """Checks that every section in REQUIRED_SECTIONS is present."""
    for section in self.REQUIRED_SECTIONS:
      if section not in self.data:
        self.error(f'{section} section is missing.')

  def check_valid_emails(self):
    """Checks that contact email addresses are valid looking."""
    # Get email addresses.
    email_addresses = []
    primary_contact = self.data.get('primary_contact')
    if primary_contact:
      email_addresses.append(primary_contact)
    auto_ccs = self.data.get('auto_ccs')
    if auto_ccs:
      email_addresses.extend(auto_ccs)

    # Check that email addresses seem normal.
    for email_address in email_addresses:
      if '@' not in email_address or '.' not in email_address:
        self.error(f'{email_address} is an invalid email address.')

  def check_valid_language(self):
    """Checks that the language is specified and supported."""
    language = self.data.get('language')
    if not language:
      self.error('Missing "language" attribute in project.yaml.')
    elif language not in constants.LANGUAGES:
      self.error(
          f'"language: {language}" is not supported ({constants.LANGUAGES}).')
|
| 212 |
+
|
| 213 |
+
|
| 214 |
+
def _check_one_project_yaml(project_yaml_filename):
  """Does checks on the project.yaml file. Returns True on success."""
  # "project.yml" in a project directory is a common misnaming; reject it.
  if _is_project_file(project_yaml_filename, 'project.yml'):
    print(project_yaml_filename, 'must be named project.yaml.')
    return False

  # Anything that isn't a project's project.yaml is out of scope here.
  if not _is_project_file(project_yaml_filename, 'project.yaml'):
    return True

  return ProjectYamlChecker(project_yaml_filename).do_checks()
|
| 225 |
+
|
| 226 |
+
|
| 227 |
+
def check_project_yaml(paths):
  """Runs _check_one_project_yaml on every path in |paths|. Returns True
  only when all of them pass. All paths are checked (no early exit) so
  every error is surfaced in one run."""
  success = True
  for path in paths:
    if not _check_one_project_yaml(path):
      success = False
  return success
|
| 231 |
+
|
| 232 |
+
|
| 233 |
+
def _check_one_seed_corpus(path):
|
| 234 |
+
"""Returns False and prints error if |path| is a seed corpus."""
|
| 235 |
+
if os.path.basename(os.path.dirname(os.path.dirname(path))) != 'projects':
|
| 236 |
+
return True
|
| 237 |
+
|
| 238 |
+
if os.path.splitext(path)[1] == '.zip':
|
| 239 |
+
print('Don\'t commit seed corpora into the ClusterFuzz repo,'
|
| 240 |
+
'they bloat it forever.')
|
| 241 |
+
return False
|
| 242 |
+
|
| 243 |
+
return True
|
| 244 |
+
|
| 245 |
+
|
| 246 |
+
def check_seed_corpus(paths):
  """Runs _check_one_seed_corpus on every path in |paths|. Returns True only
  when all of them pass. Every path is checked so all offenders are printed."""
  success = True
  for path in paths:
    if not _check_one_seed_corpus(path):
      success = False
  return success
|
| 250 |
+
|
| 251 |
+
|
| 252 |
+
def _check_one_apt_update(path):
|
| 253 |
+
"""Checks that a Dockerfile uses apt-update before apt-install"""
|
| 254 |
+
if os.path.basename(os.path.dirname(os.path.dirname(path))) != 'projects':
|
| 255 |
+
return True
|
| 256 |
+
|
| 257 |
+
if os.path.basename(path) != 'Dockerfile':
|
| 258 |
+
return True
|
| 259 |
+
|
| 260 |
+
with open(path, 'r') as file:
|
| 261 |
+
dockerfile = file.read()
|
| 262 |
+
if 'RUN apt install' in dockerfile or 'RUN apt-get install' in dockerfile:
|
| 263 |
+
print('Please add an "apt-get update" before "apt-get install". '
|
| 264 |
+
'Otherwise, a cached and outdated RUN layer may lead to install '
|
| 265 |
+
'failures in file %s.' % str(path))
|
| 266 |
+
return False
|
| 267 |
+
|
| 268 |
+
return True
|
| 269 |
+
|
| 270 |
+
|
| 271 |
+
def check_apt_update(paths):
  """Runs _check_one_apt_update on every path in |paths|. Returns True only
  when all Dockerfiles pass; every path is checked so all offenders print."""
  success = True
  for path in paths:
    if not _check_one_apt_update(path):
      success = False
  return success
|
| 274 |
+
|
| 275 |
+
|
| 276 |
+
def do_checks(changed_files):
  """Runs all presubmit checks. Returns False if any fails."""
  all_checks = (
      check_license,
      yapf,
      check_project_yaml,
      check_lib_fuzzing_engine,
      check_seed_corpus,
      check_apt_update,
  )
  # Evaluate every check eagerly (list, not generator) so we don't quit on
  # the first failure — reporting all errors at once means fewer
  # check-fix-check cycles for the user.
  results = [check(changed_files) for check in all_checks]
  return all(results)
|
| 291 |
+
|
| 292 |
+
|
| 293 |
+
_CHECK_LICENSE_FILENAMES = ['Dockerfile']
|
| 294 |
+
_CHECK_LICENSE_EXTENSIONS = [
|
| 295 |
+
'.bash',
|
| 296 |
+
'.c',
|
| 297 |
+
'.cc',
|
| 298 |
+
'.cpp',
|
| 299 |
+
'.css',
|
| 300 |
+
'.Dockerfile',
|
| 301 |
+
'.go',
|
| 302 |
+
'.h',
|
| 303 |
+
'.htm',
|
| 304 |
+
'.html',
|
| 305 |
+
'.java',
|
| 306 |
+
'.js',
|
| 307 |
+
'.proto',
|
| 308 |
+
'.py',
|
| 309 |
+
'.rs',
|
| 310 |
+
'.sh',
|
| 311 |
+
'.ts',
|
| 312 |
+
]
|
| 313 |
+
THIRD_PARTY_DIR_NAME = 'third_party'
|
| 314 |
+
|
| 315 |
+
_LICENSE_STRING = 'http://www.apache.org/licenses/LICENSE-2.0'
|
| 316 |
+
|
| 317 |
+
|
| 318 |
+
def check_license(paths):
|
| 319 |
+
"""Validates license header."""
|
| 320 |
+
if not paths:
|
| 321 |
+
return True
|
| 322 |
+
|
| 323 |
+
success = True
|
| 324 |
+
for path in paths:
|
| 325 |
+
path_parts = str(path).split(os.sep)
|
| 326 |
+
if any(path_part == THIRD_PARTY_DIR_NAME for path_part in path_parts):
|
| 327 |
+
continue
|
| 328 |
+
filename = os.path.basename(path)
|
| 329 |
+
extension = os.path.splitext(path)[1]
|
| 330 |
+
if (filename not in _CHECK_LICENSE_FILENAMES and
|
| 331 |
+
extension not in _CHECK_LICENSE_EXTENSIONS):
|
| 332 |
+
continue
|
| 333 |
+
|
| 334 |
+
with open(path) as file_handle:
|
| 335 |
+
if _LICENSE_STRING not in file_handle.read():
|
| 336 |
+
print('Missing license header in file %s.' % str(path))
|
| 337 |
+
success = False
|
| 338 |
+
|
| 339 |
+
return success
|
| 340 |
+
|
| 341 |
+
|
| 342 |
+
def bool_to_returncode(success):
  """Returns 0 if |success|. Otherwise returns 1."""
  # Print a human-readable verdict alongside the process exit code.
  print('Success.' if success else 'Failed.')
  return 0 if success else 1
|
| 350 |
+
|
| 351 |
+
|
| 352 |
+
def is_nonfuzzer_python(path):
  """Returns True for .py files that are not inside a projects/ directory."""
  _, extension = os.path.splitext(path)
  if extension != '.py':
    return False
  return '/projects/' not in path
|
| 355 |
+
|
| 356 |
+
|
| 357 |
+
def lint(_=None):
  """Runs python's linter on infra. Returns False if it fails linting."""
  # '--score no' keeps the output quiet; '-j 0' uses all available CPUs.
  pylint_command = [
      'python3', '-m', 'pylint', '--score', 'no', '-j', '0', 'infra'
  ]
  result = subprocess.run(pylint_command, check=False)
  return result.returncode == 0
|
| 364 |
+
|
| 365 |
+
|
| 366 |
+
def yapf(paths, validate=True):
  """Does yapf on |path| if it is Python file. Only validates format if
  |validate|. Otherwise, formats the file. Returns False if validation or
  formatting fails."""
  python_paths = [path for path in paths if is_nonfuzzer_python(path)]
  if not python_paths:
    return True

  # '-d' prints a diff (validation only); '-i' rewrites files in place.
  mode_flag = '-d' if validate else '-i'
  command = ['yapf', mode_flag, '-p', *python_paths]

  return subprocess.run(command, check=False).returncode == 0
|
| 380 |
+
|
| 381 |
+
|
| 382 |
+
def get_changed_files():
  """Returns a list of absolute paths of files changed in this git branch."""
  merge_base = subprocess.check_output(
      ['git', 'merge-base', 'HEAD', 'origin/HEAD']).strip().decode()

  diff_commands = [
      # Files modified by commits on this branch.
      ['git', 'diff', '--name-only', merge_base + '..'],
      # Files modified by uncommitted changes.
      ['git', 'diff', '--name-only'],
  ]

  changed_files = set()
  for diff_command in diff_commands:
    output = subprocess.check_output(diff_command).decode()
    for file_path in output.splitlines():
      # Deleted files still appear in the diff; keep only existing ones.
      if os.path.isfile(file_path):
        changed_files.add(file_path)

  print(f'Changed files: {" ".join(changed_files)}')
  return [os.path.abspath(f) for f in changed_files]
|
| 403 |
+
|
| 404 |
+
|
| 405 |
+
def run_build_tests():
  """Runs build tests because they can't be run in parallel."""
  build_dir = os.path.join(_SRC_ROOT, 'infra', 'build')
  discovered = unittest.TestLoader().discover(build_dir, pattern='*_test.py')
  suite = unittest.TestSuite([discovered])
  print('Running build tests.')
  result = unittest.TextTestRunner().run(suite)
  # Success means no test failed and no test errored.
  return not result.failures and not result.errors
|
| 415 |
+
|
| 416 |
+
|
| 417 |
+
def run_nonbuild_tests(parallel):
  """Runs all tests but build tests. Does them in parallel if |parallel|. The
  reason why we exclude build tests is because they use an emulator that
  prevents them from being used in parallel."""
  # We look for all project directories because otherwise pytest won't run
  # tests that are not in valid modules (e.g. "base-images").
  relevant_dirs = {os.path.dirname(file_path) for file_path in get_all_files()}

  # Use ignore-glob because ignore doesn't seem to work properly with the way
  # we pass directories to pytest.
  command = [
      'pytest',
      '--ignore-glob=infra/build/*',
      '--ignore-glob=projects/*',
  ]
  if parallel:
    command.extend(['-n', 'auto'])
  command += list(relevant_dirs)
  print('Running non-build tests.')

  # TODO(metzman): Get rid of this once config_utils stops using it.
  env = os.environ.copy()
  env['CIFUZZ_TEST'] = '1'

  return subprocess.run(command, check=False, env=env).returncode == 0
|
| 446 |
+
|
| 447 |
+
|
| 448 |
+
def run_tests(_=None, parallel=False, build_tests=True, nonbuild_tests=True):
  """Runs all unit tests."""
  nonbuild_success = True
  build_success = True

  if not nonbuild_tests:
    print('Skipping nonbuild tests as specified.')
  else:
    nonbuild_success = run_nonbuild_tests(parallel)

  if not build_tests:
    print('Skipping build tests as specified.')
  else:
    build_success = run_build_tests()

  return nonbuild_success and build_success
|
| 463 |
+
|
| 464 |
+
|
| 465 |
+
def run_systemsan_tests(_=None):
  """Runs SystemSan unit tests."""
  # SystemSan's tests are driven by its Makefile.
  result = subprocess.run(['make', 'test'],
                          cwd='infra/experimental/SystemSan',
                          check=False)
  return result.returncode == 0
|
| 471 |
+
|
| 472 |
+
|
| 473 |
+
def get_all_files():
  """Returns a list of absolute paths of files in this repo."""
  listing = subprocess.check_output(['git', 'ls-files']).decode().splitlines()
  # git ls-files can list paths that no longer exist; keep only real files.
  return [os.path.abspath(path) for path in listing if os.path.isfile(path)]
|
| 478 |
+
|
| 479 |
+
|
| 480 |
+
def main():
  """Check changes on a branch for common issues before submitting."""
  # Get program arguments.
  parser = argparse.ArgumentParser(description='Presubmit script for oss-fuzz.')
  parser.add_argument(
      'command',
      choices=['format', 'lint', 'license', 'infra-tests', 'systemsan-tests'],
      nargs='?')
  parser.add_argument('-a',
                      '--all-files',
                      action='store_true',
                      help='Run presubmit check(s) on all files',
                      default=False)
  parser.add_argument('-p',
                      '--parallel',
                      action='store_true',
                      help='Run tests in parallel.',
                      default=False)
  parser.add_argument('-s',
                      '--skip-build-tests',
                      action='store_true',
                      help='Skip build tests which are slow and must run '
                      'sequentially.',
                      default=False)
  parser.add_argument('-n',
                      '--skip-nonbuild-tests',
                      action='store_true',
                      help='Only do build tests.',
                      default=False)
  args = parser.parse_args()

  relevant_files = get_all_files() if args.all_files else get_changed_files()

  # All checks assume they run from the repository root.
  os.chdir(_SRC_ROOT)

  # Do one specific check if the user asked for it.
  if args.command == 'format':
    return bool_to_returncode(yapf(relevant_files, False))

  if args.command == 'lint':
    return bool_to_returncode(lint())

  if args.command == 'license':
    return bool_to_returncode(check_license(relevant_files))

  if args.command == 'infra-tests':
    success = run_tests(relevant_files,
                        parallel=args.parallel,
                        build_tests=(not args.skip_build_tests),
                        nonbuild_tests=(not args.skip_nonbuild_tests))
    return bool_to_returncode(success)

  if args.command == 'systemsan-tests':
    return bool_to_returncode(run_systemsan_tests(relevant_files))

  # No subcommand: do all the checks (but no tests).
  return bool_to_returncode(do_checks(relevant_files))
|
| 546 |
+
|
| 547 |
+
|
| 548 |
+
if __name__ == '__main__':
|
| 549 |
+
sys.exit(main())
|
local-test-commons-compress-full-01-vuln_2/fuzz-tooling/infra/pytest.ini
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
[pytest]
|
| 2 |
+
python_files = *_test.py
|
| 3 |
+
log_cli = true
|
local-test-commons-compress-full-01-vuln_2/fuzz-tooling/infra/repo_manager.py
ADDED
|
@@ -0,0 +1,272 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2019 Google LLC
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
"""Class to manage a git repository via python.
|
| 15 |
+
|
| 16 |
+
This class is to be used to implement git commands over
|
| 17 |
+
a python API and manage the current state of the git repo.
|
| 18 |
+
|
| 19 |
+
Typical usage example:
|
| 20 |
+
|
| 21 |
+
r_man = RepoManager('https://github.com/google/oss-fuzz.git')
|
| 22 |
+
r_man.checkout('5668cc422c2c92d38a370545d3591039fb5bb8d4')
|
| 23 |
+
"""
|
| 24 |
+
import datetime
|
| 25 |
+
import logging
|
| 26 |
+
import os
|
| 27 |
+
import shutil
|
| 28 |
+
|
| 29 |
+
import urllib.parse
|
| 30 |
+
|
| 31 |
+
import utils
|
| 32 |
+
|
| 33 |
+
|
| 34 |
+
class RepoManager:
  """Repo manager.

  Thin wrapper around git subcommands, executed via utils.execute in
  self.repo_dir. Methods either return (stdout, stderr, returncode) tuples
  or parse that output; several raise when check_result=True is passed.
  """

  def __init__(self, repo_dir):
    # Path of the git working tree this manager operates on.
    self.repo_dir = repo_dir

  def _is_git_repo(self):
    """Test if the current repo dir is a git repo or not.

    Returns:
      True if the current repo_dir is a valid git repo.
    """
    # Presence of a .git directory is used as the indicator.
    git_path = os.path.join(self.repo_dir, '.git')
    return os.path.isdir(git_path)

  def git(self, cmd, check_result=False):
    """Run a git command.

    Args:
      cmd: The git command as a list to be run.
      check_result: Should an exception be thrown on failed command.

    Returns:
      stdout, stderr, error code.
    """
    return utils.execute(['git'] + cmd,
                         location=self.repo_dir,
                         check_result=check_result)

  def commit_exists(self, commit):
    """Checks to see if a commit exists in the project repo.

    Args:
      commit: The commit SHA you are checking.

    Returns:
      True if the commit exists in the project.
    """
    # Guard against empty/whitespace-only input before calling git.
    if not commit.rstrip():
      return False

    # 'cat-file -e' exits zero only when the object exists.
    _, _, err_code = self.git(['cat-file', '-e', commit])
    return not err_code

  def commit_date(self, commit):
    """Get the date of a commit.

    Args:
      commit: The commit hash.

    Returns:
      A datetime representing the date of the commit.
    """
    # %ct is the committer date as a unix timestamp; parsed as UTC below.
    out, _, _ = self.git(['show', '-s', '--format=%ct', commit],
                         check_result=True)
    return datetime.datetime.fromtimestamp(int(out), tz=datetime.timezone.utc)

  def get_git_diff(self, base='origin...'):
    """Gets a list of files that have changed from the repo head.

    Args:
      base: The diff base to compare against (default 'origin...').

    Returns:
      A list of changed file paths or None on Error.
    """
    self.fetch_unshallow()
    # Add '--' so that git knows we aren't talking about files.
    command = ['diff', '--name-only', base, '--']
    out, err_msg, err_code = self.git(command)
    if err_code:
      logging.error('Git diff failed with error message %s.', err_msg)
      return None
    if not out:
      logging.error('No diff was found.')
      return None
    return [line for line in out.splitlines() if line]

  def get_current_commit(self):
    """Gets the current commit SHA of the repo.

    Returns:
      The current active commit SHA.
    """
    out, _, _ = self.git(['rev-parse', 'HEAD'], check_result=True)
    return out.strip()

  def get_parent(self, commit, count):
    """Gets the count'th parent of the given commit.

    Args:
      commit: The commit whose ancestor is wanted.
      count: How many generations back to walk.

    Returns:
      The parent commit SHA, or None when it does not exist.
    """
    self.fetch_unshallow()
    # '<commit>~<count>' is git's first-parent ancestry syntax.
    out, _, err_code = self.git(['rev-parse', commit + '~' + str(count)],
                                check_result=False)
    if err_code:
      return None

    return out.strip()

  def fetch_all_remotes(self):
    """Fetch all remotes for checkouts that track a single branch."""
    # Widen the fetch refspec so all remote branches become visible.
    self.git([
        'config', 'remote.origin.fetch', '+refs/heads/*:refs/remotes/origin/*'
    ],
             check_result=True)
    self.git(['remote', 'update'], check_result=True)

  def get_commit_list(self, newest_commit, oldest_commit=None, limit=None):
    """Gets the list of commits(inclusive) between the old and new commits.

    Args:
      newest_commit: The newest commit to be in the list.
      oldest_commit: The (optional) oldest commit to be in the list.
      limit: The (optional) maximum number of commits rev-list may return.

    Returns:
      The list of commit SHAs from newest to oldest.

    Raises:
      ValueError: When either the oldest or newest commit does not exist.
      RuntimeError: When there is an error getting the commit list.
    """
    self.fetch_unshallow()
    if oldest_commit and not self.commit_exists(oldest_commit):
      raise ValueError('The oldest commit %s does not exist' % oldest_commit)
    if not self.commit_exists(newest_commit):
      raise ValueError('The newest commit %s does not exist' % newest_commit)
    if oldest_commit == newest_commit:
      return [oldest_commit]

    if oldest_commit:
      # 'old..new' excludes the old endpoint; it is re-appended below.
      commit_range = oldest_commit + '..' + newest_commit
    else:
      commit_range = newest_commit

    limit_args = []
    if limit:
      limit_args.append(f'--max-count={limit}')

    out, _, err_code = self.git(['rev-list', commit_range] + limit_args)
    commits = out.split('\n')
    commits = [commit for commit in commits if commit]
    if err_code or not commits:
      raise RuntimeError('Error getting commit list between %s and %s ' %
                         (oldest_commit, newest_commit))

    # Make sure result is inclusive
    if oldest_commit:
      commits.append(oldest_commit)
    return commits

  def fetch_branch(self, branch):
    """Fetches a remote branch from origin."""
    return self.git(
        ['fetch', 'origin', '{branch}:{branch}'.format(branch=branch)])

  def fetch_unshallow(self):
    """Gets the current git repository history."""
    # The .git/shallow file exists only for shallow clones; unshallow those.
    shallow_file = os.path.join(self.repo_dir, '.git', 'shallow')
    if os.path.exists(shallow_file):
      # Best effort: log but don't raise when the unshallow fetch fails.
      _, err, err_code = self.git(['fetch', '--unshallow'], check_result=False)
      if err_code:
        logging.error('Unshallow returned non-zero code: %s', err)

  def checkout_pr(self, pr_ref):
    """Checks out a remote pull request.

    Args:
      pr_ref: The pull request reference to be checked out.
    """
    self.fetch_unshallow()
    self.git(['fetch', 'origin', pr_ref], check_result=True)
    # '-f' discards local modifications so the checkout cannot be blocked.
    self.git(['checkout', '-f', 'FETCH_HEAD'], check_result=True)
    self.git(['submodule', 'update', '-f', '--init', '--recursive'],
             check_result=True)

  def checkout_commit(self, commit, clean=True):
    """Checks out a specific commit from the repo.

    Args:
      commit: The commit SHA to be checked out.
      clean: Whether to also remove untracked/ignored files after checkout.

    Raises:
      RuntimeError: when checkout is not successful.
      ValueError: when commit does not exist.
    """
    self.fetch_unshallow()
    if not self.commit_exists(commit):
      raise ValueError('Commit %s does not exist in current branch' % commit)
    self.git(['checkout', '-f', commit], check_result=True)
    self.git(['submodule', 'update', '-f', '--init', '--recursive'],
             check_result=True)
    if clean:
      # '-fxd' also deletes ignored files and untracked directories.
      self.git(['clean', '-fxd'], check_result=True)
    # Verify the working tree actually landed on the requested commit.
    if self.get_current_commit() != commit:
      raise RuntimeError('Error checking out commit %s' % commit)

  def remove_repo(self):
    """Removes the git repo from disk."""
    if os.path.isdir(self.repo_dir):
      shutil.rmtree(self.repo_dir)
|
| 233 |
+
|
| 234 |
+
|
| 235 |
+
def clone_repo_and_get_manager(repo_url,
                               base_dir,
                               repo_name=None,
                               username=None,
                               password=None):
  """Clones a repo and constructs a repo manager class.

  Args:
    repo_url: The github url needed to clone.
    base_dir: The full file-path where the git repo is located.
    repo_name: The name of the directory the repo is cloned to.
    username: Optional username for an authenticated clone.
    password: Optional password for an authenticated clone.

  Returns:
    A RepoManager for the (possibly freshly cloned) repository.
  """
  if repo_name is None:
    # Derive the directory name from the URL, e.g. ".../oss-fuzz.git" -> "oss-fuzz".
    repo_name = os.path.basename(repo_url).replace('.git', '')
  repo_dir = os.path.join(base_dir, repo_name)

  # Only clone when the target directory is absent; otherwise reuse it.
  if not os.path.exists(repo_dir):
    _clone(repo_url, base_dir, repo_name, username=username, password=password)

  return RepoManager(repo_dir)
|
| 256 |
+
|
| 257 |
+
|
| 258 |
+
def _clone(repo_url, base_dir, repo_name, username=None, password=None):
  """Creates a clone of the repo in the specified directory.

  Raises:
    ValueError: when the repo is not able to be cloned.
  """
  if username and password:
    # Embed the credentials into the URL's netloc for an authenticated clone.
    parsed_url = urllib.parse.urlparse(repo_url)
    authed_netloc = f'{username}:{password}@{parsed_url.netloc}'
    repo_url = urllib.parse.urlunparse(parsed_url._replace(netloc=authed_netloc))

  # Suppress command logging when a password is present so it never leaks.
  utils.execute(['git', 'clone', repo_url, repo_name],
                location=base_dir,
                check_result=True,
                log_command=not password)
|
local-test-commons-compress-full-01-vuln_2/fuzz-tooling/infra/repo_manager_test.py
ADDED
|
@@ -0,0 +1,201 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2019 Google LLC
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
"""Test the functionality of the RepoManager class."""
|
| 15 |
+
|
| 16 |
+
import contextlib
|
| 17 |
+
import os
|
| 18 |
+
import tempfile
|
| 19 |
+
import unittest
|
| 20 |
+
from unittest import mock
|
| 21 |
+
|
| 22 |
+
import repo_manager
|
| 23 |
+
import utils
|
| 24 |
+
|
| 25 |
+
# pylint: disable=protected-access
|
| 26 |
+
|
| 27 |
+
OSS_FUZZ_REPO_URL = 'https://github.com/google/oss-fuzz'
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
@contextlib.contextmanager
def get_oss_fuzz_repo():
  """Context manager yielding the path to a temporary clone of the OSS-Fuzz
  repo. The clone (and its parent temp dir) is deleted on exit."""
  with tempfile.TemporaryDirectory() as tmp_dir:
    checkout_name = 'oss-fuzz'
    repo_manager._clone(OSS_FUZZ_REPO_URL, tmp_dir, checkout_name)
    yield os.path.join(tmp_dir, checkout_name)
| 39 |
+
|
| 40 |
+
class CloneTest(unittest.TestCase):
  """Tests the _clone function."""

  @unittest.skipIf(not os.getenv('INTEGRATION_TESTS'),
                   'INTEGRATION_TESTS=1 not set')
  def test_clone_valid_repo_integration(self):
    """Integration test that tests the correct location of the git repo."""
    with get_oss_fuzz_repo() as oss_fuzz_repo:
      # A successful clone leaves a .git directory at the checkout root.
      self.assertTrue(os.path.isdir(os.path.join(oss_fuzz_repo, '.git')))

  def test_clone_invalid_repo(self):
    """Tests that cloning an invalid repo will fail."""
    with tempfile.TemporaryDirectory() as tmp_dir:
      with self.assertRaises(RuntimeError):
        repo_manager._clone('https://github.com/oss-fuzz-not-real.git', tmp_dir,
                            'oss-fuzz')

  @mock.patch('utils.execute')
  def test_clone_with_username(self, mock_execute):  # pylint: disable=no-self-use
    """Test clone with username."""
    repo_manager._clone('https://github.com/fake/repo.git',
                        '/',
                        'name',
                        username='user',
                        password='password')
    # Credentials must be embedded in the URL and command logging disabled.
    expected_command = [
        'git', 'clone', 'https://user:password@github.com/fake/repo.git', 'name'
    ]
    mock_execute.assert_called_once_with(expected_command,
                                         location='/',
                                         check_result=True,
                                         log_command=False)
|
| 73 |
+
|
| 74 |
+
@unittest.skipIf(not os.getenv('INTEGRATION_TESTS'),
                 'INTEGRATION_TESTS=1 not set')
class RepoManagerCheckoutTest(unittest.TestCase):
  """Tests the checkout functionality of RepoManager."""

  def test_checkout_valid_commit(self):
    """Tests that the git checkout command works."""
    known_commit = '04ea24ee15bbe46a19e5da6c5f022a2ffdfbdb3b'
    with get_oss_fuzz_repo() as oss_fuzz_repo:
      manager = repo_manager.RepoManager(oss_fuzz_repo)
      manager.checkout_commit(known_commit)
      self.assertEqual(known_commit, manager.get_current_commit())

  def test_checkout_invalid_commit(self):
    """Tests that the git checkout invalid commit fails."""
    bad_commits = (
        ' ',
        'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa',
        'not-a-valid-commit',
    )
    with get_oss_fuzz_repo() as oss_fuzz_repo:
      manager = repo_manager.RepoManager(oss_fuzz_repo)
      for bad_commit in bad_commits:
        with self.assertRaises(ValueError):
          manager.checkout_commit(bad_commit)
|
| 98 |
+
|
| 99 |
+
@unittest.skipIf(not os.getenv('INTEGRATION_TESTS'),
                 'INTEGRATION_TESTS=1 not set')
class RepoManagerGetCommitListTest(unittest.TestCase):
  """Tests the get_commit_list method of RepoManager."""

  def test_get_valid_commit_list(self):
    """Tests an accurate commit list can be retrieved from the repo manager."""
    old_commit = '04ea24ee15bbe46a19e5da6c5f022a2ffdfbdb3b'
    new_commit = 'fa662173bfeb3ba08d2e84cefc363be11e6c8463'
    expected = [
        'fa662173bfeb3ba08d2e84cefc363be11e6c8463',
        '17035317a44fa89d22fe6846d868d4bf57def78b',
        '97dee00a3c4ce95071c3e061592f5fd577dea886',
        '04ea24ee15bbe46a19e5da6c5f022a2ffdfbdb3b'
    ]
    with get_oss_fuzz_repo() as oss_fuzz_repo:
      manager = repo_manager.RepoManager(oss_fuzz_repo)
      self.assertListEqual(expected,
                           manager.get_commit_list(new_commit, old_commit))

  def test_get_invalid_commit_list(self):
    """Tests that the proper errors are thrown when invalid commits are
    passed to get_commit_list."""
    old_commit = '04ea24ee15bbe46a19e5da6c5f022a2ffdfbdb3b'
    new_commit = 'fa662173bfeb3ba08d2e84cefc363be11e6c8463'
    with get_oss_fuzz_repo() as oss_fuzz_repo:
      manager = repo_manager.RepoManager(oss_fuzz_repo)
      with self.assertRaises(ValueError):
        manager.get_commit_list('fakecommit', new_commit)
      with self.assertRaises(ValueError):
        manager.get_commit_list(new_commit, 'fakecommit')
      # Arguments deliberately swapped relative to the valid call above.
      with self.assertRaises(RuntimeError):
        manager.get_commit_list(old_commit, new_commit)  # pylint: disable=arguments-out-of-order
|
| 133 |
+
|
| 134 |
+
@unittest.skipIf(not os.getenv('INTEGRATION_TESTS'),
                 'INTEGRATION_TESTS=1 not set')
class GitDiffTest(unittest.TestCase):
  """Tests get_git_diff."""

  def test_diff_exists(self):
    """Tests that a real diff is returned when a valid repo manager exists."""
    with get_oss_fuzz_repo() as oss_fuzz_repo:
      manager = repo_manager.RepoManager(oss_fuzz_repo)
      fake_result = ('test.py\ndiff.py', None, 0)
      with mock.patch.object(utils, 'execute', return_value=fake_result):
        self.assertCountEqual(manager.get_git_diff(), ['test.py', 'diff.py'])

  def test_diff_empty(self):
    """Tests that None is returned when there is no difference between repos."""
    with get_oss_fuzz_repo() as oss_fuzz_repo:
      manager = repo_manager.RepoManager(oss_fuzz_repo)
      with mock.patch.object(utils, 'execute', return_value=('', None, 0)):
        self.assertIsNone(manager.get_git_diff())

  def test_error_on_command(self):
    """Tests that None is returned when the command errors out."""
    with get_oss_fuzz_repo() as oss_fuzz_repo:
      manager = repo_manager.RepoManager(oss_fuzz_repo)
      failed_result = ('', 'Test error.', 1)
      with mock.patch.object(utils, 'execute', return_value=failed_result):
        self.assertIsNone(manager.get_git_diff())

  def test_diff_no_change(self):
    """Tests that None is returned when there is no difference between repos."""
    with get_oss_fuzz_repo() as oss_fuzz_repo:
      manager = repo_manager.RepoManager(oss_fuzz_repo)
      # No mocking: a fresh clone has no local changes to diff.
      self.assertIsNone(manager.get_git_diff())
|
| 174 |
+
|
| 175 |
+
@unittest.skipIf(not os.getenv('INTEGRATION_TESTS'),
                 'INTEGRATION_TESTS=1 not set')
class CheckoutPrIntegrationTest(unittest.TestCase):
  """Does Integration tests on the checkout_pr method of RepoManager."""

  def test_pull_request_exists(self):
    """Tests that a diff is returned when a valid PR is checked out."""
    with get_oss_fuzz_repo() as oss_fuzz_repo:
      manager = repo_manager.RepoManager(oss_fuzz_repo)
      manager.checkout_pr('refs/pull/3415/merge')
      self.assertCountEqual(manager.get_git_diff(), ['README.md'])

  def test_checkout_invalid_pull_request(self):
    """Tests that the git checkout invalid pull request fails."""
    bad_refs = (
        ' ',
        'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa',
        'not/a/valid/pr',
    )
    with get_oss_fuzz_repo() as oss_fuzz_repo:
      manager = repo_manager.RepoManager(oss_fuzz_repo)
      for bad_ref in bad_refs:
        with self.assertRaises(RuntimeError):
          manager.checkout_pr(bad_ref)
|
| 199 |
+
|
| 200 |
+
# Allow running this test module directly (python repo_manager_test.py).
if __name__ == '__main__':
  unittest.main()
|
local-test-commons-compress-full-01-vuln_2/fuzz-tooling/infra/run_fuzzers.Dockerfile
ADDED
|
@@ -0,0 +1,31 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################
# Docker image for running fuzzers on CIFuzz (the run_fuzzers action on GitHub
# actions).

FROM ghcr.io/aixcc-finals/cifuzz-base

# Python script executed when the container starts. The path is hard-coded
# instead of using $OSS_FUZZ_ROOT: the env var is a constant, so it is simply
# expanded to '/opt/oss-fuzz' here.
ENTRYPOINT ["python3", "/opt/oss-fuzz/infra/cifuzz/run_fuzzers_entrypoint.py"]

WORKDIR ${OSS_FUZZ_ROOT}/infra

# Bring the infra source code into the image.
ADD . ${OSS_FUZZ_ROOT}/infra

RUN python3 -m pip install -r ${OSS_FUZZ_ROOT}/infra/cifuzz/requirements.txt
|
local-test-commons-compress-full-01-vuln_2/fuzz-tooling/infra/templates.py
ADDED
|
@@ -0,0 +1,119 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2016 Google Inc.
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
#
|
| 15 |
+
################################################################################
|
| 16 |
+
"""Templates for OSS-Fuzz project files."""
|
| 17 |
+
|
| 18 |
+
# project.yaml skeleton for projects checked into the OSS-Fuzz repo.
# %-style substitution key: language.
PROJECT_YAML_TEMPLATE = """\
homepage: "<your_project_homepage>"
language: %(language)s
primary_contact: "<primary_contact_email>"
main_repo: "https://path/to/main/repo.git"
file_github_issue: true
"""

# Dockerfile skeleton for in-tree projects.
# %-style substitution keys: year, base_builder, project_name.
DOCKER_TEMPLATE = """\
# Copyright %(year)d Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################

FROM ghcr.io/aixcc-finals/%(base_builder)s
RUN apt-get update && apt-get install -y make autoconf automake libtool
RUN git clone --depth 1 <git_url> %(project_name)s # or use other version control
WORKDIR %(project_name)s
COPY build.sh $SRC/
"""

# Dockerfile skeleton for external (ClusterFuzzLite) projects; the project
# source is copied from the build context rather than cloned.
# %-style substitution keys: base_builder, project_name.
EXTERNAL_DOCKER_TEMPLATE = """\
FROM ghcr.io/aixcc-finals/%(base_builder)s:v1
RUN apt-get update && apt-get install -y make autoconf automake libtool
COPY . $SRC/%(project_name)s
WORKDIR %(project_name)s
COPY .clusterfuzzlite/build.sh $SRC/
"""

# build.sh skeleton for in-tree projects. %-style substitution key: year.
BUILD_TEMPLATE = """\
#!/bin/bash -eu
# Copyright %(year)d Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################

# build project
# e.g.
# ./autogen.sh
# ./configure
# make -j$(nproc) all

# build fuzzers
# e.g.
# $CXX $CXXFLAGS -std=c++11 -Iinclude \\
# /path/to/name_of_fuzzer.cc -o $OUT/name_of_fuzzer \\
# $LIB_FUZZING_ENGINE /path/to/library.a
"""

# build.sh skeleton for external projects (no license header, no
# substitutions).
EXTERNAL_BUILD_TEMPLATE = """\
#!/bin/bash -eu

# build project
# e.g.
# ./autogen.sh
# ./configure
# make -j$(nproc) all

# build fuzzers
# e.g.
# $CXX $CXXFLAGS -std=c++11 -Iinclude \\
# /path/to/name_of_fuzzer.cc -o $OUT/name_of_fuzzer \\
# $LIB_FUZZING_ENGINE /path/to/library.a
"""

# project.yaml skeleton for external projects.
# %-style substitution key: language.
EXTERNAL_PROJECT_YAML_TEMPLATE = """\
language: %(language)s
"""

# Filename -> template mappings used when generating a new project.
TEMPLATES = {
    'build.sh': BUILD_TEMPLATE,
    'Dockerfile': DOCKER_TEMPLATE,
    'project.yaml': PROJECT_YAML_TEMPLATE
}

EXTERNAL_TEMPLATES = {
    'build.sh': EXTERNAL_BUILD_TEMPLATE,
    'Dockerfile': EXTERNAL_DOCKER_TEMPLATE,
    'project.yaml': EXTERNAL_PROJECT_YAML_TEMPLATE
}
|
local-test-commons-compress-full-01-vuln_2/fuzz-tooling/infra/test
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
|
local-test-commons-compress-full-01-vuln_2/fuzz-tooling/infra/test_repos.py
ADDED
|
@@ -0,0 +1,84 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2020 Google LLC
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
"""This module contains a list of test repository's used in unit/integration
|
| 15 |
+
tests.
|
| 16 |
+
|
| 17 |
+
Note: If you notice tests failing for unexpected reasons, make sure the data
|
| 18 |
+
in the test repos are correct. This is because the test repos are dynamic and
|
| 19 |
+
may change.
|
| 20 |
+
|
| 21 |
+
Note: This should be removed when a better method of testing is established.
|
| 22 |
+
"""
|
| 23 |
+
|
| 24 |
+
import collections
|
| 25 |
+
import os
|
| 26 |
+
|
| 27 |
+
# Record describing one repository used by the integration tests.
ExampleRepo = collections.namedtuple('ExampleRepo', [
    'project_name', 'oss_repo_name', 'git_repo_name', 'image_location',
    'git_url', 'new_commit', 'old_commit', 'intro_commit', 'fuzz_target',
    'testcase_path'
])

# Directory (next to this module) holding the test-case files referenced
# below.
TEST_DIR_PATH = os.path.join(os.path.dirname(os.path.realpath(__file__)),
                             'testcases')

# WARNING: Tests are dependent upon the following repos existing and the
# specified commits existing.
# TODO(metzman): Fix this problem.
# TODO(metzman): The testcases got deleted here because the test that used them
# was skipped. Probably worth deleting the test.
TEST_REPOS = [
    ExampleRepo(project_name='curl',
                oss_repo_name='curl',
                git_repo_name='curl',
                image_location='/src',
                git_url='https://github.com/curl/curl.git',
                old_commit='df26f5f9c36e19cd503c0e462e9f72ad37b84c82',
                new_commit='dda418266c99ceab368d723facb52069cbb9c8d5',
                intro_commit='df26f5f9c36e19cd503c0e462e9f72ad37b84c82',
                fuzz_target='curl_fuzzer_ftp',
                testcase_path=os.path.join(TEST_DIR_PATH, 'curl_test_data')),
    ExampleRepo(project_name='libarchive',
                oss_repo_name='libarchive',
                git_repo_name='libarchive',
                image_location='/src',
                git_url='https://github.com/libarchive/libarchive.git',
                old_commit='5bd2a9b6658a3a6efa20bb9ad75bd39a44d71da6',
                new_commit='458e49358f17ec58d65ab1c45cf299baaf3c98d1',
                intro_commit='840266712006de5e737f8052db920dfea2be4260',
                fuzz_target='libarchive_fuzzer',
                testcase_path=os.path.join(TEST_DIR_PATH,
                                           'libarchive_test_data')),
    # gonids has no commit/target data: only its URL and image location are
    # exercised by tests.
    ExampleRepo(project_name='gonids',
                oss_repo_name='gonids',
                git_repo_name='gonids',
                image_location='/root/go/src/github.com/google/',
                git_url='https://github.com/google/gonids',
                old_commit='',
                new_commit='',
                intro_commit='',
                fuzz_target='',
                testcase_path='')
]

# A repo that deliberately does not exist, for negative-path tests.
INVALID_REPO = ExampleRepo(project_name='notaproj',
                           oss_repo_name='notarepo',
                           git_repo_name='notarepo',
                           git_url='invalid.git',
                           image_location='/src',
                           old_commit='aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa',
                           new_commit='aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa',
                           intro_commit='aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa',
                           fuzz_target='NONEFUZZER',
                           testcase_path='not/a/path')
|