Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- .gitattributes +7 -0
- local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/7z-empty-mhc-off.7z +3 -0
- local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/7z-hello-mhc-off-lzma2.7z +3 -0
- local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/COMPRESS-114.tar +3 -0
- local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/COMPRESS-131.bz2 +3 -0
- local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/COMPRESS-256.7z +3 -0
- local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/COMPRESS-279-fail.tar +3 -0
- local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/COMPRESS-335.tar +3 -0
- local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/COMPRESS-348.7z +3 -0
- local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/COMPRESS-358.uncompressed +3 -0
- local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/COMPRESS-417.tar +3 -0
- local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/COMPRESS-477/split_zip_created_by_winrar/split_zip_created_by_winrar.zip +3 -0
- local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/COMPRESS-477/split_zip_created_by_winrar/zip_to_compare_created_by_winrar.zip +3 -0
- local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/COMPRESS-477/split_zip_created_by_zip/split_zip_created_by_zip.z01 +3 -0
- local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/COMPRESS-477/split_zip_created_by_zip/split_zip_created_by_zip.z02 +3 -0
- local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/COMPRESS-477/split_zip_created_by_zip/split_zip_created_by_zip.zip +3 -0
- local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/COMPRESS-477/split_zip_created_by_zip/split_zip_created_by_zip_zip64.z01 +3 -0
- local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/COMPRESS-477/split_zip_created_by_zip/split_zip_created_by_zip_zip64.z02 +3 -0
- local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/COMPRESS-477/split_zip_created_by_zip/zip_to_compare_created_by_zip_zip64.zip +3 -0
- local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/COMPRESS-492.7z +3 -0
- local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/COMPRESS-542-endheadercorrupted2.7z +3 -0
- local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/COMPRESS-548.zip +3 -0
- local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/COMPRESS-569-fail.tar +3 -0
- local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/COMPRESS-592.7z +3 -0
- local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/OSX_ArchiveWithNestedArchive.zip +3 -0
- local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/archive_with_trailer.tar +3 -0
- local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/bla-stored-dd-contradicts-actualsize.zip +3 -0
- local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/bla-stored-dd-nosig.zip +3 -0
- local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/bla-stored-dd-sizes-differ.zip +3 -0
- local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/bla.dump.lz4 +3 -0
- local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/bla.tar +3 -0
- local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/bla.tar.br +3 -0
- local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/bla.tar.bz2 +3 -0
- local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/bla.tar.deflatez +3 -0
- local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/bla.tar.lz4 +3 -0
- local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/bla.xml.bz2 +3 -0
- local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/bzip2-zip.zip +3 -0
- local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/directory.tar +3 -0
- local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/emptyDir.tar +3 -0
- local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/multiple.bz2 +3 -0
- local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/pack200/jars/ant.jar +3 -0
- local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/posix01_sparse.tar +3 -0
- local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/testNumbersNew.numbers +3 -0
- local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/utf8-winzip-test.zip +3 -0
- local-test-commons-compress-full-01-vuln_1/fuzz-tooling/docs/ideal_integration.md +1 -0
- local-test-commons-compress-full-01-vuln_1/fuzz-tooling/docs/index.md +88 -0
- local-test-commons-compress-full-01-vuln_1/fuzz-tooling/infra/manifest.py +61 -0
- local-test-commons-compress-full-01-vuln_1/fuzz-tooling/infra/pr_helper.py +300 -0
- local-test-commons-compress-full-01-vuln_1/fuzz-tooling/infra/presubmit.py +549 -0
- local-test-commons-compress-full-01-vuln_1/fuzz-tooling/infra/retry.py +106 -0
.gitattributes
CHANGED
|
@@ -107,3 +107,10 @@ local-test-tika-delta-05/afc-tika/tika-parsers/tika-parsers-extended/tika-parser
|
|
| 107 |
local-test-tika-delta-05/fuzz-tooling/infra/cifuzz/test_data/timeout_fuzzer filter=lfs diff=lfs merge=lfs -text
|
| 108 |
local-test-tika-delta-05/afc-tika/tika-parsers/tika-parsers-ml/tika-parser-nlp-module/src/test/resources/test-documents/testJournalParser.pdf filter=lfs diff=lfs merge=lfs -text
|
| 109 |
local-test-tika-delta-05/afc-tika/tika-parsers/tika-parsers-extended/tika-parser-scientific-module/src/test/resources/test-documents/test.hdf filter=lfs diff=lfs merge=lfs -text
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 107 |
local-test-tika-delta-05/fuzz-tooling/infra/cifuzz/test_data/timeout_fuzzer filter=lfs diff=lfs merge=lfs -text
|
| 108 |
local-test-tika-delta-05/afc-tika/tika-parsers/tika-parsers-ml/tika-parser-nlp-module/src/test/resources/test-documents/testJournalParser.pdf filter=lfs diff=lfs merge=lfs -text
|
| 109 |
local-test-tika-delta-05/afc-tika/tika-parsers/tika-parsers-extended/tika-parser-scientific-module/src/test/resources/test-documents/test.hdf filter=lfs diff=lfs merge=lfs -text
|
| 110 |
+
local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/COMPRESS-358.uncompressed filter=lfs diff=lfs merge=lfs -text
|
| 111 |
+
local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/testNumbersNew.numbers filter=lfs diff=lfs merge=lfs -text
|
| 112 |
+
local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/COMPRESS-477/split_zip_created_by_zip/split_zip_created_by_zip_zip64.z02 filter=lfs diff=lfs merge=lfs -text
|
| 113 |
+
local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/pack200/jars/ant.jar filter=lfs diff=lfs merge=lfs -text
|
| 114 |
+
local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/COMPRESS-477/split_zip_created_by_zip/split_zip_created_by_zip_zip64.z01 filter=lfs diff=lfs merge=lfs -text
|
| 115 |
+
local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/COMPRESS-477/split_zip_created_by_zip/split_zip_created_by_zip.z01 filter=lfs diff=lfs merge=lfs -text
|
| 116 |
+
local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/COMPRESS-477/split_zip_created_by_zip/split_zip_created_by_zip.z02 filter=lfs diff=lfs merge=lfs -text
|
local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/7z-empty-mhc-off.7z
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:0450dc9369e4e5e42488d0dc1edce863cf013c5aedcdf231338f270cd314c241
|
| 3 |
+
size 78
|
local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/7z-hello-mhc-off-lzma2.7z
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:0052f7f4e201f9eab215679cb1e2f2e0b7b0040d826e869302e117eaa5a439fd
|
| 3 |
+
size 138
|
local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/COMPRESS-114.tar
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:575763c5065f5dc79cf7bc83911668a968c4fc4a1cf7df7038dc16cb90ea7721
|
| 3 |
+
size 10240
|
local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/COMPRESS-131.bz2
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:a89ee3425574c2fc585e4d1c87b9e40c6d68ecf51377a1f21d25f0a25e69cdc5
|
| 3 |
+
size 340
|
local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/COMPRESS-256.7z
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:2684812614b043d3baffb7a2941fe9bef4e585b7060441f24914eaafb98a7945
|
| 3 |
+
size 4069842
|
local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/COMPRESS-279-fail.tar
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:be4f54cce9addfb76b8a772d94e40ea029e2131fa76b0d88075d4b5eda8aba24
|
| 3 |
+
size 19000
|
local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/COMPRESS-335.tar
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:20f6f1612b7203dd5191d9484365df4f9366f0c632d4ddc4db199f27f6e3c7f3
|
| 3 |
+
size 3072
|
local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/COMPRESS-348.7z
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:eb6a43668474b6dd6922f225a409979f9b7bfe33a02c29cfb59900afcf403dee
|
| 3 |
+
size 213
|
local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/COMPRESS-358.uncompressed
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:ee8e4aaa66bf8ae56d55b3a3ac0ad64ea02b7cfcf8746ca4bd56927ca884e31b
|
| 3 |
+
size 219521
|
local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/COMPRESS-417.tar
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:425ff8fd74131c6ddc2d5a98a0d4ba78baebbca2c3645ee8edbc152e629956b3
|
| 3 |
+
size 10240
|
local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/COMPRESS-477/split_zip_created_by_winrar/split_zip_created_by_winrar.zip
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:ea2d067a99a38f10288b2eed4543337719a41b0e6b8585a87f8728e5e317410a
|
| 3 |
+
size 50536
|
local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/COMPRESS-477/split_zip_created_by_winrar/zip_to_compare_created_by_winrar.zip
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:33169cd55b80be1c1b6174d3bf8432b7c239bc5372e2c86e8b7bef95f88e6d81
|
| 3 |
+
size 574820
|
local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/COMPRESS-477/split_zip_created_by_zip/split_zip_created_by_zip.z01
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:976bb2c4c8a9093be32824e0b2d112335581aba733d6103e65fcc63bc1560878
|
| 3 |
+
size 262144
|
local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/COMPRESS-477/split_zip_created_by_zip/split_zip_created_by_zip.z02
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:e6cdf0faf9016807160197d017cfd8a59815b5e55c3360f4c98f105ac609dc40
|
| 3 |
+
size 262144
|
local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/COMPRESS-477/split_zip_created_by_zip/split_zip_created_by_zip.zip
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:fb86d35f40656889434d6e69b1055d11dfb44d3d68555a74891f187e6dc3333c
|
| 3 |
+
size 57763
|
local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/COMPRESS-477/split_zip_created_by_zip/split_zip_created_by_zip_zip64.z01
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:38a2673a82a76deb82bf1897a87b06938fa02119a8382eb80c500e6ca7a67a17
|
| 3 |
+
size 262144
|
local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/COMPRESS-477/split_zip_created_by_zip/split_zip_created_by_zip_zip64.z02
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:3e6812211e4f5fb3e636e4a700bf8dfd385bac26b36e989a2c97dacf1d25748e
|
| 3 |
+
size 262144
|
local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/COMPRESS-477/split_zip_created_by_zip/zip_to_compare_created_by_zip_zip64.zip
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:d963653f8c6e22a9e2ec276909aac4d98e00f6951f9b175e7848a135b0920e2f
|
| 3 |
+
size 584681
|
local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/COMPRESS-492.7z
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:1f513e29c0c822c3a093c94207ea42cee5a0876a5205dbd0c4adffc05d416e92
|
| 3 |
+
size 39
|
local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/COMPRESS-542-endheadercorrupted2.7z
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:c9c92894e9c43fef4eda2591571b978d02df9d1e2c948c5fc95a3ac353971dd4
|
| 3 |
+
size 233
|
local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/COMPRESS-548.zip
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:3bed8be4a2685d261801fc44e90b36fcb1b4abc874982396e9f6720540d38fb4
|
| 3 |
+
size 79
|
local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/COMPRESS-569-fail.tar
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:7ad4603ba7626f88b8f640c4f8543b9b3ace3ec261e4eb5241f02ed397690b3d
|
| 3 |
+
size 513
|
local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/COMPRESS-592.7z
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:b54836099c4c27e535e8c2b6adf6b092fd66986f8ad586983c590e76cdf46316
|
| 3 |
+
size 1053510
|
local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/OSX_ArchiveWithNestedArchive.zip
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:342ab7e56b45182f962a2fd4b9ad17dff96a1883e9fd42e27c3682b873c73f9c
|
| 3 |
+
size 752
|
local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/archive_with_trailer.tar
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:802283b934ee8e998b136d1195266b85462c5d090ffbc9f36536e7e1a1234812
|
| 3 |
+
size 10254
|
local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/bla-stored-dd-contradicts-actualsize.zip
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:1fe52bd3515c7d18734429e5501aa4f9cd3173f7512370791e9625fcbde1860c
|
| 3 |
+
size 1023
|
local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/bla-stored-dd-nosig.zip
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:7adcf56c2eb0e95558a0f8ae038158b4502db096b8ed1511bd8246a98e0e7058
|
| 3 |
+
size 1018
|
local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/bla-stored-dd-sizes-differ.zip
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:f39c7f7987ad60de6c6756795ac36201a1ef298b723badf6a2c0b103ee426e29
|
| 3 |
+
size 1022
|
local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/bla.dump.lz4
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:83342b842478d51857b590b98a518a70afc186c647ebc4d3d29ea0e2110d9b04
|
| 3 |
+
size 1443
|
local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/bla.tar
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:8e081a961b6a97f68e19cf6e26f3b85e04ffef27dbe9c4ad664d79a7ced06a38
|
| 3 |
+
size 10240
|
local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/bla.tar.br
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:f5412ea63311ed00e13b02580f9298194fbadfca4dd2f9270d2da9a677acd43c
|
| 3 |
+
size 401
|
local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/bla.tar.bz2
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:963e492c9542fc377e79d99fbae2f5de0b14b6ad118615c6edcceeb1e49c7747
|
| 3 |
+
size 543
|
local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/bla.tar.deflatez
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:94f7e82a678084ddb8da9e47a188ca698ebf169b8ff539366febcec99dcfde0b
|
| 3 |
+
size 468
|
local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/bla.tar.lz4
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:1754153c73567574efc457b05034ec671a94250a38b48fea2d045f0313c7b210
|
| 3 |
+
size 623
|
local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/bla.xml.bz2
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:ece0962864bcda997c011042f0677a34d97fc4bc8294dd96fdcb003e38faea89
|
| 3 |
+
size 389
|
local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/bzip2-zip.zip
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:650a1ad2c88b0ad2f7686dc4502138848cb36016da535692b1a683675c2f5ca5
|
| 3 |
+
size 209
|
local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/directory.tar
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:487b432f36ef467a0848331aa2957f6896506e4a5b94fa6a05c23b75a20d265e
|
| 3 |
+
size 1536
|
local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/emptyDir.tar
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:096fc6ad880d4063f863775d96356edcc1752b0c137711f9a50ee207a1bb09c0
|
| 3 |
+
size 2560
|
local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/multiple.bz2
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:95047070dc140497f68adc8bda5f298fee7611c91e369236491b99025ba8ee16
|
| 3 |
+
size 74
|
local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/pack200/jars/ant.jar
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:12a63db875017c008ea9bb6ca5112d84c14be78d02ad9ea21c4221428a44a824
|
| 3 |
+
size 958858
|
local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/posix01_sparse.tar
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:1e96dcf7939b20ba863a58cc0f91b72231439da3201ff3c457d06ce4d90f27d9
|
| 3 |
+
size 10240
|
local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/testNumbersNew.numbers
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:74a834857c02d06dbc399aff8ba475f7708092e42501cc665d7fa489f385831f
|
| 3 |
+
size 179147
|
local-test-commons-compress-delta-02/afc-commons-compress/src/test/resources/utf8-winzip-test.zip
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:fd5aeb74f430739a93b21f107237f74f31de5a59c66729dab68281f47ed1ec61
|
| 3 |
+
size 569
|
local-test-commons-compress-full-01-vuln_1/fuzz-tooling/docs/ideal_integration.md
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
This page has moved [here](https://google.github.io/oss-fuzz/advanced-topics/ideal-integration)
|
local-test-commons-compress-full-01-vuln_1/fuzz-tooling/docs/index.md
ADDED
|
@@ -0,0 +1,88 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
---
|
| 2 |
+
layout: default
|
| 3 |
+
title: OSS-Fuzz
|
| 4 |
+
permalink: /
|
| 5 |
+
nav_order: 1
|
| 6 |
+
has_children: true
|
| 7 |
+
has_toc: false
|
| 8 |
+
---
|
| 9 |
+
|
| 10 |
+
# OSS-Fuzz
|
| 11 |
+
|
| 12 |
+
[Fuzz testing] is a well-known technique for uncovering programming errors in
|
| 13 |
+
software. Many of these detectable errors, like [buffer overflow], can have
|
| 14 |
+
serious security implications. Google has found [thousands] of security
|
| 15 |
+
vulnerabilities and stability bugs by deploying [guided in-process fuzzing of
|
| 16 |
+
Chrome components], and we now want to share that service with the open source
|
| 17 |
+
community.
|
| 18 |
+
|
| 19 |
+
[Fuzz testing]: https://en.wikipedia.org/wiki/Fuzz_testing
|
| 20 |
+
[buffer overflow]: https://en.wikipedia.org/wiki/Buffer_overflow
|
| 21 |
+
[thousands]: https://bugs.chromium.org/p/chromium/issues/list?q=label%3AStability-LibFuzzer%2CStability-AFL%20-status%3ADuplicate%2CWontFix&can=1
|
| 22 |
+
[guided in-process fuzzing of Chrome components]: https://security.googleblog.com/2016/08/guided-in-process-fuzzing-of-chrome.html
|
| 23 |
+
|
| 24 |
+
In cooperation with the [Core Infrastructure Initiative] and the [OpenSSF],
|
| 25 |
+
OSS-Fuzz aims to make common open source software more secure and stable by
|
| 26 |
+
combining modern fuzzing techniques with scalable, distributed execution.
|
| 27 |
+
Projects that do not qualify for OSS-Fuzz (e.g. closed source) can run their own
|
| 28 |
+
instances of [ClusterFuzz] or [ClusterFuzzLite].
|
| 29 |
+
|
| 30 |
+
[Core Infrastructure Initiative]: https://www.coreinfrastructure.org/
|
| 31 |
+
[OpenSSF]: https://www.openssf.org/
|
| 32 |
+
|
| 33 |
+
We support the [libFuzzer], [AFL++], [Honggfuzz], and [Centipede] fuzzing engines in
|
| 34 |
+
combination with [Sanitizers], as well as [ClusterFuzz], a distributed fuzzer
|
| 35 |
+
execution environment and reporting tool.
|
| 36 |
+
|
| 37 |
+
[libFuzzer]: https://llvm.org/docs/LibFuzzer.html
|
| 38 |
+
[AFL++]: https://github.com/AFLplusplus/AFLplusplus
|
| 39 |
+
[Honggfuzz]: https://github.com/google/honggfuzz
|
| 40 |
+
[Centipede]: https://github.com/google/centipede
|
| 41 |
+
[Sanitizers]: https://github.com/google/sanitizers
|
| 42 |
+
[ClusterFuzz]: https://github.com/google/clusterfuzz
|
| 43 |
+
[ClusterFuzzLite]: https://google.github.io/clusterfuzzlite/
|
| 44 |
+
|
| 45 |
+
Currently, OSS-Fuzz supports C/C++, Rust, Go, Python and Java/JVM code. Other
|
| 46 |
+
languages supported by [LLVM] may work too. OSS-Fuzz supports fuzzing x86_64
|
| 47 |
+
and i386 builds.
|
| 48 |
+
|
| 49 |
+
[LLVM]: https://llvm.org
|
| 50 |
+
|
| 51 |
+
|
| 52 |
+
## Project history
|
| 53 |
+
OSS-Fuzz was launched in 2016 in response to the
|
| 54 |
+
[Heartbleed] vulnerability, discovered in [OpenSSL], one of the
|
| 55 |
+
most popular open source projects for encrypting web traffic. The vulnerability
|
| 56 |
+
had the potential to affect almost every internet user, yet was caused by a
|
| 57 |
+
relatively simple memory buffer overflow bug that could have been detected by
|
| 58 |
+
fuzzing—that is, by running the code on randomized inputs to intentionally cause
|
| 59 |
+
unexpected behaviors or crashes. At the time, though, fuzzing
|
| 60 |
+
was not widely used and was cumbersome for developers, requiring extensive
|
| 61 |
+
manual effort.
|
| 62 |
+
|
| 63 |
+
Google created OSS-Fuzz to fill this gap: it's a free service that runs fuzzers
|
| 64 |
+
for open source projects and privately alerts developers to the bugs detected.
|
| 65 |
+
Since its launch, OSS-Fuzz has become a critical service for the open source
|
| 66 |
+
community, growing beyond C/C++ to
|
| 67 |
+
detect problems in memory-safe languages such as Go, Rust, and Python.
|
| 68 |
+
|
| 69 |
+
[Heartbleed]: https://heartbleed.com/
|
| 70 |
+
[OpenSSL]: https://www.openssl.org/
|
| 71 |
+
|
| 72 |
+
## Learn more about fuzzing
|
| 73 |
+
|
| 74 |
+
This documentation describes how to use OSS-Fuzz service for your open source
|
| 75 |
+
project. To learn more about fuzzing in general, we recommend reading [libFuzzer
|
| 76 |
+
tutorial] and the other docs in [google/fuzzing] repository. These and some
|
| 77 |
+
other resources are listed on the [useful links] page.
|
| 78 |
+
|
| 79 |
+
[google/fuzzing]: https://github.com/google/fuzzing/tree/master/docs
|
| 80 |
+
[libFuzzer tutorial]: https://github.com/google/fuzzing/blob/master/tutorial/libFuzzerTutorial.md
|
| 81 |
+
[useful links]: {{ site.baseurl }}/reference/useful-links/#tutorials
|
| 82 |
+
|
| 83 |
+
## Trophies
|
| 84 |
+
As of August 2023, OSS-Fuzz has helped identify and fix over [10,000] vulnerabilities and [36,000] bugs across [1,000] projects.
|
| 85 |
+
|
| 86 |
+
[10,000]: https://bugs.chromium.org/p/oss-fuzz/issues/list?q=Type%3DBug-Security%20label%3Aclusterfuzz%20-status%3ADuplicate%2CWontFix&can=1
|
| 87 |
+
[36,000]: https://bugs.chromium.org/p/oss-fuzz/issues/list?q=Type%3DBug%20label%3Aclusterfuzz%20-status%3ADuplicate%2CWontFix&can=1
|
| 88 |
+
[1,000]: https://github.com/google/oss-fuzz/tree/master/projects
|
local-test-commons-compress-full-01-vuln_1/fuzz-tooling/infra/manifest.py
ADDED
|
@@ -0,0 +1,61 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#! /usr/bin/env python3
|
| 2 |
+
# Copyright 2022 Google LLC
|
| 3 |
+
#
|
| 4 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 5 |
+
# you may not use this file except in compliance with the License.
|
| 6 |
+
# You may obtain a copy of the License at
|
| 7 |
+
#
|
| 8 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 9 |
+
#
|
| 10 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 11 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 12 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 13 |
+
# See the License for the specific language governing permissions and
|
| 14 |
+
# limitations under the License.
|
| 15 |
+
#
|
| 16 |
+
################################################################################
|
| 17 |
+
"""Script for pushing manifest files to docker that point to AMD64 and ARM
|
| 18 |
+
images."""
|
| 19 |
+
import logging
|
| 20 |
+
import subprocess
|
| 21 |
+
import sys
|
| 22 |
+
|
| 23 |
+
|
| 24 |
+
def push_manifest(image):
|
| 25 |
+
"""Pushes a manifest file in place of |image| for ARM and AMD64 versions of
|
| 26 |
+
that image."""
|
| 27 |
+
subprocess.run(['docker', 'pull', image], check=True)
|
| 28 |
+
amd64_image = f'{image}:manifest-amd64'
|
| 29 |
+
subprocess.run(['docker', 'tag', image, amd64_image], check=True)
|
| 30 |
+
subprocess.run(['docker', 'push', amd64_image], check=True)
|
| 31 |
+
|
| 32 |
+
arm_version = f'{image}-testing-arm'
|
| 33 |
+
subprocess.run(['docker', 'pull', arm_version], check=True)
|
| 34 |
+
arm64_image = f'{image}:manifest-arm64v8'
|
| 35 |
+
subprocess.run(['docker', 'tag', arm_version, arm64_image], check=True)
|
| 36 |
+
|
| 37 |
+
subprocess.run([
|
| 38 |
+
'docker', 'manifest', 'create', image, '--amend', arm64_image, '--amend',
|
| 39 |
+
amd64_image
|
| 40 |
+
],
|
| 41 |
+
check=True)
|
| 42 |
+
subprocess.run(['docker', 'manifest', 'push', image], check=True)
|
| 43 |
+
return True
|
| 44 |
+
|
| 45 |
+
|
| 46 |
+
def main():
|
| 47 |
+
"""Sets up manifests for base-builder and base-runner so they can be used for
|
| 48 |
+
ARM builds."""
|
| 49 |
+
logging.info('Doing simple gcloud command to ensure 2FA passes. '
|
| 50 |
+
'Otherwise docker push fails.')
|
| 51 |
+
subprocess.run(['gcloud', 'projects', 'list', '--limit=1'], check=True)
|
| 52 |
+
|
| 53 |
+
images = [
|
| 54 |
+
'ghcr.io/aixcc-finals/base-builder', 'ghcr.io/aixcc-finals/base-runner'
|
| 55 |
+
]
|
| 56 |
+
results = [push_manifest(image) for image in images]
|
| 57 |
+
return 0 if all(results) else 1
|
| 58 |
+
|
| 59 |
+
|
| 60 |
+
if __name__ == '__main__':
|
| 61 |
+
sys.exit(main())
|
local-test-commons-compress-full-01-vuln_1/fuzz-tooling/infra/pr_helper.py
ADDED
|
@@ -0,0 +1,300 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/usr/bin/env python
|
| 2 |
+
# Copyright 2023 Google LLC
|
| 3 |
+
#
|
| 4 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 5 |
+
# you may not use this file except in compliance with the License.
|
| 6 |
+
# You may obtain a copy of the License at
|
| 7 |
+
#
|
| 8 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 9 |
+
#
|
| 10 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 11 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 12 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 13 |
+
# See the License for the specific language governing permissions and
|
| 14 |
+
# limitations under the License.
|
| 15 |
+
#
|
| 16 |
+
################################################################################
|
| 17 |
+
"""Adds comments for PR to provide more information for approvers."""
|
| 18 |
+
import base64
|
| 19 |
+
import json
|
| 20 |
+
import os
|
| 21 |
+
import subprocess
|
| 22 |
+
|
| 23 |
+
import requests
|
| 24 |
+
import yaml
|
| 25 |
+
|
| 26 |
+
# Coordinates of the repository this script operates on.
OWNER = 'google'
REPO = 'oss-fuzz'
GITHUB_URL = 'https://github.com/'
GITHUB_NONREF_URL = f'https://www.github.com/{OWNER}/{REPO}'  # Github URL that doesn't send emails on linked issues.
API_URL = 'https://api.github.com'
BASE_URL = f'{API_URL}/repos/{OWNER}/{REPO}'
BRANCH = 'master'
# Location where the workflow runner installs the criticality_score binary.
CRITICALITY_SCORE_PATH = '/home/runner/go/bin/criticality_score'
COMMITS_LIMIT = 50  # Only process the most recent 50 commits.
|
| 35 |
+
|
| 36 |
+
|
| 37 |
+
def get_criticality_score(repo_url):
  """Returns the criticality score of the project at |repo_url|.

  Runs the external criticality_score tool and parses its JSON output.
  Returns the string 'N/A' if the tool is missing or its output cannot
  be parsed.
  """
  # Criticality score does not support repo url ends with '.git'
  if repo_url.endswith('.git'):
    repo_url = repo_url[:-4]
  try:
    report = subprocess.run([
        CRITICALITY_SCORE_PATH, '--format', 'json',
        '-gcp-project-id=clusterfuzz-external', '-depsdev-disable', repo_url
    ],
                            capture_output=True,
                            text=True)
  except FileNotFoundError:
    # The tool is not installed on this runner; report 'N/A' instead of
    # crashing the whole workflow step.
    print(f'Criticality score tool not found at {CRITICALITY_SCORE_PATH}')
    return 'N/A'

  try:
    report_dict = json.loads(report.stdout)
  except (json.JSONDecodeError, ValueError):
    # Narrow except: the previous bare `except:` also swallowed
    # KeyboardInterrupt/SystemExit.
    print(f'Criticality score failed with stdout: {report.stdout}')
    print(f'Criticality score failed with stderr: {report.stderr}')
    return 'N/A'
  return report_dict.get('default_score', 'N/A')
|
| 56 |
+
|
| 57 |
+
|
| 58 |
+
def is_known_contributor(content, email):
  """Returns True if |email| is the primary contact of the project or is
  listed in either of its CC sections in the project.yaml |content|."""
  if email == content.get('primary_contact'):
    return True
  cc_sections = ('vendor_ccs', 'auto_ccs')
  return any(email in content.get(section, []) for section in cc_sections)
|
| 63 |
+
|
| 64 |
+
|
| 65 |
+
def save_env(message, is_ready_for_merge, is_internal=False):
  """Saves the outputs as environment variables.

  Appends MESSAGE, IS_READY_FOR_MERGE and IS_INTERNAL to the file named by
  $GITHUB_ENV so that later workflow steps can read them.
  """
  with open(os.environ['GITHUB_ENV'], 'a') as github_env:
    github_env.write(f'MESSAGE={message}\n')
    github_env.write(f'IS_READY_FOR_MERGE={is_ready_for_merge}\n')
    # Terminate the last entry with a newline too: without it, anything
    # appended to $GITHUB_ENV afterwards would be glued onto this line and
    # corrupt both entries.
    github_env.write(f'IS_INTERNAL={is_internal}\n')
|
| 71 |
+
|
| 72 |
+
|
| 73 |
+
def main():
  """Verifies if a PR is ready to merge.

  Walks every projects/<name> directory touched by the PR, builds a
  human-readable summary for approvers, and stores the results
  (MESSAGE, IS_READY_FOR_MERGE, IS_INTERNAL) in $GITHUB_ENV via save_env().
  """
  github = GithubHandler()

  # Bypasses PRs of the internal members.
  if github.is_author_internal_member():
    save_env(None, None, True)
    return

  message = ''
  is_ready_for_merge = True
  pr_author = github.get_pr_author()
  # Gets all modified projects path.
  projects_path = github.get_projects_path()
  verified, email = github.get_author_email()

  for project_path in projects_path:
    project_url = f'{GITHUB_URL}/{OWNER}/{REPO}/tree/{BRANCH}/{project_path}'
    content_dict = github.get_project_yaml(project_path)

    # Gets information for the new integrating project.
    # An empty dict means there is no project.yaml on master yet, i.e. this
    # PR integrates a brand-new project.
    if not content_dict:
      is_ready_for_merge = False
      new_project = github.get_integrated_project_info()
      repo_url = new_project.get('main_repo')
      if repo_url is None:
        message += (f'{pr_author} is integrating a new project, '
                    'but the `main_repo` is missing. '
                    'The criticality score cannot be computed.<br/>')
      else:
        message += (f'{pr_author} is integrating a new project:<br/>'
                    f'- Main repo: {repo_url}<br/> - Criticality score: '
                    f'{get_criticality_score(repo_url)}<br/>')
      continue

    # Checks if the author is in the contact list.
    if email:
      if is_known_contributor(content_dict, email):
        # Checks if the email is verified.
        verified_marker = ' (verified)' if verified else ''
        message += (
            f'{pr_author}{verified_marker} is either the primary contact or '
            f'is in the CCs list of [{project_path}]({project_url}).<br/>')
        if verified:
          continue

    # Checks the previous commits.
    commit_sha = github.has_author_modified_project(project_path)
    if commit_sha is None:
      # First-time contributor to this project: block auto-merge and list
      # past contributors so approvers know whom to ask.
      history_message = ''
      contributors = github.get_past_contributors(project_path)
      if contributors:
        history_message = 'The past contributors are: '
        history_message += ', '.join(contributors)
      message += (
          f'{pr_author} is a new contributor to '
          f'[{project_path}]({project_url}). The PR must be approved by known '
          f'contributors before it can be merged. {history_message}<br/>')
      is_ready_for_merge = False
      continue

    # If the previous commit is not associated with a pull request.
    pr_message = (f'{pr_author} has previously contributed to '
                  f'[{project_path}]({project_url}). The previous commit was '
                  f'{GITHUB_NONREF_URL}/commit/{commit_sha}<br/>')

    previous_pr_number = github.get_pull_request_number(commit_sha)
    if previous_pr_number is not None:
      pr_message = (f'{pr_author} has previously contributed to '
                    f'[{project_path}]({project_url}). '
                    f'The previous PR was [#{previous_pr_number}]'
                    f'({GITHUB_NONREF_URL}/pull/{previous_pr_number})<br/>')
    message += pr_message

  save_env(message, is_ready_for_merge, False)
|
| 148 |
+
|
| 149 |
+
|
| 150 |
+
class GithubHandler:
  """Github requests handler.

  Reads PR metadata and credentials from the PRAUTHOR, GITHUBTOKEN and
  PRNUMBER environment variables set by the workflow.
  """

  def __init__(self):
    self._pr_author = os.environ['PRAUTHOR']
    self._token = os.environ['GITHUBTOKEN']
    self._pr_number = os.environ['PRNUMBER']
    self._headers = {
        'Authorization': f'Bearer {self._token}',
        'X-GitHub-Api-Version': '2022-11-28'
    }
    # Lazily populated cache of internal maintainer logins.
    self._maintainers = set()
    # Exported for the criticality_score tool invoked elsewhere.
    os.environ['GITHUB_AUTH_TOKEN'] = self._token

  def get_pr_author(self):
    """Gets the pr author user name."""
    return self._pr_author

  def get_projects_path(self):
    """Returns the list of projects/<name> directories touched by the PR."""
    response = requests.get(f'{BASE_URL}/pulls/{self._pr_number}/files',
                            headers=self._headers)
    if not response.ok:
      return []

    projects_path = set()
    for file in response.json():
      file_path = file['filename']
      dir_path = file_path.split(os.sep)
      if len(dir_path) > 1 and dir_path[0] == 'projects':
        projects_path.add(os.sep.join(dir_path[0:2]))
    return list(projects_path)

  def get_author_email(self):
    """Retrieves the author's email address for a pull request,
    including non-public emails. Returns a (verified, email) tuple."""
    user_response = requests.get(f'{API_URL}/users/{self._pr_author}')
    if user_response.ok:
      # .get(): the 'email' field may be null or absent entirely.
      email = user_response.json().get('email')
      if email:
        return True, email

    commits_response = requests.get(
        f'{BASE_URL}/pulls/{self._pr_number}/commits', headers=self._headers)
    # Guard the empty-commit-list case before indexing [0].
    if not commits_response.ok or not commits_response.json():
      return False, None
    first_commit = commits_response.json()[0]['commit']
    email = first_commit['author']['email']
    verified = first_commit['verification']['verified']
    return verified, email

  def get_project_yaml(self, project_path):
    """Gets the project yaml file for |project_path| on the default branch."""
    contents_url = f'{BASE_URL}/contents/{project_path}/project.yaml'
    return self.get_yaml_file_content(contents_url)

  def get_yaml_file_content(self, contents_url):
    """Gets yaml file content; returns {} if the request fails."""
    response = requests.get(contents_url, headers=self._headers)
    if not response.ok:
      return {}
    content = base64.b64decode(response.json()['content']).decode('UTF-8')
    return yaml.safe_load(content)

  def get_integrated_project_info(self):
    """Gets the project.yaml added by the PR for a newly integrated project."""
    response = requests.get(f'{BASE_URL}/pulls/{self._pr_number}/files',
                            headers=self._headers)

    for file in response.json():
      file_path = file['filename']
      if 'project.yaml' in file_path:
        return self.get_yaml_file_content(file['contents_url'])

    return {}

  def get_pull_request_number(self, commit):
    """Returns the pull request number associated with |commit|, or None."""
    pr_response = requests.get(f'{BASE_URL}/commits/{commit}/pulls',
                               headers=self._headers)
    if not pr_response.ok:
      return None
    pulls = pr_response.json()
    # A commit pushed directly (not via a PR) yields an empty list; the
    # previous unconditional [0] raised IndexError here.
    if not pulls:
      return None
    return pulls[0]['number']

  def get_past_contributors(self, project_path):
    """Returns a list of past contributors of a certain project."""
    commits_response = requests.get(f'{BASE_URL}/commits?path={project_path}',
                                    headers=self._headers)

    if not commits_response.ok:
      return []
    commits = commits_response.json()
    contributors: dict[str, bool] = {}
    # Only process the most recent COMMITS_LIMIT commits.
    for commit in commits[:COMMITS_LIMIT]:
      if not commit['author'] or not commit['commit']:
        continue

      login = commit['author']['login']
      verified = commit['commit']['verification']['verified']
      if login in self._maintainers:
        continue
      if login not in contributors:
        contributors[login] = verified
      if verified:
        # Override previous verification bit.
        contributors[login] = True

    all_contributors = []
    for login, verified in contributors.items():
      login_verify = login if verified else f'{login} (unverified)'
      all_contributors.append(login_verify)

    return all_contributors

  def get_maintainers(self):
    """Returns the set of internal maintainer logins, fetched once."""
    if self._maintainers:
      return self._maintainers

    response = requests.get(f'{BASE_URL}/contents/infra/MAINTAINERS.csv',
                            headers=self._headers)
    if not response.ok:
      return self._maintainers

    maintainers_file = base64.b64decode(
        response.json()['content']).decode('UTF-8')
    # splitlines() handles any newline convention; splitting on os.linesep
    # only matched the host platform's. Skip blank/short rows instead of
    # raising IndexError on them.
    for line in maintainers_file.splitlines():
      fields = line.split(',')
      if len(fields) > 2:
        self._maintainers.add(fields[2])
    return self._maintainers

  def is_author_internal_member(self):
    """Returns if the author is an internal member."""
    return self._pr_author in self.get_maintainers()

  def has_author_modified_project(self, project_path):
    """Returns the sha of the author's most recent commit touching
    |project_path|, or None if there is none."""
    commits_response = requests.get(
        f'{BASE_URL}/commits?path={project_path}&author={self._pr_author}',
        headers=self._headers)

    if not commits_response.ok or not commits_response.json():
      return None

    commit = commits_response.json()[0]
    return commit['sha']
|
| 297 |
+
|
| 298 |
+
|
| 299 |
+
if __name__ == '__main__':
|
| 300 |
+
main()
|
local-test-commons-compress-full-01-vuln_1/fuzz-tooling/infra/presubmit.py
ADDED
|
@@ -0,0 +1,549 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/usr/bin/env python3
|
| 2 |
+
# Copyright 2020 Google LLC
|
| 3 |
+
#
|
| 4 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 5 |
+
# you may not use this file except in compliance with the License.
|
| 6 |
+
# You may obtain a copy of the License at
|
| 7 |
+
#
|
| 8 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 9 |
+
#
|
| 10 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 11 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 12 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 13 |
+
# See the License for the specific language governing permissions and
|
| 14 |
+
# limitations under the License.
|
| 15 |
+
#
|
| 16 |
+
################################################################################
|
| 17 |
+
"""Checks code for common issues before submitting."""
|
| 18 |
+
|
| 19 |
+
import argparse
|
| 20 |
+
import os
|
| 21 |
+
import re
|
| 22 |
+
import subprocess
|
| 23 |
+
import sys
|
| 24 |
+
import unittest
|
| 25 |
+
import yaml
|
| 26 |
+
|
| 27 |
+
import constants
|
| 28 |
+
|
| 29 |
+
# Absolute path of the repository root (this file lives in <root>/infra/).
_SRC_ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Project directory names may only contain lowercase alphanumerics, '_', '-'.
VALID_PROJECT_REGEX_STR = '^[a-z0-9_-]+$'
VALID_PROJECT_REGEX = re.compile(VALID_PROJECT_REGEX_STR)
|
| 32 |
+
|
| 33 |
+
|
| 34 |
+
def _is_project_file(actual_path, expected_filename):
|
| 35 |
+
"""Returns True if actual_path's name is |expected_filename| and is a file
|
| 36 |
+
that exists and is in in projects/."""
|
| 37 |
+
if os.path.basename(actual_path) != expected_filename:
|
| 38 |
+
return False
|
| 39 |
+
|
| 40 |
+
if os.path.basename(os.path.dirname(
|
| 41 |
+
os.path.dirname(actual_path))) != 'projects':
|
| 42 |
+
return False
|
| 43 |
+
|
| 44 |
+
return os.path.exists(actual_path)
|
| 45 |
+
|
| 46 |
+
|
| 47 |
+
# TODO: Check for -fsanitize=fuzzer in files as well.
|
| 48 |
+
|
| 49 |
+
|
| 50 |
+
def _check_one_lib_fuzzing_engine(build_sh_file):
  """Returns False if |build_sh_file| contains -lFuzzingEngine.

  This is deprecated behavior. $LIB_FUZZING_ENGINE should be used instead
  so that -fsanitize=fuzzer is used.
  """
  if not _is_project_file(build_sh_file, 'build.sh'):
    return True

  with open(build_sh_file) as build_sh:
    build_sh_lines = build_sh.readlines()
  # start=1: editors number lines from 1. The previous 0-based count
  # pointed users one line above the actual offender.
  for line_num, line in enumerate(build_sh_lines, start=1):
    # Only flag uses outside of comments.
    uncommented_code = line.split('#')[0]
    if '-lFuzzingEngine' in uncommented_code:
      print('Error: build.sh contains deprecated "-lFuzzingEngine" on line: '
            f'{line_num}. Please use "$LIB_FUZZING_ENGINE" instead.')
      return False
  return True
|
| 66 |
+
|
| 67 |
+
|
| 68 |
+
def check_lib_fuzzing_engine(paths):
  """Calls _check_one_lib_fuzzing_engine on each path in |paths|. Returns True
  if the result of every call is True.

  Uses a list comprehension (not a generator) so every file is checked and
  every error message printed, matching the non-short-circuiting convention
  of the other check_* helpers in this file.
  """
  return all([_check_one_lib_fuzzing_engine(path) for path in paths])
|
| 72 |
+
|
| 73 |
+
|
| 74 |
+
class ProjectYamlChecker:
  """Checks for a project.yaml file.

  Each check_* method records failures via error(); do_checks() runs them
  all and returns the aggregate result.
  """

  # Sections in a project.yaml and the constant values that they are allowed
  # to have.
  SECTIONS_AND_CONSTANTS = {
      'sanitizers': constants.SANITIZERS,
      'architectures': constants.ARCHITECTURES,
      'fuzzing_engines': constants.ENGINES,
  }

  # Note: this list must be updated when we allow new sections.
  VALID_SECTION_NAMES = [
      'architectures',
      'auto_ccs',
      'blackbox',
      'builds_per_day',
      'coverage_extra_args',
      'disabled',
      'fuzzing_engines',
      'help_url',
      'homepage',
      'language',
      'labels',  # For internal use only, hard to lint as it uses fuzzer names.
      'main_repo',
      'primary_contact',
      'run_tests',
      'sanitizers',
      'selective_unpack',
      'vendor_ccs',
      'view_restrictions',
      'file_github_issue',
  ]

  REQUIRED_SECTIONS = ['main_repo']

  def __init__(self, filename):
    # Path of the project.yaml under inspection; used in error messages.
    self.filename = filename
    with open(filename) as file_handle:
      self.data = yaml.safe_load(file_handle)

    # Flipped to False by error(); returned by do_checks().
    self.success = True

  def do_checks(self):
    """Does all project.yaml checks. Returns True if they pass."""
    if self.is_disabled():
      return True

    checks = [
        self.check_project_yaml_constants,
        self.check_required_sections,
        self.check_valid_section_names,
        self.check_valid_emails,
        self.check_valid_language,
        self.check_valid_project_name,
    ]
    # Run every check (no short-circuit) so all errors are reported at once.
    for check_function in checks:
      check_function()
    return self.success

  def is_disabled(self):
    """Returns True if this project is disabled."""
    return self.data.get('disabled', False)

  def error(self, message):
    """Prints an error message and sets self.success to False."""
    self.success = False
    print(f'Error in {self.filename}: {message}')

  def check_valid_project_name(self):
    """Records an error if the project directory name is not allowed."""
    banned_names = ['google', 'g00gle']
    project_name = os.path.basename(os.path.dirname(self.filename))
    for banned_name in banned_names:
      if banned_name in project_name:
        self.error('Projects can\'t have \'google\' in the name.')
    if not VALID_PROJECT_REGEX.match(project_name):
      self.error(f'Projects must conform to regex {VALID_PROJECT_REGEX_STR}')

  def check_project_yaml_constants(self):
    """Records an error for each section value outside its allowed set."""
    for section, allowed_constants in self.SECTIONS_AND_CONSTANTS.items():
      if section not in self.data:
        continue
      actual_constants = self.data[section]
      allowed_constants_str = ', '.join(allowed_constants)
      for constant in actual_constants:
        if isinstance(constant, str):
          if constant not in allowed_constants:
            self.error(f'{constant} (in {section} section) is not a valid '
                       f'constant ({allowed_constants_str}).')
        elif isinstance(constant, dict):
          # The only alternative value allowed is the experimental flag, i.e.
          # `constant == {'memory': {'experimental': True}}`. Do not check the
          # experimental flag, but assert that the sanitizer is a valid one.
          if (len(constant.keys()) > 1 or
              list(constant.keys())[0] not in allowed_constants):
            self.error(f'Not allowed value in the project.yaml: {constant}')
        else:
          self.error(f'Not allowed value in the project.yaml: {constant}')

  def check_valid_section_names(self):
    """Records an error for each section name not in VALID_SECTION_NAMES."""
    for name in self.data:
      if name not in self.VALID_SECTION_NAMES:
        self.error(
            f'{name} is not a valid section name ({self.VALID_SECTION_NAMES})')

  def check_required_sections(self):
    """Records an error for each required section missing from the yaml."""
    for section in self.REQUIRED_SECTIONS:
      if section not in self.data:
        self.error(f'{section} section is missing.')

  def check_valid_emails(self):
    """Records an error for each contact email that does not look valid."""
    # Get email addresses.
    email_addresses = []
    primary_contact = self.data.get('primary_contact')
    if primary_contact:
      email_addresses.append(primary_contact)
    auto_ccs = self.data.get('auto_ccs')
    if auto_ccs:
      email_addresses.extend(auto_ccs)

    # Check that email addresses seem normal.
    for email_address in email_addresses:
      if '@' not in email_address or '.' not in email_address:
        self.error(f'{email_address} is an invalid email address.')

  def check_valid_language(self):
    """Records an error if the language is missing or unsupported."""
    language = self.data.get('language')
    if not language:
      self.error('Missing "language" attribute in project.yaml.')
    elif language not in constants.LANGUAGES:
      self.error(
          f'"language: {language}" is not supported ({constants.LANGUAGES}).')
|
| 212 |
+
|
| 213 |
+
|
| 214 |
+
def _check_one_project_yaml(project_yaml_filename):
  """Does checks on the project.yaml file. Returns True on success."""
  # Catch the common misnaming 'project.yml' explicitly.
  if _is_project_file(project_yaml_filename, 'project.yml'):
    print(project_yaml_filename, 'must be named project.yaml.')
    return False

  # Anything that is not a projects/<name>/project.yaml is out of scope.
  if not _is_project_file(project_yaml_filename, 'project.yaml'):
    return True

  return ProjectYamlChecker(project_yaml_filename).do_checks()
|
| 225 |
+
|
| 226 |
+
|
| 227 |
+
def check_project_yaml(paths):
  """Calls _check_one_project_yaml on each path in |paths|. Returns True if the
  result of every call is True."""
  # List comprehension on purpose: evaluate every check before all() so no
  # error message is skipped by short-circuiting.
  results = [_check_one_project_yaml(path) for path in paths]
  return all(results)
|
| 231 |
+
|
| 232 |
+
|
| 233 |
+
def _check_one_seed_corpus(path):
|
| 234 |
+
"""Returns False and prints error if |path| is a seed corpus."""
|
| 235 |
+
if os.path.basename(os.path.dirname(os.path.dirname(path))) != 'projects':
|
| 236 |
+
return True
|
| 237 |
+
|
| 238 |
+
if os.path.splitext(path)[1] == '.zip':
|
| 239 |
+
print('Don\'t commit seed corpora into the ClusterFuzz repo,'
|
| 240 |
+
'they bloat it forever.')
|
| 241 |
+
return False
|
| 242 |
+
|
| 243 |
+
return True
|
| 244 |
+
|
| 245 |
+
|
| 246 |
+
def check_seed_corpus(paths):
  """Calls _check_one_seed_corpus on each path in |paths|. Returns True if the
  result of every call is True."""
  # Materialize first so every offending file is reported, not just the first.
  results = [_check_one_seed_corpus(path) for path in paths]
  return all(results)
|
| 250 |
+
|
| 251 |
+
|
| 252 |
+
def _check_one_apt_update(path):
|
| 253 |
+
"""Checks that a Dockerfile uses apt-update before apt-install"""
|
| 254 |
+
if os.path.basename(os.path.dirname(os.path.dirname(path))) != 'projects':
|
| 255 |
+
return True
|
| 256 |
+
|
| 257 |
+
if os.path.basename(path) != 'Dockerfile':
|
| 258 |
+
return True
|
| 259 |
+
|
| 260 |
+
with open(path, 'r') as file:
|
| 261 |
+
dockerfile = file.read()
|
| 262 |
+
if 'RUN apt install' in dockerfile or 'RUN apt-get install' in dockerfile:
|
| 263 |
+
print('Please add an "apt-get update" before "apt-get install". '
|
| 264 |
+
'Otherwise, a cached and outdated RUN layer may lead to install '
|
| 265 |
+
'failures in file %s.' % str(path))
|
| 266 |
+
return False
|
| 267 |
+
|
| 268 |
+
return True
|
| 269 |
+
|
| 270 |
+
|
| 271 |
+
def check_apt_update(paths):
  """Checks that all Dockerfile use apt-update before apt-install"""
  # List form keeps every file checked (no short-circuit), matching the
  # convention used by the sibling check_* helpers.
  results = [_check_one_apt_update(path) for path in paths]
  return all(results)
|
| 274 |
+
|
| 275 |
+
|
| 276 |
+
def do_checks(changed_files):
  """Runs all presubmit checks. Returns False if any fails."""
  checks = [
      check_license,
      yapf,
      check_project_yaml,
      check_lib_fuzzing_engine,
      check_seed_corpus,
      check_apt_update,
  ]
  # Use a list comprehension here and in other cases where we use all() so that
  # we don't quit early on failure. This is more user-friendly since the more
  # errors we spit out at once, the less frequently the less check-fix-check
  # cycles they need to do.
  results = [check(changed_files) for check in checks]
  return all(results)
|
| 291 |
+
|
| 292 |
+
|
| 293 |
+
# Filenames (exact match) that must carry a license header.
_CHECK_LICENSE_FILENAMES = ['Dockerfile']
# File extensions whose files must carry a license header.
_CHECK_LICENSE_EXTENSIONS = [
    '.bash',
    '.c',
    '.cc',
    '.cpp',
    '.css',
    '.Dockerfile',
    '.go',
    '.h',
    '.htm',
    '.html',
    '.java',
    '.js',
    '.proto',
    '.py',
    '.rs',
    '.sh',
    '.ts',
]
THIRD_PARTY_DIR_NAME = 'third_party'

_LICENSE_STRING = 'http://www.apache.org/licenses/LICENSE-2.0'


def check_license(paths):
  """Validates license header."""
  if not paths:
    return True

  success = True
  for path in paths:
    # Vendored code under any third_party/ directory is exempt.
    if THIRD_PARTY_DIR_NAME in str(path).split(os.sep):
      continue
    filename = os.path.basename(path)
    extension = os.path.splitext(path)[1]
    needs_license = (filename in _CHECK_LICENSE_FILENAMES or
                     extension in _CHECK_LICENSE_EXTENSIONS)
    if not needs_license:
      continue

    with open(path) as file_handle:
      contents = file_handle.read()
    if _LICENSE_STRING not in contents:
      print('Missing license header in file %s.' % str(path))
      success = False

  return success
|
| 340 |
+
|
| 341 |
+
|
| 342 |
+
def bool_to_returncode(success):
  """Returns 0 if |success|. Otherwise returns 1."""
  status, code = ('Success.', 0) if success else ('Failed.', 1)
  print(status)
  return code
|
| 350 |
+
|
| 351 |
+
|
| 352 |
+
def is_nonfuzzer_python(path):
  """Returns True if |path| is a Python file outside any /projects/ dir."""
  _, extension = os.path.splitext(path)
  return extension == '.py' and '/projects/' not in path
|
| 355 |
+
|
| 356 |
+
|
| 357 |
+
def lint(_=None):
  """Runs python's linter on infra. Returns False if it fails linting."""
  # Use --score no to make linting quieter.
  command = ['python3', '-m', 'pylint', '--score', 'no', '-j', '0', 'infra']
  result = subprocess.run(command, check=False)
  return result.returncode == 0
|
| 364 |
+
|
| 365 |
+
|
| 366 |
+
def yapf(paths, validate=True):
  """Does yapf on |path| if it is Python file. Only validates format if
  |validate|. Otherwise, formats the file. Returns False if validation or
  formatting fails."""
  python_paths = [path for path in paths if is_nonfuzzer_python(path)]
  if not python_paths:
    return True

  # '-d' prints a diff (validation only); '-i' rewrites files in place.
  mode_flag = '-d' if validate else '-i'
  command = ['yapf', mode_flag, '-p', *python_paths]

  return subprocess.run(command, check=False).returncode == 0
|
| 380 |
+
|
| 381 |
+
|
| 382 |
+
def get_changed_files():
  """Returns a list of absolute paths of files changed in this git branch."""
  branch_commit_hash = subprocess.check_output(
      ['git', 'merge-base', 'HEAD', 'origin/HEAD']).strip().decode()

  diff_commands = [
      # Return list of modified files in the commits on this branch.
      ['git', 'diff', '--name-only', branch_commit_hash + '..'],
      # Return list of modified files from uncommitted changes.
      ['git', 'diff', '--name-only']
  ]

  changed_files = set()
  for command in diff_commands:
    output = subprocess.check_output(command).decode()
    for file_path in output.splitlines():
      # Deleted files appear in the diff but no longer exist on disk.
      if os.path.isfile(file_path):
        changed_files.add(file_path)
  print(f'Changed files: {" ".join(changed_files)}')
  return [os.path.abspath(f) for f in changed_files]
|
| 403 |
+
|
| 404 |
+
|
| 405 |
+
def run_build_tests():
  """Runs build tests because they can't be run in parallel."""
  build_dir = os.path.join(_SRC_ROOT, 'infra', 'build')
  suite = unittest.TestSuite(
      [unittest.TestLoader().discover(build_dir, pattern='*_test.py')])
  print('Running build tests.')
  result = unittest.TextTestRunner().run(suite)
  return not result.failures and not result.errors
|
| 415 |
+
|
| 416 |
+
|
| 417 |
+
def run_nonbuild_tests(parallel):
  """Runs all tests but build tests. Does them in parallel if |parallel|. The
  reason why we exclude build tests is because they use an emulator that
  prevents them from being used in parallel."""
  # We look for all project directories because otherwise pytest won't run tests
  # that are not in valid modules (e.g. "base-images").
  relevant_dirs = {os.path.dirname(file_path) for file_path in get_all_files()}

  # Use ignore-glob because ignore doesn't seem to work properly with the way we
  # pass directories to pytest.
  command = [
      'pytest',
      '--ignore-glob=infra/build/*',
      '--ignore-glob=projects/*',
  ]
  if parallel:
    command.extend(['-n', 'auto'])
  command.extend(relevant_dirs)
  print('Running non-build tests.')

  # TODO(metzman): Get rid of this once config_utils stops using it.
  env = os.environ.copy()
  env['CIFUZZ_TEST'] = '1'

  return subprocess.run(command, check=False, env=env).returncode == 0
|
| 446 |
+
|
| 447 |
+
|
| 448 |
+
def run_tests(_=None, parallel=False, build_tests=True, nonbuild_tests=True):
  """Runs all unit tests."""
  nonbuild_success = True
  build_success = True

  # Non-build tests first, mirroring the order checks are reported in.
  if not nonbuild_tests:
    print('Skipping nonbuild tests as specified.')
  else:
    nonbuild_success = run_nonbuild_tests(parallel)

  if not build_tests:
    print('Skipping build tests as specified.')
  else:
    build_success = run_build_tests()

  return nonbuild_success and build_success
|
| 463 |
+
|
| 464 |
+
|
| 465 |
+
def run_systemsan_tests(_=None):
  """Runs SystemSan unit tests."""
  # The SystemSan tests are driven by its Makefile, not pytest.
  result = subprocess.run(['make', 'test'],
                          cwd='infra/experimental/SystemSan',
                          check=False)
  return result.returncode == 0
|
| 471 |
+
|
| 472 |
+
|
| 473 |
+
def get_all_files():
  """Returns a list of absolute paths of files in this repo."""
  # `git ls-files` reports tracked paths; filter out any that were deleted
  # from the working tree without being committed.
  tracked_paths = subprocess.check_output(['git',
                                           'ls-files']).decode().splitlines()
  return [
      os.path.abspath(path) for path in tracked_paths if os.path.isfile(path)
  ]
|
| 478 |
+
|
| 479 |
+
|
| 480 |
+
def main():
  """Check changes on a branch for common issues before submitting."""
  # Get program arguments.
  parser = argparse.ArgumentParser(description='Presubmit script for oss-fuzz.')
  parser.add_argument(
      'command',
      choices=['format', 'lint', 'license', 'infra-tests', 'systemsan-tests'],
      nargs='?')
  parser.add_argument('-a',
                      '--all-files',
                      action='store_true',
                      default=False,
                      help='Run presubmit check(s) on all files')
  parser.add_argument('-p',
                      '--parallel',
                      action='store_true',
                      default=False,
                      help='Run tests in parallel.')
  parser.add_argument('-s',
                      '--skip-build-tests',
                      action='store_true',
                      default=False,
                      help='Skip build tests which are slow and must run '
                      'sequentially.')
  parser.add_argument('-n',
                      '--skip-nonbuild-tests',
                      action='store_true',
                      default=False,
                      help='Only do build tests.')
  args = parser.parse_args()

  # Determine the file set before changing directory, since changed-file
  # discovery runs git relative to the current checkout.
  if args.all_files:
    relevant_files = get_all_files()
  else:
    relevant_files = get_changed_files()

  os.chdir(_SRC_ROOT)

  # Do one specific check if the user asked for it.
  command_handlers = {
      'format':
          lambda: yapf(relevant_files, False),
      'lint':
          lint,
      'license':
          lambda: check_license(relevant_files),
      'infra-tests':
          lambda: run_tests(relevant_files,
                            parallel=args.parallel,
                            build_tests=(not args.skip_build_tests),
                            nonbuild_tests=(not args.skip_nonbuild_tests)),
      'systemsan-tests':
          lambda: run_systemsan_tests(relevant_files),
  }
  handler = command_handlers.get(args.command)
  if handler is not None:
    return bool_to_returncode(handler())

  # Do all the checks (but no tests).
  return bool_to_returncode(do_checks(relevant_files))
|
| 546 |
+
|
| 547 |
+
|
| 548 |
+
if __name__ == '__main__':
  # Use main()'s return value as the process exit code.
  sys.exit(main())
|
local-test-commons-compress-full-01-vuln_1/fuzz-tooling/infra/retry.py
ADDED
|
@@ -0,0 +1,106 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2020 Google LLC
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
"""Retry decorator. Copied from ClusterFuzz source."""
|
| 15 |
+
|
| 16 |
+
import functools
|
| 17 |
+
import inspect
|
| 18 |
+
import logging
|
| 19 |
+
import sys
|
| 20 |
+
import time
|
| 21 |
+
|
| 22 |
+
# pylint: disable=too-many-arguments,broad-except
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
def sleep(seconds):
  """Invoke time.sleep via a thin wrapper. Kept as a separate function to
  avoid the flakiness of calling time.sleep directly. See: crbug.com/770375"""
  time.sleep(seconds)
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
def get_delay(num_try, delay, backoff):
  """Compute backoff delay: the base |delay| grows by a factor of |backoff|
  for each attempt after the first."""
  exponent = num_try - 1
  return delay * backoff**exponent
|
| 34 |
+
|
| 35 |
+
|
| 36 |
+
def wrap(retries,
         delay,
         backoff=2,
         exception_type=Exception,
         retry_on_false=False):
  """Retry decorator for a function.

  Args:
    retries: Number of retries after the first attempt (so up to retries + 1
        total calls).
    delay: Base delay in seconds before the first retry.
    backoff: Multiplier applied to the delay for each subsequent retry.
    exception_type: Only exceptions matching this type trigger a retry; any
        other exception is re-raised immediately.
    retry_on_false: Also retry when the function returns a falsy value
        (not supported for generator functions).
  """

  assert delay > 0
  assert backoff >= 1
  assert retries >= 0

  def decorator(func):
    """Decorator for the given function."""
    tries = retries + 1
    is_generator = inspect.isgeneratorfunction(func)
    # Label used in log messages; marks generators explicitly.
    function_with_type = func.__qualname__
    if is_generator:
      function_with_type += ' (generator)'

    def handle_retry(num_try, exception=None):
      """Handle retry. Returns True if the caller should retry, False if it
      should give up. Sleeps with exponential backoff before returning True.

      Note: the log messages read sys.exc_info(), so they show the in-flight
      exception when called from an except block, and None when called for a
      falsy return value (retry_on_false path)."""
      if (exception is None or
          isinstance(exception, exception_type)) and num_try < tries:
        logging.info('Retrying on %s failed with %s. Retrying again.',
                     function_with_type,
                     sys.exc_info()[1])
        sleep(get_delay(num_try, delay, backoff))
        return True

      # Either the exception isn't retryable or attempts are exhausted.
      logging.error('Retrying on %s failed with %s. Raise.', function_with_type,
                    sys.exc_info()[1])
      return False

    @functools.wraps(func)
    def _wrapper(*args, **kwargs):
      """Regular function wrapper."""
      for num_try in range(1, tries + 1):
        try:
          result = func(*args, **kwargs)
          if retry_on_false and not result:
            if not handle_retry(num_try):
              # Out of retries: return the falsy result as-is.
              return result

            continue
          return result
        except Exception as error:
          if not handle_retry(num_try, exception=error):
            raise

    @functools.wraps(func)
    def _generator_wrapper(*args, **kwargs):
      """Generator function wrapper. On failure, the generator is restarted
      from scratch and elements already yielded to the consumer are skipped,
      so each element is delivered at most once."""
      # This argument is not applicable for generator functions.
      assert not retry_on_false
      already_yielded_element_count = 0
      for num_try in range(1, tries + 1):
        try:
          for index, result in enumerate(func(*args, **kwargs)):
            # Skip elements the consumer already received on a prior attempt.
            if index >= already_yielded_element_count:
              yield result
              already_yielded_element_count += 1
          break
        except Exception as error:
          if not handle_retry(num_try, exception=error):
            raise

    if is_generator:
      return _generator_wrapper
    return _wrapper

  return decorator
|