Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- local-test-tika-delta-02/fuzz-tooling/infra/base-images/README.md +6 -0
- local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder-fuzzbench/fuzzbench_build +28 -0
- local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder-fuzzbench/fuzzbench_install_dependencies +22 -0
- local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder-go/Dockerfile +33 -0
- local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder-go/ossfuzz_coverage_runner.go +80 -0
- local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder-python/Dockerfile +20 -0
- local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/bash_parser.py +235 -0
- local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/bisect_clang_test.py +294 -0
- local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/compile_fuzztests.sh +126 -0
- local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/compile_go_fuzzer +69 -0
- local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/compile_native_go_fuzzer +60 -0
- local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/debug_afl +40 -0
- local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/detect_repo_test.py +121 -0
- local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/install_deps.sh +44 -0
- local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/install_go.sh +43 -0
- local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/install_java.sh +31 -0
- local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/install_ruby.sh +25 -0
- local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/install_rust.sh +22 -0
- local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/install_swift.sh +67 -0
- local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/precompile_afl +35 -0
- local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/precompile_centipede +49 -0
- local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/precompile_honggfuzz +45 -0
- local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/python_coverage_helper.py +120 -0
- local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/srcmap +66 -0
- local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/write_labels.py +40 -0
- local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-image/Dockerfile +46 -0
- local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner-debug/Dockerfile +26 -0
- local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/Dockerfile +139 -0
- local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/README.md +31 -0
- local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/bad_build_check +494 -0
- local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/coverage +549 -0
- local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/coverage_helper +17 -0
- local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/download_corpus +30 -0
- local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/generate_differential_cov_report.py +228 -0
- local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/install_deps.sh +37 -0
- local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/install_go.sh +41 -0
- local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/install_java.sh +46 -0
- local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/install_javascript.sh +30 -0
- local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/jacoco_report_converter.py +174 -0
- local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/nyc_report_converter.py +80 -0
- local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/parse_options.py +58 -0
- local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/profraw_update.py +182 -0
- local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/python_coverage_runner_help.py +181 -0
- local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/rcfilt +21 -0
- local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/reproduce +34 -0
- local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/run_fuzzer +228 -0
- local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/ruzzy +19 -0
- local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/targets_list +10 -0
- local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/test_all.py +295 -0
- local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/test_all_test.py +38 -0
local-test-tika-delta-02/fuzz-tooling/infra/base-images/README.md
ADDED
|
@@ -0,0 +1,6 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Building all infra images:
|
| 2 |
+
|
| 3 |
+
```bash
|
| 4 |
+
# run from project root
|
| 5 |
+
infra/base-images/all.sh
|
| 6 |
+
```
|
local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder-fuzzbench/fuzzbench_build
ADDED
|
@@ -0,0 +1,28 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#! /bin/bash -eux
|
| 2 |
+
# Copyright 2023 Google LLC
|
| 3 |
+
#
|
| 4 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 5 |
+
# you may not use this file except in compliance with the License.
|
| 6 |
+
# You may obtain a copy of the License at
|
| 7 |
+
#
|
| 8 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 9 |
+
#
|
| 10 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 11 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 12 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 13 |
+
# See the License for the specific language governing permissions and
|
| 14 |
+
# limitations under the License.
|
| 15 |
+
#
|
| 16 |
+
################################################################################
|
| 17 |
+
|
| 18 |
+
# TODO(metzman): Do this in a docket image so we don't need to waste time
|
| 19 |
+
# reinstalling.
|
| 20 |
+
PYTHONPATH=$FUZZBENCH_PATH python3 -B -u -c "from fuzzers.$FUZZING_ENGINE import fuzzer; fuzzer.build()"
|
| 21 |
+
|
| 22 |
+
if [ "$FUZZING_ENGINE" = "coverage" ]; then
|
| 23 |
+
cd $OUT
|
| 24 |
+
mkdir -p filestore/oss-fuzz-on-demand/coverage-binaries
|
| 25 |
+
# We expect an error regarding leading slashes. Just assume this step succeeds.
|
| 26 |
+
# TODO(metzman): Fix this when I get a chance.
|
| 27 |
+
tar -czvf filestore/oss-fuzz-on-demand/coverage-binaries/coverage-build-$PROJECT.tar.gz * /src /work || exit 0
|
| 28 |
+
fi
|
local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder-fuzzbench/fuzzbench_install_dependencies
ADDED
|
@@ -0,0 +1,22 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#! /bin/bash -eux
|
| 2 |
+
# Copyright 2023 Google LLC
|
| 3 |
+
#
|
| 4 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 5 |
+
# you may not use this file except in compliance with the License.
|
| 6 |
+
# You may obtain a copy of the License at
|
| 7 |
+
#
|
| 8 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 9 |
+
#
|
| 10 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 11 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 12 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 13 |
+
# See the License for the specific language governing permissions and
|
| 14 |
+
# limitations under the License.
|
| 15 |
+
#
|
| 16 |
+
################################################################################
|
| 17 |
+
|
| 18 |
+
apt-get update && apt-get install -y gcc gfortran python-dev libopenblas-dev liblapack-dev cython libpq-dev
|
| 19 |
+
wget -O /tmp/requirements.txt https://raw.githubusercontent.com/google/fuzzbench/master/requirements.txt
|
| 20 |
+
pip3 install pip --upgrade
|
| 21 |
+
CFLAGS= CXXFLAGS= pip3 install -r /tmp/requirements.txt
|
| 22 |
+
rm /tmp/requirements.txt
|
local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder-go/Dockerfile
ADDED
|
@@ -0,0 +1,33 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2021 Google LLC
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
#
|
| 15 |
+
################################################################################
|
| 16 |
+
|
| 17 |
+
ARG IMG_TAG=latest
|
| 18 |
+
FROM ghcr.io/aixcc-finals/base-builder:${IMG_TAG}
|
| 19 |
+
|
| 20 |
+
# Set up Golang environment variables (copied from /root/.bash_profile).
|
| 21 |
+
ENV GOPATH /root/go
|
| 22 |
+
|
| 23 |
+
# /root/.go/bin is for the standard Go binaries (i.e. go, gofmt, etc).
|
| 24 |
+
# $GOPATH/bin is for the binaries from the dependencies installed via "go get".
|
| 25 |
+
ENV PATH $PATH:/root/.go/bin:$GOPATH/bin
|
| 26 |
+
|
| 27 |
+
COPY gosigfuzz.c $GOPATH/gosigfuzz/
|
| 28 |
+
|
| 29 |
+
RUN install_go.sh
|
| 30 |
+
|
| 31 |
+
# TODO(jonathanmetzman): Install this file using install_go.sh.
|
| 32 |
+
COPY ossfuzz_coverage_runner.go \
|
| 33 |
+
$GOPATH/
|
local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder-go/ossfuzz_coverage_runner.go
ADDED
|
@@ -0,0 +1,80 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
// Copyright 2020 Google LLC
|
| 2 |
+
//
|
| 3 |
+
// Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
// you may not use this file except in compliance with the License.
|
| 5 |
+
// You may obtain a copy of the License at
|
| 6 |
+
//
|
| 7 |
+
// http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
//
|
| 9 |
+
// Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
// distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
// See the License for the specific language governing permissions and
|
| 13 |
+
// limitations under the License.
|
| 14 |
+
|
| 15 |
+
package mypackagebeingfuzzed
|
| 16 |
+
|
| 17 |
+
import (
|
| 18 |
+
"io/fs"
|
| 19 |
+
"io/ioutil"
|
| 20 |
+
"os"
|
| 21 |
+
"path/filepath"
|
| 22 |
+
"runtime/pprof"
|
| 23 |
+
"testing"
|
| 24 |
+
)
|
| 25 |
+
|
| 26 |
+
func TestFuzzCorpus(t *testing.T) {
|
| 27 |
+
dir := os.Getenv("FUZZ_CORPUS_DIR")
|
| 28 |
+
if dir == "" {
|
| 29 |
+
t.Logf("No fuzzing corpus directory set")
|
| 30 |
+
return
|
| 31 |
+
}
|
| 32 |
+
filename := ""
|
| 33 |
+
defer func() {
|
| 34 |
+
if r := recover(); r != nil {
|
| 35 |
+
t.Error("Fuzz panicked in "+filename, r)
|
| 36 |
+
}
|
| 37 |
+
}()
|
| 38 |
+
profname := os.Getenv("FUZZ_PROFILE_NAME")
|
| 39 |
+
if profname != "" {
|
| 40 |
+
f, err := os.Create(profname + ".cpu.prof")
|
| 41 |
+
if err != nil {
|
| 42 |
+
t.Logf("error creating profile file %s\n", err)
|
| 43 |
+
} else {
|
| 44 |
+
_ = pprof.StartCPUProfile(f)
|
| 45 |
+
}
|
| 46 |
+
}
|
| 47 |
+
_, err := ioutil.ReadDir(dir)
|
| 48 |
+
if err != nil {
|
| 49 |
+
t.Logf("Not fuzzing corpus directory %s", err)
|
| 50 |
+
return
|
| 51 |
+
}
|
| 52 |
+
// recurse for regressions subdirectory
|
| 53 |
+
err = filepath.Walk(dir, func(fname string, info fs.FileInfo, err error) error {
|
| 54 |
+
if info.IsDir() {
|
| 55 |
+
return nil
|
| 56 |
+
}
|
| 57 |
+
data, err := ioutil.ReadFile(fname)
|
| 58 |
+
if err != nil {
|
| 59 |
+
t.Error("Failed to read corpus file", err)
|
| 60 |
+
return err
|
| 61 |
+
}
|
| 62 |
+
filename = fname
|
| 63 |
+
FuzzFunction(data)
|
| 64 |
+
return nil
|
| 65 |
+
})
|
| 66 |
+
if err != nil {
|
| 67 |
+
t.Error("Failed to run corpus", err)
|
| 68 |
+
}
|
| 69 |
+
if profname != "" {
|
| 70 |
+
pprof.StopCPUProfile()
|
| 71 |
+
f, err := os.Create(profname + ".heap.prof")
|
| 72 |
+
if err != nil {
|
| 73 |
+
t.Logf("error creating heap profile file %s\n", err)
|
| 74 |
+
}
|
| 75 |
+
if err = pprof.WriteHeapProfile(f); err != nil {
|
| 76 |
+
t.Logf("error writing heap profile file %s\n", err)
|
| 77 |
+
}
|
| 78 |
+
f.Close()
|
| 79 |
+
}
|
| 80 |
+
}
|
local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder-python/Dockerfile
ADDED
|
@@ -0,0 +1,20 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2021 Google LLC
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
#
|
| 15 |
+
################################################################################
|
| 16 |
+
|
| 17 |
+
ARG IMG_TAG=latest
|
| 18 |
+
FROM ghcr.io/aixcc-finals/base-builder:${IMG_TAG}
|
| 19 |
+
|
| 20 |
+
RUN install_python.sh
|
local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/bash_parser.py
ADDED
|
@@ -0,0 +1,235 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/usr/bin/python3
|
| 2 |
+
# Copyright 2024 Google LLC
|
| 3 |
+
#
|
| 4 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 5 |
+
# you may not use this file except in compliance with the License.
|
| 6 |
+
# You may obtain a copy of the License at
|
| 7 |
+
#
|
| 8 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 9 |
+
#
|
| 10 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 11 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 12 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 13 |
+
# See the License for the specific language governing permissions and
|
| 14 |
+
# limitations under the License.
|
| 15 |
+
|
| 16 |
+
import os
|
| 17 |
+
import sys
|
| 18 |
+
|
| 19 |
+
from glob import glob
|
| 20 |
+
|
| 21 |
+
import bashlex
|
| 22 |
+
|
| 23 |
+
|
| 24 |
+
def find_all_bash_scripts_in_src():
|
| 25 |
+
"""Finds all bash scripts that exist in SRC/. This is used to idenfiy scripts
|
| 26 |
+
that may be needed for reading during the AST parsing. This is the case
|
| 27 |
+
when a given build script calls another build script, then we need to
|
| 28 |
+
read those."""
|
| 29 |
+
all_local_scripts = [
|
| 30 |
+
y for x in os.walk('/src/') for y in glob(os.path.join(x[0], '*.sh'))
|
| 31 |
+
]
|
| 32 |
+
scripts_we_care_about = []
|
| 33 |
+
to_ignore = {'aflplusplus', 'honggfuzz', '/fuzztest', '/centipede'}
|
| 34 |
+
for s in all_local_scripts:
|
| 35 |
+
if any([x for x in to_ignore if x in s]):
|
| 36 |
+
continue
|
| 37 |
+
scripts_we_care_about.append(s)
|
| 38 |
+
|
| 39 |
+
print(scripts_we_care_about)
|
| 40 |
+
return scripts_we_care_about
|
| 41 |
+
|
| 42 |
+
|
| 43 |
+
def should_discard_command(ast_tree) -> bool:
|
| 44 |
+
"""Returns True if the command shuold be avoided, otherwise False"""
|
| 45 |
+
try:
|
| 46 |
+
first_word = ast_tree.parts[0].word
|
| 47 |
+
except: # pylint: disable=bare-except
|
| 48 |
+
return False
|
| 49 |
+
|
| 50 |
+
if ('cmake' in first_word and
|
| 51 |
+
any('--build' in part.word for part in ast_tree.parts)):
|
| 52 |
+
return False
|
| 53 |
+
|
| 54 |
+
cmds_to_avoid_replaying = {
|
| 55 |
+
'configure', 'autoheader', 'autoconf', 'autoreconf', 'cmake', 'autogen.sh'
|
| 56 |
+
}
|
| 57 |
+
if any([cmd for cmd in cmds_to_avoid_replaying if cmd in first_word]):
|
| 58 |
+
return True
|
| 59 |
+
|
| 60 |
+
# Avoid all "make clean" calls. We dont want to erase previously build
|
| 61 |
+
# files.
|
| 62 |
+
try:
|
| 63 |
+
second_word = ast_tree.parts[1].word
|
| 64 |
+
except: # pylint: disable=bare-except
|
| 65 |
+
return False
|
| 66 |
+
if 'make' in first_word and 'clean' in second_word:
|
| 67 |
+
return True
|
| 68 |
+
|
| 69 |
+
# No match was found to commands we dont want to build. There is no
|
| 70 |
+
# indication we shuold avoid.
|
| 71 |
+
return False
|
| 72 |
+
|
| 73 |
+
|
| 74 |
+
def is_local_redirection(ast_node, all_local_scripts):
|
| 75 |
+
"""Return the list of scripts corresponding to the command, in case
|
| 76 |
+
the command is an execution of a local script."""
|
| 77 |
+
# print("Checking")
|
| 78 |
+
|
| 79 |
+
# Capture local script called with ./random/path/build.sh
|
| 80 |
+
|
| 81 |
+
if len(ast_node.parts) >= 2:
|
| 82 |
+
try:
|
| 83 |
+
ast_node.parts[0].word
|
| 84 |
+
except:
|
| 85 |
+
return []
|
| 86 |
+
if ast_node.parts[0].word == '.':
|
| 87 |
+
suffixes_matching = []
|
| 88 |
+
#print(ast_node.parts[1].word)
|
| 89 |
+
for bash_script in all_local_scripts:
|
| 90 |
+
#print("- %s"%(bash_script))
|
| 91 |
+
cmd_to_exec = ast_node.parts[1].word.replace('$SRC', 'src')
|
| 92 |
+
if bash_script.endswith(cmd_to_exec):
|
| 93 |
+
suffixes_matching.append(bash_script)
|
| 94 |
+
#print(suffixes_matching)
|
| 95 |
+
return suffixes_matching
|
| 96 |
+
# Capture a local script called with $SRC/random/path/build.sh
|
| 97 |
+
if len(ast_node.parts) >= 1:
|
| 98 |
+
if '$SRC' in ast_node.parts[0].word:
|
| 99 |
+
suffixes_matching = []
|
| 100 |
+
print(ast_node.parts[0].word)
|
| 101 |
+
for bash_script in all_local_scripts:
|
| 102 |
+
print("- %s" % (bash_script))
|
| 103 |
+
cmd_to_exec = ast_node.parts[0].word.replace('$SRC', 'src')
|
| 104 |
+
if bash_script.endswith(cmd_to_exec):
|
| 105 |
+
suffixes_matching.append(bash_script)
|
| 106 |
+
print(suffixes_matching)
|
| 107 |
+
return suffixes_matching
|
| 108 |
+
|
| 109 |
+
return []
|
| 110 |
+
|
| 111 |
+
|
| 112 |
+
def handle_ast_command(ast_node, all_scripts_in_fs, raw_script):
|
| 113 |
+
"""Generate bash script string for command node"""
|
| 114 |
+
new_script = ''
|
| 115 |
+
if should_discard_command(ast_node):
|
| 116 |
+
return ''
|
| 117 |
+
|
| 118 |
+
matches = is_local_redirection(ast_node, all_scripts_in_fs)
|
| 119 |
+
if len(matches) == 1:
|
| 120 |
+
new_script += parse_script(matches[0], all_scripts_in_fs) + '\n'
|
| 121 |
+
return ''
|
| 122 |
+
|
| 123 |
+
# Extract the command from the script string
|
| 124 |
+
idx_start = ast_node.pos[0]
|
| 125 |
+
idx_end = ast_node.pos[1]
|
| 126 |
+
new_script += raw_script[idx_start:idx_end]
|
| 127 |
+
#new_script += '\n'
|
| 128 |
+
|
| 129 |
+
# If mkdir is used, then ensure that '-p' is provided, as
|
| 130 |
+
# otherwise we will run into failures. We don't have to worry
|
| 131 |
+
# about multiple uses of -p as `mkdir -p -p -p`` is valid.
|
| 132 |
+
new_script = new_script.replace('mkdir', 'mkdir -p')
|
| 133 |
+
return new_script
|
| 134 |
+
|
| 135 |
+
|
| 136 |
+
def handle_ast_list(ast_node, all_scripts_in_fs, raw_script):
|
| 137 |
+
"""Handles bashlex AST list."""
|
| 138 |
+
new_script = ''
|
| 139 |
+
try_hard = 1
|
| 140 |
+
|
| 141 |
+
if not try_hard:
|
| 142 |
+
list_start = ast_node.pos[0]
|
| 143 |
+
list_end = ast_node.pos[1]
|
| 144 |
+
new_script += raw_script[list_start:list_end] # + '\n'
|
| 145 |
+
else:
|
| 146 |
+
# This is more refined logic. Ideally, this should work, but it's a bit
|
| 147 |
+
# more intricate to get right due to e.g. white-space between positions
|
| 148 |
+
# and more extensive parsing needed. We don't neccesarily need this
|
| 149 |
+
# level of success rate for what we're trying to achieve, so am disabling
|
| 150 |
+
# this for now.
|
| 151 |
+
for part in ast_node.parts:
|
| 152 |
+
if part.kind == 'list':
|
| 153 |
+
new_script += handle_ast_list(part, all_scripts_in_fs, raw_script)
|
| 154 |
+
elif part.kind == 'command':
|
| 155 |
+
new_script += handle_ast_command(part, all_scripts_in_fs, raw_script)
|
| 156 |
+
else:
|
| 157 |
+
idx_start = part.pos[0]
|
| 158 |
+
idx_end = part.pos[1]
|
| 159 |
+
new_script += raw_script[idx_start:idx_end]
|
| 160 |
+
new_script += ' '
|
| 161 |
+
|
| 162 |
+
# Make sure what was created is valid syntax, and otherwise return empty
|
| 163 |
+
try:
|
| 164 |
+
bashlex.parse(new_script)
|
| 165 |
+
except: # pylint: disable=bare-except
|
| 166 |
+
# Maybe return the original here instead of skipping?
|
| 167 |
+
return ''
|
| 168 |
+
return new_script
|
| 169 |
+
|
| 170 |
+
|
| 171 |
+
def handle_ast_compound(ast_node, all_scripts_in_fs, raw_script):
|
| 172 |
+
"""Handles bashlex compound AST node."""
|
| 173 |
+
new_script = ''
|
| 174 |
+
list_start = ast_node.pos[0]
|
| 175 |
+
list_end = ast_node.pos[1]
|
| 176 |
+
new_script += raw_script[list_start:list_end] + '\n'
|
| 177 |
+
return new_script
|
| 178 |
+
|
| 179 |
+
|
| 180 |
+
def handle_node(ast_node, all_scripts_in_fs, build_script):
|
| 181 |
+
"""Generates a bash script string for a given node"""
|
| 182 |
+
if ast_node.kind == 'command':
|
| 183 |
+
return handle_ast_command(ast_node, all_scripts_in_fs, build_script)
|
| 184 |
+
elif ast_node.kind == 'list':
|
| 185 |
+
return handle_ast_list(ast_node, all_scripts_in_fs, build_script)
|
| 186 |
+
elif ast_node.kind == 'compound':
|
| 187 |
+
print('todo: handle compound')
|
| 188 |
+
return handle_ast_compound(ast_node, all_scripts_in_fs, build_script)
|
| 189 |
+
elif ast_node.kind == 'pipeline':
|
| 190 |
+
# Not supported
|
| 191 |
+
return ''
|
| 192 |
+
else:
|
| 193 |
+
raise Exception(f'Missing node handling: {ast_node.kind}')
|
| 194 |
+
|
| 195 |
+
|
| 196 |
+
def parse_script(bash_script, all_scripts) -> str:
|
| 197 |
+
"""Top-level bash script parser"""
|
| 198 |
+
new_script = ''
|
| 199 |
+
with open(bash_script, 'r', encoding='utf-8') as f:
|
| 200 |
+
build_script = f.read()
|
| 201 |
+
try:
|
| 202 |
+
parts = bashlex.parse(build_script)
|
| 203 |
+
except bashlex.errors.ParsingError:
|
| 204 |
+
return ''
|
| 205 |
+
for part in parts:
|
| 206 |
+
new_script += handle_node(part, all_scripts, build_script)
|
| 207 |
+
new_script += '\n'
|
| 208 |
+
print("-" * 45)
|
| 209 |
+
print(part.kind)
|
| 210 |
+
print(part.dump())
|
| 211 |
+
|
| 212 |
+
return new_script
|
| 213 |
+
|
| 214 |
+
|
| 215 |
+
def main():
|
| 216 |
+
"""Main function"""
|
| 217 |
+
all_scripts = find_all_bash_scripts_in_src()
|
| 218 |
+
replay_bash_script = parse_script(sys.argv[1], all_scripts)
|
| 219 |
+
|
| 220 |
+
print("REPLAYABLE BASH SCRIPT")
|
| 221 |
+
print("#" * 60)
|
| 222 |
+
print(replay_bash_script)
|
| 223 |
+
print("#" * 60)
|
| 224 |
+
|
| 225 |
+
out_dir = os.getenv('OUT', '/out')
|
| 226 |
+
with open(f'{out_dir}/replay-build-script.sh', 'w', encoding='utf-8') as f:
|
| 227 |
+
f.write(replay_bash_script)
|
| 228 |
+
|
| 229 |
+
src_dir = os.getenv('SRC', '/src')
|
| 230 |
+
with open(f'{src_dir}/replay_build.sh', 'w', encoding='utf-8') as f:
|
| 231 |
+
f.write(replay_bash_script)
|
| 232 |
+
|
| 233 |
+
|
| 234 |
+
if __name__ == "__main__":
|
| 235 |
+
main()
|
local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/bisect_clang_test.py
ADDED
|
@@ -0,0 +1,294 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2019 Google Inc.
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
#
|
| 15 |
+
################################################################################
|
| 16 |
+
"""Tests for bisect_clang.py"""
|
| 17 |
+
import os
|
| 18 |
+
from unittest import mock
|
| 19 |
+
import unittest
|
| 20 |
+
|
| 21 |
+
import bisect_clang
|
| 22 |
+
|
| 23 |
+
FILE_DIRECTORY = os.path.dirname(__file__)
|
| 24 |
+
LLVM_REPO_PATH = '/llvm-project'
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
def get_git_command(*args):
|
| 28 |
+
"""Returns a git command for the LLVM repo with |args| as arguments."""
|
| 29 |
+
return ['git', '-C', LLVM_REPO_PATH] + list(args)
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
def patch_environ(testcase_obj):
|
| 33 |
+
"""Patch environment."""
|
| 34 |
+
env = {}
|
| 35 |
+
patcher = mock.patch.dict(os.environ, env)
|
| 36 |
+
testcase_obj.addCleanup(patcher.stop)
|
| 37 |
+
patcher.start()
|
| 38 |
+
|
| 39 |
+
|
| 40 |
+
class BisectClangTestMixin: # pylint: disable=too-few-public-methods
|
| 41 |
+
"""Useful mixin for bisect_clang unittests."""
|
| 42 |
+
|
| 43 |
+
def setUp(self): # pylint: disable=invalid-name
|
| 44 |
+
"""Initialization method for unittests."""
|
| 45 |
+
patch_environ(self)
|
| 46 |
+
os.environ['SRC'] = '/src'
|
| 47 |
+
os.environ['WORK'] = '/work'
|
| 48 |
+
|
| 49 |
+
|
| 50 |
+
class GetClangBuildEnvTest(BisectClangTestMixin, unittest.TestCase):
|
| 51 |
+
"""Tests for get_clang_build_env."""
|
| 52 |
+
|
| 53 |
+
def test_cflags(self):
|
| 54 |
+
"""Test that CFLAGS are not used compiling clang."""
|
| 55 |
+
os.environ['CFLAGS'] = 'blah'
|
| 56 |
+
self.assertNotIn('CFLAGS', bisect_clang.get_clang_build_env())
|
| 57 |
+
|
| 58 |
+
def test_cxxflags(self):
|
| 59 |
+
"""Test that CXXFLAGS are not used compiling clang."""
|
| 60 |
+
os.environ['CXXFLAGS'] = 'blah'
|
| 61 |
+
self.assertNotIn('CXXFLAGS', bisect_clang.get_clang_build_env())
|
| 62 |
+
|
| 63 |
+
def test_other_variables(self):
|
| 64 |
+
"""Test that other env vars are used when compiling clang."""
|
| 65 |
+
key = 'other'
|
| 66 |
+
value = 'blah'
|
| 67 |
+
os.environ[key] = value
|
| 68 |
+
self.assertEqual(value, bisect_clang.get_clang_build_env()[key])
|
| 69 |
+
|
| 70 |
+
|
| 71 |
+
def read_test_data(filename):
|
| 72 |
+
"""Returns data from |filename| in the test_data directory."""
|
| 73 |
+
with open(os.path.join(FILE_DIRECTORY, 'test_data', filename)) as file_handle:
|
| 74 |
+
return file_handle.read()
|
| 75 |
+
|
| 76 |
+
|
| 77 |
+
class SearchBisectOutputTest(BisectClangTestMixin, unittest.TestCase):
|
| 78 |
+
"""Tests for search_bisect_output."""
|
| 79 |
+
|
| 80 |
+
def test_search_bisect_output(self):
|
| 81 |
+
"""Test that search_bisect_output finds the responsible commit when one
|
| 82 |
+
exists."""
|
| 83 |
+
test_data = read_test_data('culprit-commit.txt')
|
| 84 |
+
self.assertEqual('ac9ee01fcbfac745aaedca0393a8e1c8a33acd8d',
|
| 85 |
+
bisect_clang.search_bisect_output(test_data))
|
| 86 |
+
|
| 87 |
+
def test_search_bisect_output_none(self):
|
| 88 |
+
"""Test that search_bisect_output doesnt find a non-existent culprit
|
| 89 |
+
commit."""
|
| 90 |
+
self.assertIsNone(bisect_clang.search_bisect_output('hello'))
|
| 91 |
+
|
| 92 |
+
|
| 93 |
+
def create_mock_popen(
|
| 94 |
+
output=bytes('', 'utf-8'), err=bytes('', 'utf-8'), returncode=0):
|
| 95 |
+
"""Creates a mock subprocess.Popen."""
|
| 96 |
+
|
| 97 |
+
class MockPopen:
|
| 98 |
+
"""Mock subprocess.Popen."""
|
| 99 |
+
commands = []
|
| 100 |
+
testcases_written = []
|
| 101 |
+
|
| 102 |
+
def __init__(self, command, *args, **kwargs): # pylint: disable=unused-argument
|
| 103 |
+
"""Inits the MockPopen."""
|
| 104 |
+
stdout = kwargs.pop('stdout', None)
|
| 105 |
+
self.command = command
|
| 106 |
+
self.commands.append(command)
|
| 107 |
+
self.stdout = None
|
| 108 |
+
self.stderr = None
|
| 109 |
+
self.returncode = returncode
|
| 110 |
+
if hasattr(stdout, 'write'):
|
| 111 |
+
self.stdout = stdout
|
| 112 |
+
|
| 113 |
+
def communicate(self, input_data=None): # pylint: disable=unused-argument
|
| 114 |
+
"""Mock subprocess.Popen.communicate."""
|
| 115 |
+
if self.stdout:
|
| 116 |
+
self.stdout.write(output)
|
| 117 |
+
|
| 118 |
+
if self.stderr:
|
| 119 |
+
self.stderr.write(err)
|
| 120 |
+
|
| 121 |
+
return output, err
|
| 122 |
+
|
| 123 |
+
def poll(self, input_data=None): # pylint: disable=unused-argument
|
| 124 |
+
"""Mock subprocess.Popen.poll."""
|
| 125 |
+
return self.returncode
|
| 126 |
+
|
| 127 |
+
return MockPopen
|
| 128 |
+
|
| 129 |
+
|
| 130 |
+
def mock_prepare_build_impl(llvm_project_path): # pylint: disable=unused-argument
|
| 131 |
+
"""Mocked prepare_build function."""
|
| 132 |
+
return '/work/llvm-build'
|
| 133 |
+
|
| 134 |
+
|
| 135 |
+
class BuildClangTest(BisectClangTestMixin, unittest.TestCase):
|
| 136 |
+
"""Tests for build_clang."""
|
| 137 |
+
|
| 138 |
+
def test_build_clang_test(self):
|
| 139 |
+
"""Tests that build_clang works as intended."""
|
| 140 |
+
with mock.patch('subprocess.Popen', create_mock_popen()) as mock_popen:
|
| 141 |
+
with mock.patch('bisect_clang.prepare_build', mock_prepare_build_impl):
|
| 142 |
+
llvm_src_dir = '/src/llvm-project'
|
| 143 |
+
bisect_clang.build_clang(llvm_src_dir)
|
| 144 |
+
self.assertEqual([['ninja', '-C', '/work/llvm-build', 'install']],
|
| 145 |
+
mock_popen.commands)
|
| 146 |
+
|
| 147 |
+
|
| 148 |
+
class GitRepoTest(BisectClangTestMixin, unittest.TestCase):
|
| 149 |
+
"""Tests for GitRepo."""
|
| 150 |
+
|
| 151 |
+
# TODO(metzman): Mock filesystem. Until then, use a real directory.
|
| 152 |
+
|
| 153 |
+
def setUp(self):
|
| 154 |
+
super().setUp()
|
| 155 |
+
self.git = bisect_clang.GitRepo(LLVM_REPO_PATH)
|
| 156 |
+
self.good_commit = 'good_commit'
|
| 157 |
+
self.bad_commit = 'bad_commit'
|
| 158 |
+
self.test_command = 'testcommand'
|
| 159 |
+
|
| 160 |
+
def test_do_command(self):
|
| 161 |
+
"""Test do_command creates a new process as intended."""
|
| 162 |
+
# TODO(metzman): Test directory changing behavior.
|
| 163 |
+
command = ['subcommand', '--option']
|
| 164 |
+
with mock.patch('subprocess.Popen', create_mock_popen()) as mock_popen:
|
| 165 |
+
self.git.do_command(command)
|
| 166 |
+
self.assertEqual([get_git_command('subcommand', '--option')],
|
| 167 |
+
mock_popen.commands)
|
| 168 |
+
|
| 169 |
+
def _test_test_start_commit_unexpected(self, label, commit, returncode):
|
| 170 |
+
"""Tests test_start_commit works as intended when the test returns an
|
| 171 |
+
unexpected value."""
|
| 172 |
+
|
| 173 |
+
def mock_execute_impl(command, *args, **kwargs): # pylint: disable=unused-argument
|
| 174 |
+
if command == self.test_command:
|
| 175 |
+
return returncode, '', ''
|
| 176 |
+
return 0, '', ''
|
| 177 |
+
|
| 178 |
+
with mock.patch('bisect_clang.execute', mock_execute_impl):
|
| 179 |
+
with mock.patch('bisect_clang.prepare_build', mock_prepare_build_impl):
|
| 180 |
+
with self.assertRaises(bisect_clang.BisectError):
|
| 181 |
+
self.git.test_start_commit(commit, label, self.test_command)
|
| 182 |
+
|
| 183 |
+
def test_test_start_commit_bad_zero(self):
|
| 184 |
+
"""Tests test_start_commit works as intended when the test on the first bad
|
| 185 |
+
commit returns 0."""
|
| 186 |
+
self._test_test_start_commit_unexpected('bad', self.bad_commit, 0)
|
| 187 |
+
|
| 188 |
+
def test_test_start_commit_good_nonzero(self):
|
| 189 |
+
"""Tests test_start_commit works as intended when the test on the first good
|
| 190 |
+
commit returns nonzero."""
|
| 191 |
+
self._test_test_start_commit_unexpected('good', self.good_commit, 1)
|
| 192 |
+
|
| 193 |
+
def test_test_start_commit_good_zero(self):
|
| 194 |
+
"""Tests test_start_commit works as intended when the test on the first good
|
| 195 |
+
commit returns 0."""
|
| 196 |
+
self._test_test_start_commit_expected('good', self.good_commit, 0) # pylint: disable=no-value-for-parameter
|
| 197 |
+
|
| 198 |
+
@mock.patch('bisect_clang.build_clang')
|
| 199 |
+
def _test_test_start_commit_expected(self, label, commit, returncode,
|
| 200 |
+
mock_build_clang):
|
| 201 |
+
"""Tests test_start_commit works as intended when the test returns an
|
| 202 |
+
expected value."""
|
| 203 |
+
command_args = []
|
| 204 |
+
|
| 205 |
+
def mock_execute_impl(command, *args, **kwargs): # pylint: disable=unused-argument
|
| 206 |
+
command_args.append(command)
|
| 207 |
+
if command == self.test_command:
|
| 208 |
+
return returncode, '', ''
|
| 209 |
+
return 0, '', ''
|
| 210 |
+
|
| 211 |
+
with mock.patch('bisect_clang.execute', mock_execute_impl):
|
| 212 |
+
self.git.test_start_commit(commit, label, self.test_command)
|
| 213 |
+
self.assertEqual([
|
| 214 |
+
get_git_command('checkout', commit), self.test_command,
|
| 215 |
+
get_git_command('bisect', label)
|
| 216 |
+
], command_args)
|
| 217 |
+
mock_build_clang.assert_called_once_with(LLVM_REPO_PATH)
|
| 218 |
+
|
| 219 |
+
def test_test_start_commit_bad_nonzero(self):
|
| 220 |
+
"""Tests test_start_commit works as intended when the test on the first bad
|
| 221 |
+
commit returns nonzero."""
|
| 222 |
+
self._test_test_start_commit_expected('bad', self.bad_commit, 1) # pylint: disable=no-value-for-parameter
|
| 223 |
+
|
| 224 |
+
@mock.patch('bisect_clang.GitRepo.test_start_commit')
|
| 225 |
+
def test_bisect_start(self, mock_test_start_commit):
|
| 226 |
+
"""Tests bisect_start works as intended."""
|
| 227 |
+
with mock.patch('subprocess.Popen', create_mock_popen()) as mock_popen:
|
| 228 |
+
self.git.bisect_start(self.good_commit, self.bad_commit,
|
| 229 |
+
self.test_command)
|
| 230 |
+
self.assertEqual(get_git_command('bisect', 'start'),
|
| 231 |
+
mock_popen.commands[0])
|
| 232 |
+
mock_test_start_commit.assert_has_calls([
|
| 233 |
+
mock.call('bad_commit', 'bad', 'testcommand'),
|
| 234 |
+
mock.call('good_commit', 'good', 'testcommand')
|
| 235 |
+
])
|
| 236 |
+
|
| 237 |
+
def test_do_bisect_command(self):
|
| 238 |
+
"""Test do_bisect_command executes a git bisect subcommand as intended."""
|
| 239 |
+
subcommand = 'subcommand'
|
| 240 |
+
with mock.patch('subprocess.Popen', create_mock_popen()) as mock_popen:
|
| 241 |
+
self.git.do_bisect_command(subcommand)
|
| 242 |
+
self.assertEqual([get_git_command('bisect', subcommand)],
|
| 243 |
+
mock_popen.commands)
|
| 244 |
+
|
| 245 |
+
@mock.patch('bisect_clang.build_clang')
|
| 246 |
+
def _test_test_commit(self, label, output, returncode, mock_build_clang):
|
| 247 |
+
"""Test test_commit works as intended."""
|
| 248 |
+
command_args = []
|
| 249 |
+
|
| 250 |
+
def mock_execute_impl(command, *args, **kwargs): # pylint: disable=unused-argument
|
| 251 |
+
command_args.append(command)
|
| 252 |
+
if command == self.test_command:
|
| 253 |
+
return returncode, output, ''
|
| 254 |
+
return 0, output, ''
|
| 255 |
+
|
| 256 |
+
with mock.patch('bisect_clang.execute', mock_execute_impl):
|
| 257 |
+
result = self.git.test_commit(self.test_command)
|
| 258 |
+
self.assertEqual([self.test_command,
|
| 259 |
+
get_git_command('bisect', label)], command_args)
|
| 260 |
+
mock_build_clang.assert_called_once_with(LLVM_REPO_PATH)
|
| 261 |
+
return result
|
| 262 |
+
|
| 263 |
+
def test_test_commit_good(self):
|
| 264 |
+
"""Test test_commit labels a good commit as good."""
|
| 265 |
+
self.assertIsNone(self._test_test_commit('good', '', 0)) # pylint: disable=no-value-for-parameter
|
| 266 |
+
|
| 267 |
+
def test_test_commit_bad(self):
|
| 268 |
+
"""Test test_commit labels a bad commit as bad."""
|
| 269 |
+
self.assertIsNone(self._test_test_commit('bad', '', 1)) # pylint: disable=no-value-for-parameter
|
| 270 |
+
|
| 271 |
+
def test_test_commit_culprit(self):
|
| 272 |
+
"""Test test_commit returns the culprit"""
|
| 273 |
+
test_data = read_test_data('culprit-commit.txt')
|
| 274 |
+
self.assertEqual('ac9ee01fcbfac745aaedca0393a8e1c8a33acd8d',
|
| 275 |
+
self._test_test_commit('good', test_data, 0)) # pylint: disable=no-value-for-parameter
|
| 276 |
+
|
| 277 |
+
|
| 278 |
+
class GetTargetArchToBuildTest(unittest.TestCase):
|
| 279 |
+
"""Tests for get_target_arch_to_build."""
|
| 280 |
+
|
| 281 |
+
def test_unrecognized(self):
|
| 282 |
+
"""Test that an unrecognized architecture raises an exception."""
|
| 283 |
+
with mock.patch('bisect_clang.execute') as mock_execute:
|
| 284 |
+
mock_execute.return_value = (None, 'mips', None)
|
| 285 |
+
with self.assertRaises(Exception):
|
| 286 |
+
bisect_clang.get_clang_target_arch()
|
| 287 |
+
|
| 288 |
+
def test_recognized(self):
|
| 289 |
+
"""Test that a recognized architecture returns the expected value."""
|
| 290 |
+
arch_pairs = {'x86_64': 'X86', 'aarch64': 'AArch64'}
|
| 291 |
+
for uname_result, clang_target in arch_pairs.items():
|
| 292 |
+
with mock.patch('bisect_clang.execute') as mock_execute:
|
| 293 |
+
mock_execute.return_value = (None, uname_result, None)
|
| 294 |
+
self.assertEqual(clang_target, bisect_clang.get_clang_target_arch())
|
local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/compile_fuzztests.sh
ADDED
|
@@ -0,0 +1,126 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/bin/bash -eu
|
| 2 |
+
# Copyright 2022 Google LLC
|
| 3 |
+
#
|
| 4 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 5 |
+
# you may not use this file except in compliance with the License.
|
| 6 |
+
# You may obtain a copy of the License at
|
| 7 |
+
#
|
| 8 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 9 |
+
#
|
| 10 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 11 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 12 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 13 |
+
# See the License for the specific language governing permissions and
|
| 14 |
+
# limitations under the License.
|
| 15 |
+
#
|
| 16 |
+
################################################################################
|
| 17 |
+
|
| 18 |
+
set -x
|
| 19 |
+
|
| 20 |
+
# In order to identify fuzztest test case "bazel query" is used to search
|
| 21 |
+
# the project. A search of the entire project is done with a default "...",
|
| 22 |
+
# however, some projects may fail to, or have very long processing time, if
|
| 23 |
+
# searching the entire project. Additionally, it may include fuzzers in
|
| 24 |
+
# dependencies, which should not be build as part of a given project.
|
| 25 |
+
# Tensorflow is an example project that will fail when the entire project is
|
| 26 |
+
# queried. FUZZTEST_TARGET_FOLDER makes it posible to specify the folder
|
| 27 |
+
# where fuzztest fuzzers should be search for. FUZZTEST_TARGET_FOLDER is passed
|
| 28 |
+
# to "bazel query" below.
|
| 29 |
+
if [[ ${FUZZTEST_TARGET_FOLDER:-"unset"} == "unset" ]];
|
| 30 |
+
then
|
| 31 |
+
export TARGET_FOLDER="..."
|
| 32 |
+
else
|
| 33 |
+
TARGET_FOLDER=${FUZZTEST_TARGET_FOLDER}
|
| 34 |
+
fi
|
| 35 |
+
|
| 36 |
+
BUILD_ARGS="--config=oss-fuzz --subcommands"
|
| 37 |
+
if [[ ${FUZZTEST_EXTRA_ARGS:-"unset"} != "unset" ]];
|
| 38 |
+
then
|
| 39 |
+
BUILD_ARGS="$BUILD_ARGS ${FUZZTEST_EXTRA_ARGS}"
|
| 40 |
+
fi
|
| 41 |
+
|
| 42 |
+
# Trigger setup_configs rule of fuzztest as it generates the necessary
|
| 43 |
+
# configuration file based on OSS-Fuzz environment variables.
|
| 44 |
+
bazel run @com_google_fuzztest//bazel:setup_configs >> /etc/bazel.bazelrc
|
| 45 |
+
|
| 46 |
+
# Bazel target names of the fuzz binaries.
|
| 47 |
+
FUZZ_TEST_BINARIES=$(bazel query "kind(\"cc_test\", rdeps(${TARGET_FOLDER}, @com_google_fuzztest//fuzztest:fuzztest_gtest_main))")
|
| 48 |
+
|
| 49 |
+
# Bazel output paths of the fuzz binaries.
|
| 50 |
+
FUZZ_TEST_BINARIES_OUT_PATHS=$(bazel cquery "kind(\"cc_test\", rdeps(${TARGET_FOLDER}, @com_google_fuzztest//fuzztest:fuzztest_gtest_main))" --output=files)
|
| 51 |
+
|
| 52 |
+
# Build the project and fuzz binaries
|
| 53 |
+
# Expose `FUZZTEST_EXTRA_TARGETS` environment variable, in the event a project
|
| 54 |
+
# includes non-FuzzTest fuzzers then this can be used to compile these in the
|
| 55 |
+
# same `bazel build` command as when building the FuzzTest fuzzers.
|
| 56 |
+
# This is to avoid having to call `bazel build` twice.
|
| 57 |
+
bazel build $BUILD_ARGS -- ${FUZZ_TEST_BINARIES[*]} ${FUZZTEST_EXTRA_TARGETS:-}
|
| 58 |
+
|
| 59 |
+
# Iterate the fuzz binaries and list each fuzz entrypoint in the binary. For
|
| 60 |
+
# each entrypoint create a wrapper script that calls into the binaries the
|
| 61 |
+
# given entrypoint as argument.
|
| 62 |
+
# The scripts will be named:
|
| 63 |
+
# {binary_name}@{fuzztest_entrypoint}
|
| 64 |
+
for fuzz_main_file in $FUZZ_TEST_BINARIES_OUT_PATHS; do
|
| 65 |
+
FUZZ_TESTS=$($fuzz_main_file --list_fuzz_tests)
|
| 66 |
+
cp ${fuzz_main_file} $OUT/
|
| 67 |
+
fuzz_basename=$(basename $fuzz_main_file)
|
| 68 |
+
chmod -x $OUT/$fuzz_basename
|
| 69 |
+
for fuzz_entrypoint in $FUZZ_TESTS; do
|
| 70 |
+
TARGET_FUZZER="${fuzz_basename}@$fuzz_entrypoint"
|
| 71 |
+
|
| 72 |
+
# Write executer script
|
| 73 |
+
echo "#!/bin/sh
|
| 74 |
+
# LLVMFuzzerTestOneInput for fuzzer detection.
|
| 75 |
+
this_dir=\$(dirname \"\$0\")
|
| 76 |
+
chmod +x \$this_dir/$fuzz_basename
|
| 77 |
+
\$this_dir/$fuzz_basename --fuzz=$fuzz_entrypoint -- \$@" > $OUT/$TARGET_FUZZER
|
| 78 |
+
chmod +x $OUT/$TARGET_FUZZER
|
| 79 |
+
done
|
| 80 |
+
done
|
| 81 |
+
|
| 82 |
+
# Synchronise coverage directory to bazel output artifacts. This is a
|
| 83 |
+
# best-effort basis in that it will include source code in common
|
| 84 |
+
# bazel output folders.
|
| 85 |
+
# For projects that store results in non-standard folders or want to
|
| 86 |
+
# manage what code to include in the coverage report more specifically,
|
| 87 |
+
# the FUZZTEST_DO_SYNC environment variable is made available. Projects
|
| 88 |
+
# can then implement a custom way of synchronising source code with the
|
| 89 |
+
# coverage build. Set FUZZTEST_DO_SYNC to something other than "yes" and
|
| 90 |
+
# no effort will be made to automatically synchronise the source code with
|
| 91 |
+
# the code coverage visualisation utility.
|
| 92 |
+
if [[ "$SANITIZER" = "coverage" && ${FUZZTEST_DO_SYNC:-"yes"} == "yes" ]]
|
| 93 |
+
then
|
| 94 |
+
# Synchronize bazel source files to coverage collection.
|
| 95 |
+
declare -r REMAP_PATH="${OUT}/proc/self/cwd"
|
| 96 |
+
mkdir -p "${REMAP_PATH}"
|
| 97 |
+
|
| 98 |
+
# Synchronize the folder bazel-BAZEL_OUT_PROJECT.
|
| 99 |
+
declare -r RSYNC_FILTER_ARGS=("--include" "*.h" "--include" "*.cc" "--include" \
|
| 100 |
+
"*.hpp" "--include" "*.cpp" "--include" "*.c" "--include" "*/" "--include" "*.inc" \
|
| 101 |
+
"--exclude" "*")
|
| 102 |
+
|
| 103 |
+
project_folders="$(find . -name 'bazel-*' -type l -printf '%P\n' | \
|
| 104 |
+
grep -v -x -F \
|
| 105 |
+
-e 'bazel-bin' \
|
| 106 |
+
-e 'bazel-testlogs')"
|
| 107 |
+
for link in $project_folders; do
|
| 108 |
+
if [[ -d "${PWD}"/$link/external ]]
|
| 109 |
+
then
|
| 110 |
+
rsync -avLk "${RSYNC_FILTER_ARGS[@]}" "${PWD}"/$link/external "${REMAP_PATH}"
|
| 111 |
+
fi
|
| 112 |
+
# k8-opt is a common path for storing bazel output artifacts, e.g. bazel-out/k8-opt.
|
| 113 |
+
# It's the output folder for default amd-64 builds, but projects may specify custom
|
| 114 |
+
# platform output directories, see: https://github.com/bazelbuild/bazel/issues/13818
|
| 115 |
+
# We support the default at the moment, and if a project needs custom synchronizing of
|
| 116 |
+
# output artifacts and code coverage we currently recommend using FUZZTEST_DO_SYNC.
|
| 117 |
+
if [[ -d "${PWD}"/$link/k8-opt ]]
|
| 118 |
+
then
|
| 119 |
+
rsync -avLk "${RSYNC_FILTER_ARGS[@]}" "${PWD}"/$link/k8-opt "${REMAP_PATH}"/$link
|
| 120 |
+
fi
|
| 121 |
+
done
|
| 122 |
+
|
| 123 |
+
# Delete symlinks and sync the current folder.
|
| 124 |
+
find . -type l -ls -delete
|
| 125 |
+
rsync -av ${PWD}/ "${REMAP_PATH}"
|
| 126 |
+
fi
|
local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/compile_go_fuzzer
ADDED
|
@@ -0,0 +1,69 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/bin/bash -eu
|
| 2 |
+
# Copyright 2020 Google Inc.
|
| 3 |
+
#
|
| 4 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 5 |
+
# you may not use this file except in compliance with the License.
|
| 6 |
+
# You may obtain a copy of the License at
|
| 7 |
+
#
|
| 8 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 9 |
+
#
|
| 10 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 11 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 12 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 13 |
+
# See the License for the specific language governing permissions and
|
| 14 |
+
# limitations under the License.
|
| 15 |
+
#
|
| 16 |
+
################################################################################
|
| 17 |
+
|
| 18 |
+
path=$1
|
| 19 |
+
function=$2
|
| 20 |
+
fuzzer=$3
|
| 21 |
+
tags="-tags gofuzz"
|
| 22 |
+
if [[ $# -eq 4 ]]; then
|
| 23 |
+
tags="-tags $4"
|
| 24 |
+
fi
|
| 25 |
+
|
| 26 |
+
# makes directory change temporary
|
| 27 |
+
(
|
| 28 |
+
cd $GOPATH/src/$path || true
|
| 29 |
+
# in the case we are in the right directory, with go.mod but no go.sum
|
| 30 |
+
go mod tidy || true
|
| 31 |
+
# project was downloaded with go get if go list fails
|
| 32 |
+
go list $tags $path || { cd $GOPATH/pkg/mod/ && cd `echo $path | cut -d/ -f1-3 | awk '{print $1"@*"}'`; } || cd -
|
| 33 |
+
# project does not have go.mod if go list fails again
|
| 34 |
+
go list $tags $path || { go mod init $path && go mod tidy ;}
|
| 35 |
+
|
| 36 |
+
if [[ $SANITIZER = *coverage* ]]; then
|
| 37 |
+
fuzzed_package=`go list $tags -f '{{.Name}}' $path`
|
| 38 |
+
abspath=`go list $tags -f {{.Dir}} $path`
|
| 39 |
+
cd $abspath
|
| 40 |
+
cp $GOPATH/ossfuzz_coverage_runner.go ./"${function,,}"_test.go
|
| 41 |
+
sed -i -e 's/FuzzFunction/'$function'/' ./"${function,,}"_test.go
|
| 42 |
+
sed -i -e 's/mypackagebeingfuzzed/'$fuzzed_package'/' ./"${function,,}"_test.go
|
| 43 |
+
sed -i -e 's/TestFuzzCorpus/Test'$function'Corpus/' ./"${function,,}"_test.go
|
| 44 |
+
|
| 45 |
+
# The repo is the module path/name, which is already created above in case it doesn't exist,
|
| 46 |
+
# but not always the same as the module path. This is necessary to handle SIV properly.
|
| 47 |
+
fuzzed_repo=$(go list $tags -f {{.Module}} "$path")
|
| 48 |
+
abspath_repo=`go list -m $tags -f {{.Dir}} $fuzzed_repo || go list $tags -f {{.Dir}} $fuzzed_repo`
|
| 49 |
+
# give equivalence to absolute paths in another file, as go test -cover uses golangish pkg.Dir
|
| 50 |
+
echo "s=$fuzzed_repo"="$abspath_repo"= > $OUT/$fuzzer.gocovpath
|
| 51 |
+
# Additional packages for which to get coverage.
|
| 52 |
+
pkgaddcov=""
|
| 53 |
+
# to prevent bash from failing about unbound variable
|
| 54 |
+
GO_COV_ADD_PKG_SET=${GO_COV_ADD_PKG:-}
|
| 55 |
+
if [[ -n "${GO_COV_ADD_PKG_SET}" ]]; then
|
| 56 |
+
pkgaddcov=","$GO_COV_ADD_PKG
|
| 57 |
+
abspath_repo=`go list -m $tags -f {{.Dir}} $GO_COV_ADD_PKG || go list $tags -f {{.Dir}} $GO_COV_ADD_PKG`
|
| 58 |
+
echo "s=^$GO_COV_ADD_PKG"="$abspath_repo"= >> $OUT/$fuzzer.gocovpath
|
| 59 |
+
fi
|
| 60 |
+
go test -run Test${function}Corpus -v $tags -coverpkg $fuzzed_repo/...$pkgaddcov -c -o $OUT/$fuzzer $path
|
| 61 |
+
else
|
| 62 |
+
# Compile and instrument all Go files relevant to this fuzz target.
|
| 63 |
+
echo "Running go-fuzz $tags -func $function -o $fuzzer.a $path"
|
| 64 |
+
go-fuzz $tags -func $function -o $fuzzer.a $path
|
| 65 |
+
|
| 66 |
+
# Link Go code ($fuzzer.a) with fuzzing engine to produce fuzz target binary.
|
| 67 |
+
$CXX $CXXFLAGS $LIB_FUZZING_ENGINE $fuzzer.a -o $OUT/$fuzzer
|
| 68 |
+
fi
|
| 69 |
+
)
|
local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/compile_native_go_fuzzer
ADDED
|
@@ -0,0 +1,60 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/bin/bash -eu
|
| 2 |
+
# Copyright 2022 Google LLC
|
| 3 |
+
#
|
| 4 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 5 |
+
# you may not use this file except in compliance with the License.
|
| 6 |
+
# You may obtain a copy of the License at
|
| 7 |
+
#
|
| 8 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 9 |
+
#
|
| 10 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 11 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 12 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 13 |
+
# See the License for the specific language governing permissions and
|
| 14 |
+
# limitations under the License.
|
| 15 |
+
#
|
| 16 |
+
################################################################################
|
| 17 |
+
|
| 18 |
+
function build_native_go_fuzzer() {
|
| 19 |
+
fuzzer=$1
|
| 20 |
+
function=$2
|
| 21 |
+
path=$3
|
| 22 |
+
tags="-tags gofuzz"
|
| 23 |
+
|
| 24 |
+
if [[ $SANITIZER == *coverage* ]]; then
|
| 25 |
+
current_dir=$(pwd)
|
| 26 |
+
mkdir $OUT/rawfuzzers || true
|
| 27 |
+
cd $abs_file_dir
|
| 28 |
+
go test $tags -c -run $fuzzer -o $OUT/$fuzzer -cover
|
| 29 |
+
cp "${fuzzer_filename}" "${OUT}/rawfuzzers/${fuzzer}"
|
| 30 |
+
|
| 31 |
+
fuzzed_repo=$(go list $tags -f {{.Module}} "$path")
|
| 32 |
+
abspath_repo=`go list -m $tags -f {{.Dir}} $fuzzed_repo || go list $tags -f {{.Dir}} $fuzzed_repo`
|
| 33 |
+
# give equivalence to absolute paths in another file, as go test -cover uses golangish pkg.Dir
|
| 34 |
+
echo "s=$fuzzed_repo"="$abspath_repo"= > $OUT/$fuzzer.gocovpath
|
| 35 |
+
|
| 36 |
+
cd $current_dir
|
| 37 |
+
else
|
| 38 |
+
go-118-fuzz-build $tags -o $fuzzer.a -func $function $abs_file_dir
|
| 39 |
+
$CXX $CXXFLAGS $LIB_FUZZING_ENGINE $fuzzer.a -o $OUT/$fuzzer
|
| 40 |
+
fi
|
| 41 |
+
}
|
| 42 |
+
|
| 43 |
+
path=$1
|
| 44 |
+
function=$2
|
| 45 |
+
fuzzer=$3
|
| 46 |
+
tags="-tags gofuzz"
|
| 47 |
+
|
| 48 |
+
# Get absolute path.
|
| 49 |
+
abs_file_dir=$(go list $tags -f {{.Dir}} $path)
|
| 50 |
+
|
| 51 |
+
# TODO(adamkorcz): Get rid of "-r" flag here.
|
| 52 |
+
fuzzer_filename=$(grep -r -l --include='*.go' -s "$function" "${abs_file_dir}")
|
| 53 |
+
|
| 54 |
+
# Test if file contains a line with "func $function" and "testing.F".
|
| 55 |
+
if [ $(grep -r "func $function" $fuzzer_filename | grep "testing.F" | wc -l) -eq 1 ]
|
| 56 |
+
then
|
| 57 |
+
build_native_go_fuzzer $fuzzer $function $abs_file_dir
|
| 58 |
+
else
|
| 59 |
+
echo "Could not find the function: func ${function}(f *testing.F)"
|
| 60 |
+
fi
|
local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/debug_afl
ADDED
|
@@ -0,0 +1,40 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/bin/bash
|
| 2 |
+
# Copyright 2021 Google LLC
|
| 3 |
+
#
|
| 4 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 5 |
+
# you may not use this file except in compliance with the License.
|
| 6 |
+
# You may obtain a copy of the License at
|
| 7 |
+
#
|
| 8 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 9 |
+
#
|
| 10 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 11 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 12 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 13 |
+
# See the License for the specific language governing permissions and
|
| 14 |
+
# limitations under the License.
|
| 15 |
+
#
|
| 16 |
+
################################################################################
|
| 17 |
+
|
| 18 |
+
# Source this file for afl++ debug sessions.
|
| 19 |
+
apt-get update
|
| 20 |
+
apt-get install -y strace gdb vim joe psmisc
|
| 21 |
+
|
| 22 |
+
pushd $SRC/aflplusplus > /dev/null
|
| 23 |
+
git checkout dev
|
| 24 |
+
git pull
|
| 25 |
+
test -n "$1" && { git checkout "$1" ; git pull ; }
|
| 26 |
+
CFLAGS_SAVE="$CFLAGS"
|
| 27 |
+
CXXFLAGS_SAVE="$CXXFLAGS"
|
| 28 |
+
unset CFLAGS
|
| 29 |
+
unset CXXFLAGS
|
| 30 |
+
make
|
| 31 |
+
export CFLAGS="$CFLAGS_SAVE"
|
| 32 |
+
export CXXFLAGS="$CXXFLAGS_SAVE"
|
| 33 |
+
popd > /dev/null
|
| 34 |
+
|
| 35 |
+
export ASAN_OPTIONS="detect_leaks=0:symbolize=0:detect_odr_violation=0:abort_on_error=1"
|
| 36 |
+
export AFL_LLVM_LAF_ALL=1
|
| 37 |
+
export AFL_LLVM_CMPLOG=1
|
| 38 |
+
touch "$OUT/afl_cmplog.txt"
|
| 39 |
+
export AFL_LLVM_DICT2FILE=$OUT/afl++.dict
|
| 40 |
+
ulimit -c unlimited
|
local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/detect_repo_test.py
ADDED
|
@@ -0,0 +1,121 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2019 Google LLC
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
"""Test the functionality of the detect_repo module.
|
| 15 |
+
This will consist of the following functional test:
|
| 16 |
+
1. Determine if an OSS-Fuzz projects main repo can be detected from example
|
| 17 |
+
commits.
|
| 18 |
+
2. Determine if an OSS-Fuzz project main repo can be detected from a
|
| 19 |
+
repo name.
|
| 20 |
+
"""
|
| 21 |
+
import os
|
| 22 |
+
import re
|
| 23 |
+
import sys
|
| 24 |
+
import tempfile
|
| 25 |
+
import unittest
|
| 26 |
+
from unittest import mock
|
| 27 |
+
|
| 28 |
+
import detect_repo
|
| 29 |
+
|
| 30 |
+
# Appending to path for access to repo_manager module.
|
| 31 |
+
# pylint: disable=wrong-import-position
|
| 32 |
+
sys.path.append(
|
| 33 |
+
os.path.dirname(os.path.dirname(os.path.dirname(
|
| 34 |
+
os.path.abspath(__file__)))))
|
| 35 |
+
import repo_manager
|
| 36 |
+
import test_repos
|
| 37 |
+
# pylint: enable=wrong-import-position
|
| 38 |
+
|
| 39 |
+
|
| 40 |
+
class TestCheckForRepoName(unittest.TestCase):
|
| 41 |
+
"""Tests for check_for_repo_name."""
|
| 42 |
+
|
| 43 |
+
@mock.patch('os.path.exists', return_value=True)
|
| 44 |
+
@mock.patch('detect_repo.execute',
|
| 45 |
+
return_value=('https://github.com/google/syzkaller/', None))
|
| 46 |
+
def test_go_get_style_url(self, _, __):
|
| 47 |
+
"""Tests that check_for_repo_name works on repos that were downloaded using
|
| 48 |
+
go get."""
|
| 49 |
+
self.assertTrue(detect_repo.check_for_repo_name('fake-path', 'syzkaller'))
|
| 50 |
+
|
| 51 |
+
@mock.patch('os.path.exists', return_value=True)
|
| 52 |
+
@mock.patch('detect_repo.execute',
|
| 53 |
+
return_value=('https://github.com/google/syzkaller', None))
|
| 54 |
+
def test_missing_git_and_slash_url(self, _, __):
|
| 55 |
+
"""Tests that check_for_repo_name works on repos who's URLs do not end in
|
| 56 |
+
".git" or "/"."""
|
| 57 |
+
self.assertTrue(detect_repo.check_for_repo_name('fake-path', 'syzkaller'))
|
| 58 |
+
|
| 59 |
+
@mock.patch('os.path.exists', return_value=True)
|
| 60 |
+
@mock.patch('detect_repo.execute',
|
| 61 |
+
return_value=('https://github.com/google/syzkaller.git', None))
|
| 62 |
+
def test_normal_style_repo_url(self, _, __):
|
| 63 |
+
"""Tests that check_for_repo_name works on normally cloned repos."""
|
| 64 |
+
self.assertTrue(detect_repo.check_for_repo_name('fake-path', 'syzkaller'))
|
| 65 |
+
|
| 66 |
+
|
| 67 |
+
@unittest.skipIf(not os.getenv('INTEGRATION_TESTS'),
|
| 68 |
+
'INTEGRATION_TESTS=1 not set')
|
| 69 |
+
class DetectRepoIntegrationTest(unittest.TestCase):
|
| 70 |
+
"""Class to test the functionality of the detect_repo module."""
|
| 71 |
+
|
| 72 |
+
def test_infer_main_repo_from_commit(self):
|
| 73 |
+
"""Tests that the main repo can be inferred based on an example commit."""
|
| 74 |
+
|
| 75 |
+
with tempfile.TemporaryDirectory() as tmp_dir:
|
| 76 |
+
# Construct example repo's to check for commits.
|
| 77 |
+
for test_repo in test_repos.TEST_REPOS:
|
| 78 |
+
repo_manager.clone_repo_and_get_manager(test_repo.git_url, tmp_dir)
|
| 79 |
+
self.check_with_repo(test_repo.git_url,
|
| 80 |
+
test_repo.git_repo_name,
|
| 81 |
+
tmp_dir,
|
| 82 |
+
commit=test_repo.old_commit)
|
| 83 |
+
|
| 84 |
+
def test_infer_main_repo_from_name(self):
|
| 85 |
+
"""Tests that the main project repo can be inferred from a repo name."""
|
| 86 |
+
with tempfile.TemporaryDirectory() as tmp_dir:
|
| 87 |
+
for test_repo in test_repos.TEST_REPOS:
|
| 88 |
+
repo_manager.clone_repo_and_get_manager(test_repo.git_url, tmp_dir)
|
| 89 |
+
self.check_with_repo(test_repo.git_url, test_repo.git_repo_name,
|
| 90 |
+
tmp_dir)
|
| 91 |
+
|
| 92 |
+
def check_with_repo(self, repo_origin, repo_name, tmp_dir, commit=None):
|
| 93 |
+
"""Checks the detect repo's main method for a specific set of inputs.
|
| 94 |
+
|
| 95 |
+
Args:
|
| 96 |
+
repo_origin: URL of the git repo.
|
| 97 |
+
repo_name: The name of the directory it is cloned to.
|
| 98 |
+
tmp_dir: The location of the directory of git repos to be searched.
|
| 99 |
+
commit: The commit that should be used to look up the repo.
|
| 100 |
+
"""
|
| 101 |
+
command = ['python3', 'detect_repo.py', '--src_dir', tmp_dir]
|
| 102 |
+
|
| 103 |
+
if commit:
|
| 104 |
+
command += ['--example_commit', commit]
|
| 105 |
+
else:
|
| 106 |
+
command += ['--repo_name', repo_name]
|
| 107 |
+
|
| 108 |
+
out, _ = detect_repo.execute(command,
|
| 109 |
+
location=os.path.dirname(
|
| 110 |
+
os.path.realpath(__file__)))
|
| 111 |
+
match = re.search(r'\bDetected repo: ([^ ]+) ([^ ]+)', out.rstrip())
|
| 112 |
+
if match and match.group(1) and match.group(2):
|
| 113 |
+
self.assertEqual(match.group(1), repo_origin)
|
| 114 |
+
self.assertEqual(match.group(2), os.path.join(tmp_dir, repo_name))
|
| 115 |
+
else:
|
| 116 |
+
self.assertIsNone(repo_origin)
|
| 117 |
+
self.assertIsNone(repo_name)
|
| 118 |
+
|
| 119 |
+
|
| 120 |
+
if __name__ == '__main__':
|
| 121 |
+
unittest.main()
|
local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/install_deps.sh
ADDED
|
@@ -0,0 +1,44 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/bin/bash -eux
|
| 2 |
+
# Copyright 2022 Google LLC
|
| 3 |
+
#
|
| 4 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 5 |
+
# you may not use this file except in compliance with the License.
|
| 6 |
+
# You may obtain a copy of the License at
|
| 7 |
+
#
|
| 8 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 9 |
+
#
|
| 10 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 11 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 12 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 13 |
+
# See the License for the specific language governing permissions and
|
| 14 |
+
# limitations under the License.
|
| 15 |
+
#
|
| 16 |
+
################################################################################
|
| 17 |
+
|
| 18 |
+
# Install base-builder's dependencies in a architecture-aware way.
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
case $(uname -m) in
|
| 22 |
+
x86_64)
|
| 23 |
+
dpkg --add-architecture i386
|
| 24 |
+
;;
|
| 25 |
+
esac
|
| 26 |
+
|
| 27 |
+
apt-get update && \
|
| 28 |
+
apt-get install -y \
|
| 29 |
+
binutils-dev \
|
| 30 |
+
build-essential \
|
| 31 |
+
curl \
|
| 32 |
+
wget \
|
| 33 |
+
git \
|
| 34 |
+
jq \
|
| 35 |
+
patchelf \
|
| 36 |
+
rsync \
|
| 37 |
+
subversion \
|
| 38 |
+
zip
|
| 39 |
+
|
| 40 |
+
case $(uname -m) in
|
| 41 |
+
x86_64)
|
| 42 |
+
apt-get install -y libc6-dev-i386
|
| 43 |
+
;;
|
| 44 |
+
esac
|
local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/install_go.sh
ADDED
|
@@ -0,0 +1,43 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/bin/bash -eux
|
| 2 |
+
# Copyright 2021 Google LLC
|
| 3 |
+
#
|
| 4 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 5 |
+
# you may not use this file except in compliance with the License.
|
| 6 |
+
# You may obtain a copy of the License at
|
| 7 |
+
#
|
| 8 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 9 |
+
#
|
| 10 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 11 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 12 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 13 |
+
# See the License for the specific language governing permissions and
|
| 14 |
+
# limitations under the License.
|
| 15 |
+
#
|
| 16 |
+
################################################################################
|
| 17 |
+
|
| 18 |
+
cd /tmp
|
| 19 |
+
|
| 20 |
+
wget https://go.dev/dl/go1.23.4.linux-amd64.tar.gz
|
| 21 |
+
mkdir temp-go
|
| 22 |
+
tar -C temp-go/ -xzf go1.23.4.linux-amd64.tar.gz
|
| 23 |
+
|
| 24 |
+
mkdir /root/.go/
|
| 25 |
+
mv temp-go/go/* /root/.go/
|
| 26 |
+
rm -rf temp-go
|
| 27 |
+
|
| 28 |
+
echo 'Set "GOPATH=/root/go"'
|
| 29 |
+
echo 'Set "PATH=$PATH:/root/.go/bin:$GOPATH/bin"'
|
| 30 |
+
|
| 31 |
+
go install github.com/mdempsky/go114-fuzz-build@latest
|
| 32 |
+
ln -s $GOPATH/bin/go114-fuzz-build $GOPATH/bin/go-fuzz
|
| 33 |
+
|
| 34 |
+
# Build signal handler
|
| 35 |
+
if [ -f "$GOPATH/gosigfuzz/gosigfuzz.c" ]; then
|
| 36 |
+
clang -c $GOPATH/gosigfuzz/gosigfuzz.c -o $GOPATH/gosigfuzz/gosigfuzz.o
|
| 37 |
+
fi
|
| 38 |
+
|
| 39 |
+
cd /tmp
|
| 40 |
+
git clone https://github.com/AdamKorcz/go-118-fuzz-build
|
| 41 |
+
cd go-118-fuzz-build
|
| 42 |
+
go build
|
| 43 |
+
mv go-118-fuzz-build $GOPATH/bin/
|
local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/install_java.sh
ADDED
|
@@ -0,0 +1,31 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/bin/bash -eux
|
| 2 |
+
# Copyright 2021 Google LLC
|
| 3 |
+
#
|
| 4 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 5 |
+
# you may not use this file except in compliance with the License.
|
| 6 |
+
# You may obtain a copy of the License at
|
| 7 |
+
#
|
| 8 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 9 |
+
#
|
| 10 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 11 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 12 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 13 |
+
# See the License for the specific language governing permissions and
|
| 14 |
+
# limitations under the License.
|
| 15 |
+
#
|
| 16 |
+
################################################################################
|
| 17 |
+
|
| 18 |
+
# Install OpenJDK 17 and trim its size by removing unused components. This enables using Jazzer's mutation framework.
|
| 19 |
+
cd /tmp
|
| 20 |
+
curl --silent -L -O https://download.java.net/java/GA/jdk17.0.2/dfd4a8d0985749f896bed50d7138ee7f/8/GPL/openjdk-17.0.2_linux-x64_bin.tar.gz && \
|
| 21 |
+
mkdir -p $JAVA_HOME
|
| 22 |
+
tar -xz --strip-components=1 -f openjdk-17.0.2_linux-x64_bin.tar.gz --directory $JAVA_HOME && \
|
| 23 |
+
rm -f openjdk-17.0.2_linux-x64_bin.tar.gz
|
| 24 |
+
rm -rf $JAVA_HOME/jmods $JAVA_HOME/lib/src.zip
|
| 25 |
+
|
| 26 |
+
# Install OpenJDK 15 and trim its size by removing unused components. Some projects only run with Java 15.
|
| 27 |
+
curl --silent -L -O https://download.java.net/java/GA/jdk15.0.2/0d1cfde4252546c6931946de8db48ee2/7/GPL/openjdk-15.0.2_linux-x64_bin.tar.gz && \
|
| 28 |
+
mkdir -p $JAVA_15_HOME
|
| 29 |
+
tar -xz --strip-components=1 -f openjdk-15.0.2_linux-x64_bin.tar.gz --directory $JAVA_15_HOME && \
|
| 30 |
+
rm -f openjdk-15.0.2_linux-x64_bin.tar.gz
|
| 31 |
+
rm -rf $JAVA_15_HOME/jmods $JAVA_15_HOME/lib/src.zip
|
local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/install_ruby.sh
ADDED
|
@@ -0,0 +1,25 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/bin/bash
|
| 2 |
+
# Copyright 2024 Google LLC
|
| 3 |
+
#
|
| 4 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 5 |
+
# you may not use this file except in compliance with the License.
|
| 6 |
+
# You may obtain a copy of the License at
|
| 7 |
+
#
|
| 8 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 9 |
+
#
|
| 10 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 11 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 12 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 13 |
+
# See the License for the specific language governing permissions and
|
| 14 |
+
# limitations under the License.
|
| 15 |
+
#
|
| 16 |
+
################################################################################
|
| 17 |
+
|
| 18 |
+
apt update
|
| 19 |
+
apt install -y lsb-release software-properties-common gnupg2 binutils xz-utils libyaml-dev
|
| 20 |
+
gpg2 --keyserver keyserver.ubuntu.com --recv-keys 409B6B1796C275462A1703113804BB82D39DC0E3 7D2BAF1CF37B13E2069D6956105BD0E739499BDB
|
| 21 |
+
curl -sSL https://get.rvm.io | bash
|
| 22 |
+
|
| 23 |
+
. /etc/profile.d/rvm.sh
|
| 24 |
+
|
| 25 |
+
rvm install ruby-3.3.1
|
local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/install_rust.sh
ADDED
|
@@ -0,0 +1,22 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/bin/bash -eux
|
| 2 |
+
# Copyright 2021 Google LLC
|
| 3 |
+
#
|
| 4 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 5 |
+
# you may not use this file except in compliance with the License.
|
| 6 |
+
# You may obtain a copy of the License at
|
| 7 |
+
#
|
| 8 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 9 |
+
#
|
| 10 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 11 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 12 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 13 |
+
# See the License for the specific language governing permissions and
|
| 14 |
+
# limitations under the License.
|
| 15 |
+
#
|
| 16 |
+
################################################################################
|
| 17 |
+
|
| 18 |
+
curl https://sh.rustup.rs | sh -s -- -y --default-toolchain=$RUSTUP_TOOLCHAIN --profile=minimal
|
| 19 |
+
cargo install cargo-fuzz --locked && rm -rf /rust/registry
|
| 20 |
+
# Needed to recompile rust std library for MSAN
|
| 21 |
+
rustup component add rust-src
|
| 22 |
+
cp -r /usr/local/lib/x86_64-unknown-linux-gnu/* /usr/local/lib/
|
local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/install_swift.sh
ADDED
|
@@ -0,0 +1,67 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/bin/bash -eux
|
| 2 |
+
# Copyright 2021 Google LLC
|
| 3 |
+
#
|
| 4 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 5 |
+
# you may not use this file except in compliance with the License.
|
| 6 |
+
# You may obtain a copy of the License at
|
| 7 |
+
#
|
| 8 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 9 |
+
#
|
| 10 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 11 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 12 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 13 |
+
# See the License for the specific language governing permissions and
|
| 14 |
+
# limitations under the License.
|
| 15 |
+
#
|
| 16 |
+
################################################################################
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
SWIFT_PACKAGES="wget \
|
| 20 |
+
binutils \
|
| 21 |
+
git \
|
| 22 |
+
gnupg2 \
|
| 23 |
+
libc6-dev \
|
| 24 |
+
libcurl4 \
|
| 25 |
+
libedit2 \
|
| 26 |
+
libgcc-9-dev \
|
| 27 |
+
libpython2.7 \
|
| 28 |
+
libsqlite3-0 \
|
| 29 |
+
libstdc++-9-dev \
|
| 30 |
+
libxml2 \
|
| 31 |
+
libz3-dev \
|
| 32 |
+
pkg-config \
|
| 33 |
+
tzdata \
|
| 34 |
+
zlib1g-dev"
|
| 35 |
+
SWIFT_SYMBOLIZER_PACKAGES="build-essential make cmake ninja-build git python3 g++-multilib binutils-dev zlib1g-dev"
|
| 36 |
+
apt-get update && apt install -y $SWIFT_PACKAGES && \
|
| 37 |
+
apt install -y $SWIFT_SYMBOLIZER_PACKAGES --no-install-recommends
|
| 38 |
+
|
| 39 |
+
|
| 40 |
+
wget -q https://download.swift.org/swift-5.10.1-release/ubuntu2004/swift-5.10.1-RELEASE/swift-5.10.1-RELEASE-ubuntu20.04.tar.gz
|
| 41 |
+
tar xzf swift-5.10.1-RELEASE-ubuntu20.04.tar.gz
|
| 42 |
+
cp -r swift-5.10.1-RELEASE-ubuntu20.04/usr/* /usr/
|
| 43 |
+
rm -rf swift-5.10.1-RELEASE-ubuntu20.04.tar.gz swift-5.10.1-RELEASE-ubuntu20.04/
|
| 44 |
+
# TODO: Move to a seperate work dir
|
| 45 |
+
git clone https://github.com/llvm/llvm-project.git
|
| 46 |
+
cd llvm-project
|
| 47 |
+
git checkout 63bf228450b8403e0c5e828d276be47ffbcd00d0 # TODO: Keep in sync with base-clang.
|
| 48 |
+
git apply ../llvmsymbol.diff --verbose
|
| 49 |
+
cmake -G "Ninja" \
|
| 50 |
+
-DLIBCXX_ENABLE_SHARED=OFF \
|
| 51 |
+
-DLIBCXX_ENABLE_STATIC_ABI_LIBRARY=ON \
|
| 52 |
+
-DLIBCXXABI_ENABLE_SHARED=OFF \
|
| 53 |
+
-DCMAKE_BUILD_TYPE=Release \
|
| 54 |
+
-DLLVM_TARGETS_TO_BUILD=X86 \
|
| 55 |
+
-DCMAKE_C_COMPILER=clang \
|
| 56 |
+
-DCMAKE_CXX_COMPILER=clang++ \
|
| 57 |
+
-DLLVM_BUILD_TESTS=OFF \
|
| 58 |
+
-DLLVM_INCLUDE_TESTS=OFF llvm
|
| 59 |
+
ninja -j$(nproc) llvm-symbolizer
|
| 60 |
+
cp bin/llvm-symbolizer /usr/local/bin/llvm-symbolizer-swift
|
| 61 |
+
|
| 62 |
+
cd $SRC
|
| 63 |
+
rm -rf llvm-project llvmsymbol.diff
|
| 64 |
+
|
| 65 |
+
# TODO: Cleanup packages
|
| 66 |
+
apt-get remove --purge -y wget zlib1g-dev
|
| 67 |
+
apt-get autoremove -y
|
local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/precompile_afl
ADDED
|
@@ -0,0 +1,35 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/bin/bash -eu
|
| 2 |
+
# Copyright 2021 Google LLC
|
| 3 |
+
#
|
| 4 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 5 |
+
# you may not use this file except in compliance with the License.
|
| 6 |
+
# You may obtain a copy of the License at
|
| 7 |
+
#
|
| 8 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 9 |
+
#
|
| 10 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 11 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 12 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 13 |
+
# See the License for the specific language governing permissions and
|
| 14 |
+
# limitations under the License.
|
| 15 |
+
#
|
| 16 |
+
################################################################################
|
| 17 |
+
|
| 18 |
+
echo "Precompiling AFLplusplus"
|
| 19 |
+
|
| 20 |
+
pushd $SRC/aflplusplus > /dev/null
|
| 21 |
+
make clean
|
| 22 |
+
# Unset CFLAGS and CXXFLAGS while building AFL since we don't want to slow it
|
| 23 |
+
# down with sanitizers.
|
| 24 |
+
SAVE_CXXFLAGS=$CXXFLAGS
|
| 25 |
+
SAVE_CFLAGS=$CFLAGS
|
| 26 |
+
unset CXXFLAGS
|
| 27 |
+
unset CFLAGS
|
| 28 |
+
export AFL_IGNORE_UNKNOWN_ENVS=1
|
| 29 |
+
make clean
|
| 30 |
+
AFL_NO_X86=1 PYTHON_INCLUDE=/ make
|
| 31 |
+
make -C utils/aflpp_driver
|
| 32 |
+
|
| 33 |
+
popd > /dev/null
|
| 34 |
+
|
| 35 |
+
echo "Done."
|
local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/precompile_centipede
ADDED
|
@@ -0,0 +1,49 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/bin/bash -eu
|
| 2 |
+
# Copyright 2022 Google LLC
|
| 3 |
+
#
|
| 4 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 5 |
+
# you may not use this file except in compliance with the License.
|
| 6 |
+
# You may obtain a copy of the License at
|
| 7 |
+
#
|
| 8 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 9 |
+
#
|
| 10 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 11 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 12 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 13 |
+
# See the License for the specific language governing permissions and
|
| 14 |
+
# limitations under the License.
|
| 15 |
+
#
|
| 16 |
+
################################################################################
|
| 17 |
+
|
| 18 |
+
echo -n "Precompiling centipede"
|
| 19 |
+
|
| 20 |
+
# Build Centipede with bazel.
|
| 21 |
+
cd "$SRC/fuzztest/centipede/"
|
| 22 |
+
apt-get update && apt-get install libssl-dev -y
|
| 23 |
+
unset CXXFLAGS CFLAGS
|
| 24 |
+
# We need to use an older version of BAZEL because fuzztest relies on WORKSPACE
|
| 25 |
+
# Ref: https://github.com/google/oss-fuzz/pull/12838#issue-2733821058
|
| 26 |
+
export USE_BAZEL_VERSION=7.4.0
|
| 27 |
+
echo 'build --cxxopt=-stdlib=libc++ --linkopt=-lc++' >> /tmp/centipede.bazelrc
|
| 28 |
+
bazel --bazelrc=/tmp/centipede.bazelrc build -c opt :all
|
| 29 |
+
unset USE_BAZEL_VERSION
|
| 30 |
+
|
| 31 |
+
# Prepare the weak symbols:
|
| 32 |
+
# This is necessary because we compile the target binary and the intermediate
|
| 33 |
+
# auxiliary binaries with the same cflags. The auxiliary binaries do not need
|
| 34 |
+
# data-flow tracing flags, but will still throw errors when they cannot find
|
| 35 |
+
# the corresponding functions.
|
| 36 |
+
# The weak symbols provides fake implementations for intermediate binaries.
|
| 37 |
+
$CXX "$SRC/fuzztest/centipede/weak_sancov_stubs.cc" -c -o "$SRC/fuzztest/centipede/weak.o"
|
| 38 |
+
|
| 39 |
+
echo 'Removing extra stuff leftover to avoid bloating image.'
|
| 40 |
+
|
| 41 |
+
rm -rf /clang-*.tgz /clang
|
| 42 |
+
|
| 43 |
+
BAZEL_BIN_REAL_DIR=$(readlink -f $CENTIPEDE_BIN_DIR)
|
| 44 |
+
rm -rf $CENTIPEDE_BIN_DIR
|
| 45 |
+
mkdir -p $CENTIPEDE_BIN_DIR
|
| 46 |
+
mv $BAZEL_BIN_REAL_DIR/centipede/{centipede,libcentipede_runner.pic.a} $CENTIPEDE_BIN_DIR
|
| 47 |
+
rm -rf /root/.cache
|
| 48 |
+
|
| 49 |
+
echo 'Done.'
|
local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/precompile_honggfuzz
ADDED
|
@@ -0,0 +1,45 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/bin/bash -eu
|
| 2 |
+
# Copyright 2019 Google Inc.
|
| 3 |
+
#
|
| 4 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 5 |
+
# you may not use this file except in compliance with the License.
|
| 6 |
+
# You may obtain a copy of the License at
|
| 7 |
+
#
|
| 8 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 9 |
+
#
|
| 10 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 11 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 12 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 13 |
+
# See the License for the specific language governing permissions and
|
| 14 |
+
# limitations under the License.
|
| 15 |
+
#
|
| 16 |
+
################################################################################
|
| 17 |
+
|
| 18 |
+
echo "Precompiling honggfuzz"
|
| 19 |
+
export BUILD_OSSFUZZ_STATIC=true
|
| 20 |
+
|
| 21 |
+
PACKAGES=(
|
| 22 |
+
libunwind8-dev
|
| 23 |
+
libblocksruntime-dev
|
| 24 |
+
liblzma-dev
|
| 25 |
+
libiberty-dev
|
| 26 |
+
zlib1g-dev
|
| 27 |
+
pkg-config)
|
| 28 |
+
|
| 29 |
+
apt-get update && apt-get install -y ${PACKAGES[@]}
|
| 30 |
+
|
| 31 |
+
pushd $SRC/honggfuzz > /dev/null
|
| 32 |
+
make clean
|
| 33 |
+
# These CFLAGs match honggfuzz's default, with the exception of -mtune to
|
| 34 |
+
# improve portability and `-D_HF_LINUX_NO_BFD` to remove assembly instructions
|
| 35 |
+
# from the filenames.
|
| 36 |
+
CC=clang CFLAGS="-O3 -funroll-loops -D_HF_LINUX_NO_BFD" make
|
| 37 |
+
|
| 38 |
+
# libhfuzz.a will be added by CC/CXX linker directly during linking,
|
| 39 |
+
# but it's defined here to satisfy the build infrastructure
|
| 40 |
+
ar rcs honggfuzz.a libhfuzz/*.o libhfcommon/*.o
|
| 41 |
+
popd > /dev/null
|
| 42 |
+
|
| 43 |
+
apt-get remove -y --purge ${PACKAGES[@]}
|
| 44 |
+
apt-get autoremove -y
|
| 45 |
+
echo "Done."
|
local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/python_coverage_helper.py
ADDED
|
@@ -0,0 +1,120 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2022 Google LLC
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
"""Extracts file paths to copy files from pyinstaller-generated executables"""
|
| 15 |
+
import os
|
| 16 |
+
import sys
|
| 17 |
+
import shutil
|
| 18 |
+
import zipfile
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
# Finds all *.toc files in ./workpath and reads these files in order to
|
| 22 |
+
# identify Python files associated with a pyinstaller packaged executable.
|
| 23 |
+
# Copies all of the Python files to a temporary directory (/medio) following
|
| 24 |
+
# the original directory structure.
|
| 25 |
+
def get_all_files_from_toc(toc_file, file_path_set):
|
| 26 |
+
"""
|
| 27 |
+
Extract filepaths from a .toc file and add to file_path_set
|
| 28 |
+
"""
|
| 29 |
+
with open(toc_file, 'rb') as toc_file_fd:
|
| 30 |
+
for line in toc_file_fd:
|
| 31 |
+
try:
|
| 32 |
+
line = line.decode()
|
| 33 |
+
except: # pylint:disable=bare-except
|
| 34 |
+
continue
|
| 35 |
+
if '.py' not in line:
|
| 36 |
+
continue
|
| 37 |
+
|
| 38 |
+
split_line = line.split(' ')
|
| 39 |
+
for word in split_line:
|
| 40 |
+
word = word.replace('\'', '').replace(',', '').replace('\n', '')
|
| 41 |
+
if '.py' not in word:
|
| 42 |
+
continue
|
| 43 |
+
# Check if .egg is in the path and if so we need to split it
|
| 44 |
+
if os.path.isfile(word):
|
| 45 |
+
file_path_set.add(word)
|
| 46 |
+
elif '.egg' in word: # check if this is an egg
|
| 47 |
+
egg_path_split = word.split('.egg')
|
| 48 |
+
if len(egg_path_split) != 2:
|
| 49 |
+
continue
|
| 50 |
+
egg_path = egg_path_split[0] + '.egg'
|
| 51 |
+
if not os.path.isfile(egg_path):
|
| 52 |
+
continue
|
| 53 |
+
|
| 54 |
+
print('Unzipping contents of %s' % egg_path)
|
| 55 |
+
|
| 56 |
+
# We have an egg. This needs to be unzipped and then replaced
|
| 57 |
+
# with the unzipped data.
|
| 58 |
+
tmp_dir_name = 'zipdcontents'
|
| 59 |
+
if os.path.isdir(tmp_dir_name):
|
| 60 |
+
shutil.rmtree(tmp_dir_name)
|
| 61 |
+
|
| 62 |
+
# unzip egg and replace path with unzipped content
|
| 63 |
+
with zipfile.ZipFile(egg_path, 'r') as zip_f:
|
| 64 |
+
zip_f.extractall(tmp_dir_name)
|
| 65 |
+
os.remove(egg_path)
|
| 66 |
+
shutil.copytree(tmp_dir_name, egg_path)
|
| 67 |
+
|
| 68 |
+
# Now the lines should be accessible, so check again
|
| 69 |
+
if os.path.isfile(word):
|
| 70 |
+
file_path_set.add(word)
|
| 71 |
+
|
| 72 |
+
|
| 73 |
+
def create_file_structure_from_tocs(work_path, out_path):
|
| 74 |
+
"""
|
| 75 |
+
Extract the Python files that are added as paths in the output of
|
| 76 |
+
a pyinstaller operation. The files are determined by reading through
|
| 77 |
+
all of the *.toc files in the workpath of pyinstaller.
|
| 78 |
+
|
| 79 |
+
The files will be copied into the out_path using a similar file path
|
| 80 |
+
as they originally are. If any archive (.egg) files are present in the
|
| 81 |
+
.toc files, then unzip the archives and substitute the archive for the
|
| 82 |
+
unzipped content, i.e. we will extract the archives and collect the source
|
| 83 |
+
files.
|
| 84 |
+
"""
|
| 85 |
+
print('Extracts files from the pyinstaller workpath')
|
| 86 |
+
file_path_set = set()
|
| 87 |
+
for path1 in os.listdir(work_path):
|
| 88 |
+
full_path = os.path.join(work_path, path1)
|
| 89 |
+
if not os.path.isdir(full_path):
|
| 90 |
+
continue
|
| 91 |
+
|
| 92 |
+
# We have a directory
|
| 93 |
+
for path2 in os.listdir(full_path):
|
| 94 |
+
if not '.toc' in path2:
|
| 95 |
+
continue
|
| 96 |
+
full_toc_file = os.path.join(full_path, path2)
|
| 97 |
+
get_all_files_from_toc(full_toc_file, file_path_set)
|
| 98 |
+
|
| 99 |
+
for file_path in file_path_set:
|
| 100 |
+
relative_src = file_path[1:] if file_path[0] == '/' else file_path
|
| 101 |
+
dst_path = os.path.join(out_path, relative_src)
|
| 102 |
+
os.makedirs(os.path.dirname(dst_path), exist_ok=True)
|
| 103 |
+
shutil.copy(file_path, dst_path)
|
| 104 |
+
|
| 105 |
+
|
| 106 |
+
def main():
|
| 107 |
+
"""
|
| 108 |
+
Main handler.
|
| 109 |
+
"""
|
| 110 |
+
if len(sys.argv) != 3:
|
| 111 |
+
print('Use: python3 python_coverage_helper.py pyinstaller_workpath '
|
| 112 |
+
'destination_for_output')
|
| 113 |
+
sys.exit(1)
|
| 114 |
+
work_path = sys.argv[1]
|
| 115 |
+
out_path = sys.argv[2]
|
| 116 |
+
create_file_structure_from_tocs(work_path, out_path)
|
| 117 |
+
|
| 118 |
+
|
| 119 |
+
if __name__ == '__main__':
|
| 120 |
+
main()
|
local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/srcmap
ADDED
|
@@ -0,0 +1,66 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/bin/bash -eux
|
| 2 |
+
# Copyright 2016 Google Inc.
|
| 3 |
+
#
|
| 4 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 5 |
+
# you may not use this file except in compliance with the License.
|
| 6 |
+
# You may obtain a copy of the License at
|
| 7 |
+
#
|
| 8 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 9 |
+
#
|
| 10 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 11 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 12 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 13 |
+
# See the License for the specific language governing permissions and
|
| 14 |
+
# limitations under the License.
|
| 15 |
+
#
|
| 16 |
+
################################################################################
|
| 17 |
+
|
| 18 |
+
# Deterimine srcmap of checked out source code
|
| 19 |
+
|
| 20 |
+
SRCMAP=$(tempfile)
|
| 21 |
+
echo "{}" > $SRCMAP
|
| 22 |
+
|
| 23 |
+
# $1 - json file, $2 - jq program
|
| 24 |
+
function jq_inplace() {
|
| 25 |
+
F=$(tempfile) && cat $1 | jq "$2" > $F && mv $F $1
|
| 26 |
+
}
|
| 27 |
+
|
| 28 |
+
PATHS_TO_SCAN="$SRC"
|
| 29 |
+
|
| 30 |
+
if [[ $FUZZING_LANGUAGE == "go" ]]; then
|
| 31 |
+
PATHS_TO_SCAN="$PATHS_TO_SCAN $GOPATH"
|
| 32 |
+
fi
|
| 33 |
+
|
| 34 |
+
# Git
|
| 35 |
+
for DOT_GIT_DIR in $(find $PATHS_TO_SCAN -name ".git" -type d); do
|
| 36 |
+
GIT_DIR=$(dirname $DOT_GIT_DIR)
|
| 37 |
+
cd $GIT_DIR
|
| 38 |
+
GIT_URL=$(git config --get remote.origin.url)
|
| 39 |
+
GIT_REV=$(git rev-parse HEAD)
|
| 40 |
+
jq_inplace $SRCMAP ".\"$GIT_DIR\" = { type: \"git\", url: \"$GIT_URL\", rev: \"$GIT_REV\" }"
|
| 41 |
+
done
|
| 42 |
+
|
| 43 |
+
# Subversion
|
| 44 |
+
for DOT_SVN_DIR in $(find $PATHS_TO_SCAN -name ".svn" -type d); do
|
| 45 |
+
SVN_DIR=$(dirname $DOT_SVN_DIR)
|
| 46 |
+
cd $SVN_DIR
|
| 47 |
+
SVN_URL=$(svn info | grep "^URL:" | sed 's/URL: //g')
|
| 48 |
+
SVN_REV=$(svn info -r HEAD | grep "^Revision:" | sed 's/Revision: //g')
|
| 49 |
+
jq_inplace $SRCMAP ".\"$SVN_DIR\" = { type: \"svn\", url: \"$SVN_URL\", rev: \"$SVN_REV\" }"
|
| 50 |
+
done
|
| 51 |
+
|
| 52 |
+
# Mercurial
|
| 53 |
+
for DOT_HG_DIR in $(find $PATHS_TO_SCAN -name ".hg" -type d); do
|
| 54 |
+
HG_DIR=$(dirname $DOT_HG_DIR)
|
| 55 |
+
cd $HG_DIR
|
| 56 |
+
HG_URL=$(hg paths default)
|
| 57 |
+
HG_REV=$(hg --debug id -r. -i)
|
| 58 |
+
jq_inplace $SRCMAP ".\"$HG_DIR\" = { type: \"hg\", url: \"$HG_URL\", rev: \"$HG_REV\" }"
|
| 59 |
+
done
|
| 60 |
+
|
| 61 |
+
if [ "${OSSFUZZ_REVISION-}" != "" ]; then
|
| 62 |
+
jq_inplace $SRCMAP ".\"/src\" = { type: \"git\", url: \"https://github.com/google/oss-fuzz.git\", rev: \"$OSSFUZZ_REVISION\" }"
|
| 63 |
+
fi
|
| 64 |
+
|
| 65 |
+
cat $SRCMAP
|
| 66 |
+
rm $SRCMAP
|
local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/write_labels.py
ADDED
|
@@ -0,0 +1,40 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/usr/bin/env python3
|
| 2 |
+
# Copyright 2021 Google LLC
|
| 3 |
+
#
|
| 4 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 5 |
+
# you may not use this file except in compliance with the License.
|
| 6 |
+
# You may obtain a copy of the License at
|
| 7 |
+
#
|
| 8 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 9 |
+
#
|
| 10 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 11 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 12 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 13 |
+
# See the License for the specific language governing permissions and
|
| 14 |
+
# limitations under the License.
|
| 15 |
+
"""Script for writing from project.yaml to .labels file."""
|
| 16 |
+
|
| 17 |
+
import os
|
| 18 |
+
import json
|
| 19 |
+
import sys
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
def main():
|
| 23 |
+
"""Writes labels."""
|
| 24 |
+
if len(sys.argv) != 3:
|
| 25 |
+
print('Usage: write_labels.py labels_json out_dir', file=sys.stderr)
|
| 26 |
+
sys.exit(1)
|
| 27 |
+
|
| 28 |
+
labels_by_target = json.loads(sys.argv[1])
|
| 29 |
+
out = sys.argv[2]
|
| 30 |
+
|
| 31 |
+
for target_name, labels in labels_by_target.items():
|
| 32 |
+
# Skip over wildcard value applying to all fuzz targets
|
| 33 |
+
if target_name == '*':
|
| 34 |
+
continue
|
| 35 |
+
with open(os.path.join(out, target_name + '.labels'), 'w') as file_handle:
|
| 36 |
+
file_handle.write('\n'.join(labels))
|
| 37 |
+
|
| 38 |
+
|
| 39 |
+
if __name__ == '__main__':
|
| 40 |
+
main()
|
local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-image/Dockerfile
ADDED
|
@@ -0,0 +1,46 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2016 Google Inc.
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
#
|
| 15 |
+
################################################################################
|
| 16 |
+
|
| 17 |
+
# Base image for all other images.

ARG parent_image=ubuntu:20.04@sha256:4a45212e9518f35983a976eead0de5eecc555a2f047134e9dd2cfc589076a00d

FROM $parent_image

ENV DEBIAN_FRONTEND noninteractive
# Install tzdata to match ClusterFuzz
# (https://github.com/google/oss-fuzz/issues/9280).

# Use Azure mirrors for consistent apt repository access.
RUN cp /etc/apt/sources.list /etc/apt/sources.list.backup && \
    sed -i 's|http://archive.ubuntu.com/ubuntu/|http://azure.archive.ubuntu.com/ubuntu/|g' /etc/apt/sources.list && \
    sed -i 's|http://security.ubuntu.com/ubuntu/|http://azure.archive.ubuntu.com/ubuntu/|g' /etc/apt/sources.list

# Minimal toolchain/runtime bits every derived image needs.
RUN apt-get update && \
    apt-get upgrade -y && \
    apt-get install -y libc6-dev binutils libgcc-9-dev tzdata locales locales-all && \
    apt-get autoremove -y

# Canonical directory layout shared by builder and runner images:
# $OUT for build artifacts, $SRC for project sources, $WORK for scratch space.
ENV OUT=/out
ENV SRC=/src
ENV WORK=/work
ENV PATH="$PATH:/out"
ENV HWASAN_OPTIONS=random_tags=0
# Set locale to UTF-8.
ENV LC_ALL=C.UTF-8

RUN mkdir -p $OUT $SRC $WORK && chmod a+rwx $OUT $SRC $WORK
|
local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner-debug/Dockerfile
ADDED
|
@@ -0,0 +1,26 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2016 Google Inc.
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
#
|
| 15 |
+
################################################################################
|
| 16 |
+
|
| 17 |
+
ARG IMG_TAG=latest
FROM ghcr.io/aixcc-finals/base-runner:${IMG_TAG}
# Debug tooling on top of the standard runner image.
RUN apt-get update && apt-get install -y valgrind zip

# Installing GDB 12, re https://github.com/google/oss-fuzz/issues/7513.
# Built from source in a single layer; the build toolchain is purged
# afterwards so the image does not carry build-essential/libgmp-dev.
RUN apt-get install -y build-essential libgmp-dev && \
    wget https://ftp.gnu.org/gnu/gdb/gdb-12.1.tar.xz && \
    tar -xf gdb-12.1.tar.xz && cd gdb-12.1 && ./configure && \
    make -j $(expr $(nproc) / 2) && make install && cd .. && \
    rm -rf gdb-12.1* && apt-get remove --purge -y build-essential libgmp-dev
|
local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/Dockerfile
ADDED
|
@@ -0,0 +1,139 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2016 Google Inc.
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
#
|
| 15 |
+
################################################################################
|
| 16 |
+
|
| 17 |
+
# Build rust stuff in its own image. We only need the resulting binaries.
# Keeping the rust toolchain in the image wastes 1 GB.
ARG IMG_TAG=latest
FROM ghcr.io/aixcc-finals/base-image:${IMG_TAG} as temp-runner-binary-builder

RUN apt-get update && apt-get install -y cargo libyaml-dev
RUN cargo install rustfilt

# Using multi-stage build to copy some LLVM binaries needed in the runner image.
FROM ghcr.io/aixcc-finals/base-clang:${IMG_TAG} AS base-clang
FROM ghcr.io/aixcc-finals/base-builder-ruby:${IMG_TAG} AS base-ruby

# The base builder image compiles a specific Python version. Using a multi-stage build
# to copy that same Python interpreter into the runner image saves build time and keeps
# the Python versions in sync.
FROM ghcr.io/aixcc-finals/base-builder:${IMG_TAG} AS base-builder

# Real image that will be used later.
FROM ghcr.io/aixcc-finals/base-image:${IMG_TAG}

COPY --from=temp-runner-binary-builder /root/.cargo/bin/rustfilt /usr/local/bin

# Copy the binaries needed for code coverage and crash symbolization.
COPY --from=base-clang /usr/local/bin/llvm-cov \
    /usr/local/bin/llvm-profdata \
    /usr/local/bin/llvm-symbolizer \
    /usr/local/bin/

# Copy the pre-compiled Python binaries and libraries
COPY --from=base-builder /usr/local/bin/python3.10 /usr/local/bin/python3.10
COPY --from=base-builder /usr/local/lib/libpython3.10.so.1.0 /usr/local/lib/libpython3.10.so.1.0
COPY --from=base-builder /usr/local/include/python3.10 /usr/local/include/python3.10
COPY --from=base-builder /usr/local/lib/python3.10 /usr/local/lib/python3.10
COPY --from=base-builder /usr/local/bin/pip3 /usr/local/bin/pip3

# Create symbolic links to ensure compatibility
RUN ldconfig && \
    ln -s /usr/local/bin/python3.10 /usr/local/bin/python3 && \
    ln -s /usr/local/bin/python3.10 /usr/local/bin/python

COPY install_deps.sh /
RUN /install_deps.sh && rm /install_deps.sh

ENV CODE_COVERAGE_SRC=/opt/code_coverage
# Pin coverage to the same as in the base builder:
# https://github.com/google/oss-fuzz/blob/master/infra/base-images/base-builder/install_python.sh#L22
RUN git clone https://chromium.googlesource.com/chromium/src/tools/code_coverage $CODE_COVERAGE_SRC && \
    cd /opt/code_coverage && \
    git checkout edba4873b5e8a390e977a64c522db2df18a8b27d && \
    pip3 install wheel && \
    # If version "Jinja2==2.10" is in requirements.txt, bump it to a patch version that
    # supports upgrading its MarkupSafe dependency to a Python 3.10 compatible release:
    sed -i 's/Jinja2==2.10/Jinja2==2.10.3/' requirements.txt && \
    pip3 install -r requirements.txt && \
    pip3 install MarkupSafe==2.0.1 && \
    pip3 install coverage==6.3.2

# Default environment options for various sanitizers.
# Note that these match the settings used in ClusterFuzz and
# shouldn't be changed unless a corresponding change is made on
# ClusterFuzz side as well.
ENV ASAN_OPTIONS="alloc_dealloc_mismatch=0:allocator_may_return_null=1:allocator_release_to_os_interval_ms=500:check_malloc_usable_size=0:detect_container_overflow=1:detect_odr_violation=0:detect_leaks=1:detect_stack_use_after_return=1:fast_unwind_on_fatal=0:handle_abort=1:handle_segv=1:handle_sigill=1:max_uar_stack_size_log=16:print_scariness=1:quarantine_size_mb=10:strict_memcmp=1:strip_path_prefix=/workspace/:symbolize=1:use_sigaltstack=1:dedup_token_length=3"
ENV MSAN_OPTIONS="print_stats=1:strip_path_prefix=/workspace/:symbolize=1:dedup_token_length=3"
ENV UBSAN_OPTIONS="print_stacktrace=1:print_summary=1:silence_unsigned_overflow=1:strip_path_prefix=/workspace/:symbolize=1:dedup_token_length=3"
ENV FUZZER_ARGS="-rss_limit_mb=2560 -timeout=25"
ENV AFL_FUZZER_ARGS="-m none"

# Set up Golang environment variables (copied from /root/.bash_profile).
ENV GOPATH /root/go

# /root/.go/bin is for the standard Go binaries (i.e. go, gofmt, etc).
# $GOPATH/bin is for the binaries from the dependencies installed via "go get".
ENV PATH $PATH:$GOPATH/bin
COPY gocoverage $GOPATH/gocoverage

COPY install_go.sh /
RUN /install_go.sh && rm -rf /install_go.sh /root/.go

# Install OpenJDK and trim its size by removing unused components.
# NOTE(review): JAVA_HOME points at OpenJDK 17 while JAVA_15_HOME exposes a
# separate OpenJDK 15 install — the original comment said only "OpenJDK 15";
# confirm the exact versions against install_java.sh.
ENV JAVA_HOME=/usr/lib/jvm/java-17-openjdk-amd64
ENV JAVA_15_HOME=/usr/lib/jvm/java-15-openjdk-amd64
ENV JVM_LD_LIBRARY_PATH=$JAVA_HOME/lib/server
ENV PATH=$PATH:$JAVA_HOME/bin

COPY install_java.sh /
RUN /install_java.sh && rm /install_java.sh

# Install JaCoCo for JVM coverage.
RUN wget https://repo1.maven.org/maven2/org/jacoco/org.jacoco.cli/0.8.7/org.jacoco.cli-0.8.7-nodeps.jar -O /opt/jacoco-cli.jar && \
    wget https://repo1.maven.org/maven2/org/jacoco/org.jacoco.agent/0.8.7/org.jacoco.agent-0.8.7-runtime.jar -O /opt/jacoco-agent.jar && \
    echo "37df187b76888101ecd745282e9cd1ad4ea508d6 /opt/jacoco-agent.jar" | shasum --check && \
    echo "c1814e7bba5fd8786224b09b43c84fd6156db690 /opt/jacoco-cli.jar" | shasum --check

COPY install_javascript.sh /
RUN /install_javascript.sh && rm /install_javascript.sh

# Copy built ruby and ruzzy from builder
COPY --from=base-ruby /usr/local/rvm /usr/local/rvm
COPY --from=base-ruby /install/ruzzy /install/ruzzy
COPY ruzzy /usr/bin/ruzzy
ENV PATH="$PATH:/usr/local/rvm/rubies/ruby-3.3.1/bin"
# RubyGems installation directory
ENV GEM_HOME="$OUT/fuzz-gem"
ENV GEM_PATH="/install/ruzzy"

# Do this last to make developing these files easier/faster due to caching.
COPY bad_build_check \
    coverage \
    coverage_helper \
    download_corpus \
    jacoco_report_converter.py \
    nyc_report_converter.py \
    rcfilt \
    reproduce \
    run_fuzzer \
    parse_options.py \
    generate_differential_cov_report.py \
    profraw_update.py \
    targets_list \
    test_all.py \
    test_one.py \
    python_coverage_runner_help.py \
    /usr/local/bin/
|
local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/README.md
ADDED
|
@@ -0,0 +1,31 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# base-runner
|
| 2 |
+
> Base image for fuzzer runners.
|
| 3 |
+
|
| 4 |
+
```bash
|
| 5 |
+
docker run -ti ghcr.io/aixcc-finals/base-runner <command> <args>
|
| 6 |
+
```
|
| 7 |
+
|
| 8 |
+
## Commands
|
| 9 |
+
|
| 10 |
+
| Command | Description |
|
| 11 |
+
|---------|-------------|
|
| 12 |
+
| `reproduce <fuzzer_name> <fuzzer_options>` | build all fuzz targets and run specified one with testcase `/testcase` and given options. |
|
| 13 |
+
| `run_fuzzer <fuzzer_name> <fuzzer_options>` | runs specified fuzzer combining options with `.options` file |
|
| 14 |
+
| `test_all.py` | runs every binary in `/out` as a fuzzer for a while to ensure it works. |
|
| 15 |
+
| `coverage <fuzzer_name>` | generate a coverage report for the given fuzzer. |
|
| 16 |
+
|
| 17 |
+
# Examples
|
| 18 |
+
|
| 19 |
+
- *Reproduce using latest OSS-Fuzz build:*
|
| 20 |
+
|
| 21 |
+
<pre>
|
| 22 |
+
docker run --rm -ti -v <b><i><testcase_path></i></b>:/testcase gcr.io/oss-fuzz/<b><i>$PROJECT_NAME</i></b> reproduce <b><i><fuzzer_name></i></b>
|
| 23 |
+
</pre>
|
| 24 |
+
|
| 25 |
+
- *Reproduce using local source checkout:*
|
| 26 |
+
|
| 27 |
+
<pre>
|
| 28 |
+
docker run --rm -ti -v <b><i><source_path></i></b>:/src/<b><i>$PROJECT_NAME</i></b> \
|
| 29 |
+
-v <b><i><testcase_path></i></b>:/testcase gcr.io/oss-fuzz/<b><i>$PROJECT_NAME</i></b> \
|
| 30 |
+
reproduce <b><i><fuzzer_name></i></b>
|
| 31 |
+
</pre>
|
local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/bad_build_check
ADDED
|
@@ -0,0 +1,494 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/bin/bash -u
|
| 2 |
+
# Copyright 2017 Google Inc.
|
| 3 |
+
#
|
| 4 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 5 |
+
# you may not use this file except in compliance with the License.
|
| 6 |
+
# You may obtain a copy of the License at
|
| 7 |
+
#
|
| 8 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 9 |
+
#
|
| 10 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 11 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 12 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 13 |
+
# See the License for the specific language governing permissions and
|
| 14 |
+
# limitations under the License.
|
| 15 |
+
#
|
| 16 |
+
################################################################################
|
| 17 |
+
|
| 18 |
+
# A minimal number of runs to test fuzz target with a non-empty input.
MIN_NUMBER_OF_RUNS=4

# The "example" target has 73 with ASan, 65 with UBSan, and 6648 with MSan.
# Real world targets have greater values (arduinojson: 407, zlib: 664).
# Mercurial's bdiff_fuzzer has 116 PCs when built with ASan.
THRESHOLD_FOR_NUMBER_OF_EDGES=100

# A fuzz target is supposed to have at least two functions, such as
# LLVMFuzzerTestOneInput and an API that is being called from there.
THRESHOLD_FOR_NUMBER_OF_FUNCTIONS=2

# Threshold values for different sanitizers used by instrumentation checks.
ASAN_CALLS_THRESHOLD_FOR_ASAN_BUILD=1000
ASAN_CALLS_THRESHOLD_FOR_NON_ASAN_BUILD=0

# The value below can definitely be higher (like 500-1000), but avoid being too
# aggressive here while still evaluating the DFT-based fuzzing approach.
DFSAN_CALLS_THRESHOLD_FOR_DFSAN_BUILD=100
DFSAN_CALLS_THRESHOLD_FOR_NON_DFSAN_BUILD=0

MSAN_CALLS_THRESHOLD_FOR_MSAN_BUILD=1000
# Some engines (e.g. honggfuzz) may make a very small number of calls to msan
# for memory poisoning.
MSAN_CALLS_THRESHOLD_FOR_NON_MSAN_BUILD=3

# Usually, a non UBSan build (e.g. ASan) has 165 calls to UBSan runtime. The
# majority of targets built with UBSan have 200+ UBSan calls, but there are
# some very small targets that may have < 200 UBSan calls even in a UBSan build.
# Use the threshold value of 168 (slightly > 165) for UBSan build.
UBSAN_CALLS_THRESHOLD_FOR_UBSAN_BUILD=168

# It would be risky to use the threshold value close to 165 for non UBSan build,
# as UBSan runtime may change any time and thus we could have different number
# of calls to UBSan runtime even in ASan build. With that, we use the threshold
# value of 200 that would detect unnecessary UBSan instrumentation in the vast
# majority of targets, except of a handful very small ones, which would not be
# a big concern either way as the overhead for them would not be significant.
UBSAN_CALLS_THRESHOLD_FOR_NON_UBSAN_BUILD=200

# ASan builds on i386 generally have about 250 UBSan runtime calls.
if [[ $ARCHITECTURE == 'i386' ]]
then
  UBSAN_CALLS_THRESHOLD_FOR_NON_UBSAN_BUILD=280
fi
|
| 63 |
+
|
| 64 |
+
|
| 65 |
+
# Verify that the given fuzz target is correctly built to run with a particular
# engine. Reads the engine from $FUZZING_ENGINE and pattern-matches the
# target's startup output (stored in /tmp/<name>.output) against
# engine-specific markers. Returns 1 on a detected bad build, 0 otherwise.
function check_engine {
  local FUZZER=$1
  local FUZZER_NAME=$(basename $FUZZER)
  local FUZZER_OUTPUT="/tmp/$FUZZER_NAME.output"
  local CHECK_FAILED=0

  if [[ "$FUZZING_ENGINE" == libfuzzer ]]; then
    # Store fuzz target's output into a temp file to be used for further checks.
    $FUZZER -seed=1337 -runs=$MIN_NUMBER_OF_RUNS &>$FUZZER_OUTPUT
    # `egrep -c` counts matching lines; any match means libFuzzer saw no coverage.
    CHECK_FAILED=$(egrep "ERROR: no interesting inputs were found. Is the code instrumented" -c $FUZZER_OUTPUT)
    if (( $CHECK_FAILED > 0 )); then
      echo "BAD BUILD: $FUZZER does not seem to have coverage instrumentation."
      cat $FUZZER_OUTPUT
      # Bail out as the further check does not make any sense, there are 0 PCs.
      return 1
    fi

    # Extract the edge count from libFuzzer's "INFO: Loaded N modules" line.
    local NUMBER_OF_EDGES=$(grep -Po "INFO: Loaded [[:digit:]]+ module.*\(.*(counters|guards)\):[[:space:]]+\K[[:digit:]]+" $FUZZER_OUTPUT)

    # If a fuzz target fails to start, grep won't find anything, so bail out early to let check_startup_crash deal with it.
    [[ -z "$NUMBER_OF_EDGES" ]] && return

    if (( $NUMBER_OF_EDGES < $THRESHOLD_FOR_NUMBER_OF_EDGES )); then
      # Warning only: no `return 1` here, partial instrumentation is not fatal.
      echo "BAD BUILD: $FUZZER seems to have only partial coverage instrumentation."
    fi
  elif [[ "$FUZZING_ENGINE" == afl ]]; then
    AFL_FORKSRV_INIT_TMOUT=30000 AFL_NO_UI=1 SKIP_SEED_CORPUS=1 timeout --preserve-status -s INT 35s run_fuzzer $FUZZER_NAME &>$FUZZER_OUTPUT
    # afl-fuzz prints this banner once its fork server is up and fuzzing begins.
    CHECK_PASSED=$(egrep "All set and ready to roll" -c $FUZZER_OUTPUT)
    if (( $CHECK_PASSED == 0 )); then
      echo "BAD BUILD: fuzzing $FUZZER with afl-fuzz failed."
      cat $FUZZER_OUTPUT
      return 1
    fi
  elif [[ "$FUZZING_ENGINE" == honggfuzz ]]; then
    SKIP_SEED_CORPUS=1 timeout --preserve-status -s INT 20s run_fuzzer $FUZZER_NAME &>$FUZZER_OUTPUT
    # honggfuzz periodically logs "Sz:<n> Tm:<n>" status lines while running.
    CHECK_PASSED=$(egrep "^Sz:[0-9]+ Tm:[0-9]+" -c $FUZZER_OUTPUT)
    if (( $CHECK_PASSED == 0 )); then
      echo "BAD BUILD: fuzzing $FUZZER with honggfuzz failed."
      cat $FUZZER_OUTPUT
      return 1
    fi
  elif [[ "$FUZZING_ENGINE" == dataflow ]]; then
    $FUZZER &> $FUZZER_OUTPUT
    local NUMBER_OF_FUNCTIONS=$(grep -Po "INFO:\s+\K[[:digit:]]+(?=\s+instrumented function.*)" $FUZZER_OUTPUT)
    [[ -z "$NUMBER_OF_FUNCTIONS" ]] && NUMBER_OF_FUNCTIONS=0
    if (( $NUMBER_OF_FUNCTIONS < $THRESHOLD_FOR_NUMBER_OF_FUNCTIONS )); then
      echo "BAD BUILD: $FUZZER does not seem to be properly built in 'dataflow' config."
      cat $FUZZER_OUTPUT
      return 1
    fi
  elif [[ "$FUZZING_ENGINE" == centipede \
      && ("${HELPER:-}" == True || "$SANITIZER" == none ) ]]; then
    # Performs run test on unsanitized binaries with auxiliary sanitized
    # binaries if they are built with helper.py.
    # Performs run test on unsanitized binaries without auxiliary sanitized
    # binaries if they are from trial build and production build.
    # TODO(Dongge): Support run test with sanitized binaries for trial and
    # production build.
    SKIP_SEED_CORPUS=1 timeout --preserve-status -s INT 20s run_fuzzer $FUZZER_NAME &>$FUZZER_OUTPUT
    CHECK_PASSED=$(egrep "\[S0.0] begin-fuzz: ft: 0 corp: 0/0" -c $FUZZER_OUTPUT)
    if (( $CHECK_PASSED == 0 )); then
      echo "BAD BUILD: fuzzing $FUZZER with centipede failed."
      cat $FUZZER_OUTPUT
      return 1
    fi
  fi

  return 0
}
|
| 136 |
+
|
| 137 |
+
# Verify that the given fuzz target has been built properly and works.
# Runs the target briefly under the configured $FUZZING_ENGINE and fails
# (returns 1, dumping the output) when the target crashes or exits at startup.
function check_startup_crash {
  local FUZZER=$1
  local FUZZER_NAME=$(basename $FUZZER)
  local FUZZER_OUTPUT="/tmp/$FUZZER_NAME.output"
  local CHECK_PASSED=0

  if [[ "$FUZZING_ENGINE" = libfuzzer ]]; then
    # Skip seed corpus as there is another explicit check that uses seed corpora.
    SKIP_SEED_CORPUS=1 run_fuzzer $FUZZER_NAME -seed=1337 -runs=$MIN_NUMBER_OF_RUNS &>$FUZZER_OUTPUT
    # libFuzzer prints "Done N runs" only when it completed all runs cleanly.
    CHECK_PASSED=$(egrep "Done $MIN_NUMBER_OF_RUNS runs" -c $FUZZER_OUTPUT)
  elif [[ "$FUZZING_ENGINE" = afl ]]; then
    AFL_FORKSRV_INIT_TMOUT=30000 AFL_NO_UI=1 SKIP_SEED_CORPUS=1 timeout --preserve-status -s INT 35s run_fuzzer $FUZZER_NAME &>$FUZZER_OUTPUT
    # AFL reports target crashes/terminations explicitly; absence means OK.
    if [ $(egrep "target binary (crashed|terminated)" -c $FUZZER_OUTPUT) -eq 0 ]; then
      CHECK_PASSED=1
    fi
  elif [[ "$FUZZING_ENGINE" = dataflow ]]; then
    # TODO(https://github.com/google/oss-fuzz/issues/1632): add check for
    # binaries compiled with dataflow engine when the interface becomes stable.
    CHECK_PASSED=1
  else
    # TODO: add checks for another fuzzing engines if possible.
    CHECK_PASSED=1
  fi

  if [ "$CHECK_PASSED" -eq "0" ]; then
    echo "BAD BUILD: $FUZZER seems to have either startup crash or exit:"
    cat $FUZZER_OUTPUT
    return 1
  fi

  return 0
}
|
| 170 |
+
|
| 171 |
+
# Mixed sanitizers check for ASan build.
#
# Arguments: fuzzer path, then the ASan/DFSan/MSan/UBSan runtime call counts
# gathered by objdump in check_mixed_sanitizers. Prints a diagnostic and
# returns 1 on the first mismatch so the message pinpoints which sanitizer
# is wrong; returns 0 when the binary looks like a pure ASan build.
function check_asan_build {
  local fuzzer=$1
  local asan_calls=$2
  local dfsan_calls=$3
  local msan_calls=$4
  local ubsan_calls=$5

  # ASan instrumentation itself must be present...
  if (( asan_calls < ASAN_CALLS_THRESHOLD_FOR_ASAN_BUILD )); then
    echo "BAD BUILD: $fuzzer does not seem to be compiled with ASan."
    return 1
  fi

  # ...and no other sanitizer runtime may exceed its background-noise level.
  if (( dfsan_calls > DFSAN_CALLS_THRESHOLD_FOR_NON_DFSAN_BUILD )); then
    echo "BAD BUILD: ASan build of $fuzzer seems to be compiled with DFSan."
    return 1
  fi

  if (( msan_calls > MSAN_CALLS_THRESHOLD_FOR_NON_MSAN_BUILD )); then
    echo "BAD BUILD: ASan build of $fuzzer seems to be compiled with MSan."
    return 1
  fi

  if (( ubsan_calls > UBSAN_CALLS_THRESHOLD_FOR_NON_UBSAN_BUILD )); then
    echo "BAD BUILD: ASan build of $fuzzer seems to be compiled with UBSan."
    return 1
  fi

  return 0
}
|
| 202 |
+
|
| 203 |
+
# Mixed sanitizers check for DFSan build.
#
# Arguments: fuzzer path, then the ASan/DFSan/MSan/UBSan runtime call counts
# gathered by check_mixed_sanitizers. Returns 1 with a diagnostic when the
# binary lacks DFSan instrumentation or contains another sanitizer's runtime;
# returns 0 otherwise.
function check_dfsan_build {
  local FUZZER=$1
  local ASAN_CALLS=$2
  local DFSAN_CALLS=$3
  local MSAN_CALLS=$4
  local UBSAN_CALLS=$5

  # Perform all the checks for more detailed error message.
  if (( $ASAN_CALLS > $ASAN_CALLS_THRESHOLD_FOR_NON_ASAN_BUILD )); then
    echo "BAD BUILD: DFSan build of $FUZZER seems to be compiled with ASan."
    return 1
  fi

  if (( $DFSAN_CALLS < $DFSAN_CALLS_THRESHOLD_FOR_DFSAN_BUILD )); then
    echo "BAD BUILD: $FUZZER does not seem to be compiled with DFSan."
    return 1
  fi

  # Fix: the two messages below previously said "ASan build of ...", a
  # copy-paste from check_asan_build — this function checks a DFSan build.
  if (( $MSAN_CALLS > $MSAN_CALLS_THRESHOLD_FOR_NON_MSAN_BUILD )); then
    echo "BAD BUILD: DFSan build of $FUZZER seems to be compiled with MSan."
    return 1
  fi

  if (( $UBSAN_CALLS > $UBSAN_CALLS_THRESHOLD_FOR_NON_UBSAN_BUILD )); then
    echo "BAD BUILD: DFSan build of $FUZZER seems to be compiled with UBSan."
    return 1
  fi

  return 0
}
|
| 234 |
+
|
| 235 |
+
|
| 236 |
+
# Mixed sanitizers check for MSan build.
#
# Arguments: fuzzer path, then the ASan/DFSan/MSan/UBSan runtime call counts
# gathered by check_mixed_sanitizers. Returns 1 with a diagnostic on the
# first sanitizer mismatch; returns 0 when the binary looks like a pure
# MSan build.
function check_msan_build {
  local fuzzer=$1
  local asan_calls=$2
  local dfsan_calls=$3
  local msan_calls=$4
  local ubsan_calls=$5

  # Foreign sanitizer runtimes must stay below their background-noise levels...
  if (( asan_calls > ASAN_CALLS_THRESHOLD_FOR_NON_ASAN_BUILD )); then
    echo "BAD BUILD: MSan build of $fuzzer seems to be compiled with ASan."
    return 1
  fi

  if (( dfsan_calls > DFSAN_CALLS_THRESHOLD_FOR_NON_DFSAN_BUILD )); then
    echo "BAD BUILD: MSan build of $fuzzer seems to be compiled with DFSan."
    return 1
  fi

  # ...while MSan instrumentation itself must actually be present.
  if (( msan_calls < MSAN_CALLS_THRESHOLD_FOR_MSAN_BUILD )); then
    echo "BAD BUILD: $fuzzer does not seem to be compiled with MSan."
    return 1
  fi

  if (( ubsan_calls > UBSAN_CALLS_THRESHOLD_FOR_NON_UBSAN_BUILD )); then
    echo "BAD BUILD: MSan build of $fuzzer seems to be compiled with UBSan."
    return 1
  fi

  return 0
}
|
| 267 |
+
|
| 268 |
+
# Mixed sanitizers check for UBSan build.
#
# Arguments: fuzzer path, then the ASan/DFSan/MSan/UBSan runtime call counts
# gathered by check_mixed_sanitizers. Returns 1 with a diagnostic on a
# mismatch; returns 0 otherwise (including for non-libFuzzer engines, which
# are skipped entirely).
function check_ubsan_build {
  local FUZZER=$1
  local ASAN_CALLS=$2
  local DFSAN_CALLS=$3
  local MSAN_CALLS=$4
  local UBSAN_CALLS=$5

  if [[ "$FUZZING_ENGINE" != libfuzzer ]]; then
    # Ignore UBSan checks for fuzzing engines other than libFuzzer because:
    # A) we (probably) are not going to use those with UBSan
    # B) such builds show indistinguishable number of calls to UBSan
    return 0
  fi

  # Perform all the checks for more detailed error message.
  if (( $ASAN_CALLS > $ASAN_CALLS_THRESHOLD_FOR_NON_ASAN_BUILD )); then
    echo "BAD BUILD: UBSan build of $FUZZER seems to be compiled with ASan."
    return 1
  fi

  if (( $DFSAN_CALLS > $DFSAN_CALLS_THRESHOLD_FOR_NON_DFSAN_BUILD )); then
    echo "BAD BUILD: UBSan build of $FUZZER seems to be compiled with DFSan."
    return 1
  fi

  if (( $MSAN_CALLS > $MSAN_CALLS_THRESHOLD_FOR_NON_MSAN_BUILD )); then
    echo "BAD BUILD: UBSan build of $FUZZER seems to be compiled with MSan."
    return 1
  fi

  if (( $UBSAN_CALLS < $UBSAN_CALLS_THRESHOLD_FOR_UBSAN_BUILD )); then
    echo "BAD BUILD: $FUZZER does not seem to be compiled with UBSan."
    return 1
  fi

  # Explicit success status, matching the other check_* helpers; previously
  # the function fell off the end and returned the last command's status.
  return 0
}
|
| 304 |
+
|
| 305 |
+
# Verify that the given fuzz target is compiled with correct sanitizer.
|
| 306 |
+
function check_mixed_sanitizers {
  # Disassemble the fuzz target, count calls into each sanitizer runtime and
  # dispatch to the checker matching $SANITIZER.
  # $1 - path to the fuzz target binary.
  # Returns 0 iff the binary appears to link only the expected runtime.
  local FUZZER=$1
  local result=0
  local CALL_INSN=

  if [ "${FUZZING_LANGUAGE:-}" = "jvm" ]; then
    # Sanitizer runtime is linked into the Jazzer driver, so this check does not
    # apply.
    return 0
  fi

  if [ "${FUZZING_LANGUAGE:-}" = "javascript" ]; then
    # Jazzer.js currently does not support using sanitizers with native Node.js addons.
    # This is not relevant anyways since supporting this will be done by preloading
    # the sanitizers in the wrapper script starting Jazzer.js.
    return 0
  fi

  if [ "${FUZZING_LANGUAGE:-}" = "python" ]; then
    # Sanitizer runtime is loaded via LD_PRELOAD, so this check does not apply.
    return 0
  fi

  # For fuzztest fuzzers point to the binary instead of launcher script.
  # Keep the part before '@'. (The previous array-expansion idiom appended a
  # literal "[0]" to the last element and only worked because $FUZZER expands
  # to element 0 of the array.)
  if [[ $FUZZER == *"@"* ]]; then
    FUZZER=${FUZZER%%@*}
  fi

  # Pick the call-instruction regex for the current architecture.
  if [[ $ARCHITECTURE == "x86_64" ]]; then
    CALL_INSN="callq?\s+[0-9a-f]+\s+<"
  elif [[ $ARCHITECTURE == "i386" ]]; then
    CALL_INSN="call\s+[0-9a-f]+\s+<"
  elif [[ $ARCHITECTURE == "aarch64" ]]; then
    CALL_INSN="bl\s+[0-9a-f]+\s+<"
  else
    echo "UNSUPPORTED ARCHITECTURE"
    exit 1
  fi

  # Disassemble once instead of four times; the counts only differ in which
  # runtime prefix they grep for.
  local DISASSEMBLY
  DISASSEMBLY=$(objdump -dC $FUZZER)
  local ASAN_CALLS=$(echo "$DISASSEMBLY" | grep -E -c "${CALL_INSN}__asan")
  local DFSAN_CALLS=$(echo "$DISASSEMBLY" | grep -E -c "${CALL_INSN}__dfsan")
  local MSAN_CALLS=$(echo "$DISASSEMBLY" | grep -E -c "${CALL_INSN}__msan")
  local UBSAN_CALLS=$(echo "$DISASSEMBLY" | grep -E -c "${CALL_INSN}__ubsan")

  if [[ "$SANITIZER" = address ]]; then
    check_asan_build $FUZZER $ASAN_CALLS $DFSAN_CALLS $MSAN_CALLS $UBSAN_CALLS
    result=$?
  elif [[ "$SANITIZER" = dataflow ]]; then
    check_dfsan_build $FUZZER $ASAN_CALLS $DFSAN_CALLS $MSAN_CALLS $UBSAN_CALLS
    result=$?
  elif [[ "$SANITIZER" = memory ]]; then
    check_msan_build $FUZZER $ASAN_CALLS $DFSAN_CALLS $MSAN_CALLS $UBSAN_CALLS
    result=$?
  elif [[ "$SANITIZER" = undefined ]]; then
    check_ubsan_build $FUZZER $ASAN_CALLS $DFSAN_CALLS $MSAN_CALLS $UBSAN_CALLS
    result=$?
  elif [[ "$SANITIZER" = thread ]]; then
    # TODO(metzman): Implement this.
    result=0
  fi

  return $result
}
|
| 373 |
+
|
| 374 |
+
# Verify that the given fuzz target doesn't crash on the seed corpus.
|
| 375 |
+
function check_seed_corpus {
  # Replay the seed corpus with zero fuzzing runs and report any crashing
  # seed input. Only meaningful for libFuzzer targets.
  # $1 - path to the fuzz target.
  local FUZZER=$1
  local FUZZER_NAME="$(basename $FUZZER)"
  local FUZZER_OUTPUT="/tmp/$FUZZER_NAME.output"

  # Other engines handle seed corpora differently; nothing to check.
  [[ "$FUZZING_ENGINE" == libfuzzer ]] || return 0

  # Set up common fuzzing arguments, otherwise "run_fuzzer" errors out.
  if [ -z "$FUZZER_ARGS" ]; then
    export FUZZER_ARGS="-rss_limit_mb=2560 -timeout=25"
  fi

  # Stay quiet unless the target actually crashed on a seed.
  if ! bash -c "run_fuzzer $FUZZER_NAME -runs=0" &> $FUZZER_OUTPUT; then
    echo "BAD BUILD: $FUZZER has a crashing input in its seed corpus:"
    cat $FUZZER_OUTPUT
    return 1
  fi

  return 0
}
|
| 400 |
+
|
| 401 |
+
function check_architecture {
  # Verify the fuzz target was built for $ARCHITECTURE by inspecting file(1)
  # output. Prints a diagnostic and returns non-zero on a mismatch.
  # $1 - path to the fuzz target.
  local FUZZER=$1

  if [ "${FUZZING_LANGUAGE:-}" = "jvm" ]; then
    # The native dependencies of a JVM project are not packaged, but loaded
    # dynamically at runtime and thus cannot be checked here.
    return 0;
  fi

  if [ "${FUZZING_LANGUAGE:-}" = "javascript" ]; then
    # Jazzer.js fuzzers are wrapper scripts that start the fuzz target with
    # the Jazzer.js CLI.
    return 0;
  fi

  if [ "${FUZZING_LANGUAGE:-}" = "python" ]; then
    # Inspect the accompanying .pkg file rather than the wrapper script.
    FUZZER=${FUZZER}.pkg
  fi

  # For fuzztest fuzzers point to the binary instead of launcher script.
  # Keep the part before '@'. (The previous array-expansion idiom appended a
  # literal "[0]" to the last element and only worked because $FUZZER expands
  # to element 0 of the array.)
  if [[ $FUZZER == *"@"* ]]; then
    FUZZER=${FUZZER%%@*}
  fi

  FILE_OUTPUT=$(file $FUZZER)
  if [[ $ARCHITECTURE == "x86_64" ]]; then
    echo $FILE_OUTPUT | grep "x86-64" > /dev/null
  elif [[ $ARCHITECTURE == "i386" ]]; then
    echo $FILE_OUTPUT | grep "80386" > /dev/null
  elif [[ $ARCHITECTURE == "aarch64" ]]; then
    echo $FILE_OUTPUT | grep "aarch64" > /dev/null
  else
    echo "UNSUPPORTED ARCHITECTURE"
    return 1
  fi
  # Exit status of the grep above: 0 iff the expected architecture was found.
  result=$?
  if [[ $result != 0 ]]; then
    echo "BAD BUILD $FUZZER is not built for architecture: $ARCHITECTURE"
    echo "file command output: $FILE_OUTPUT"
    echo "check_mixed_sanitizers test will fail."
  fi
  return $result
}
|
| 449 |
+
|
| 450 |
+
function main {
  # Run the full suite of bad-build checks against one fuzz target and
  # return the number of checks that failed (0 means the build looks good).
  # $1 - fuzz target path; $2 - optional auxiliary binary.
  local FUZZER=$1
  local AUXILIARY_FUZZER=${2:-}
  local failures=0
  local rc=0

  export RUN_FUZZER_MODE="batch"

  check_engine $FUZZER
  rc=$?
  failures=$(( failures + rc ))

  check_architecture $FUZZER
  rc=$?
  failures=$(( failures + rc ))

  # For centipede with a real sanitizer under the helper, the sanitizer
  # inspection is run against the auxiliary binary instead of the target.
  if [[ "$FUZZING_ENGINE" == centipede \
      && "$SANITIZER" != none && "${HELPER:-}" == True ]]; then
    check_mixed_sanitizers $AUXILIARY_FUZZER
  else
    check_mixed_sanitizers $FUZZER
  fi
  rc=$?
  failures=$(( failures + rc ))

  check_startup_crash $FUZZER
  rc=$?
  failures=$(( failures + rc ))

  # TODO: re-enable after introducing bug auto-filing for bad builds.
  # check_seed_corpus $FUZZER
  return $failures
}
|
| 482 |
+
|
| 483 |
+
|
| 484 |
+
# Entry point: require exactly one or two positional arguments.
if [[ $# -ne 1 && $# -ne 2 ]]; then
  echo "Usage: $0 <fuzz_target_binary> [<auxiliary_binary>]"
  exit 1
fi

# Fuzz target path.
FUZZER=$1
AUXILIARY_FUZZER=${2:-}

main $FUZZER $AUXILIARY_FUZZER
exit $?
|
local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/coverage
ADDED
|
@@ -0,0 +1,549 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/bin/bash -u
|
| 2 |
+
# Copyright 2018 Google Inc.
|
| 3 |
+
#
|
| 4 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 5 |
+
# you may not use this file except in compliance with the License.
|
| 6 |
+
# You may obtain a copy of the License at
|
| 7 |
+
#
|
| 8 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 9 |
+
#
|
| 10 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 11 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 12 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 13 |
+
# See the License for the specific language governing permissions and
|
| 14 |
+
# limitations under the License.
|
| 15 |
+
#
|
| 16 |
+
################################################################################
|
| 17 |
+
cd $OUT

# Coverage targets: either the explicit command-line arguments, or every
# executable in $OUT except the known helper binaries that are not targets.
if (( $# > 0 )); then
  FUZZ_TARGETS="$@"
else
  FUZZ_TARGETS="$(find . -maxdepth 1 -type f -executable -printf '%P\n' | \
    grep -v -x -F \
      -e 'llvm-symbolizer' \
      -e 'jazzer_agent_deploy.jar' \
      -e 'jazzer_driver' \
      -e 'jazzer_driver_with_sanitizer' \
      -e 'sanitizer_with_fuzzer.so')"
fi

COVERAGE_OUTPUT_DIR=${COVERAGE_OUTPUT_DIR:-$OUT}

# Layout of the coverage output tree.
DUMPS_DIR="$COVERAGE_OUTPUT_DIR/dumps"
FUZZERS_COVERAGE_DUMPS_DIR="$DUMPS_DIR/fuzzers_coverage"
MERGED_COVERAGE_DIR="$COVERAGE_OUTPUT_DIR/merged_coverage"
FUZZER_STATS_DIR="$COVERAGE_OUTPUT_DIR/fuzzer_stats"
TEXTCOV_REPORT_DIR="$COVERAGE_OUTPUT_DIR/textcov_reports"
LOGS_DIR="$COVERAGE_OUTPUT_DIR/logs"
REPORT_ROOT_DIR="$COVERAGE_OUTPUT_DIR/report"
REPORT_BY_TARGET_ROOT_DIR="$COVERAGE_OUTPUT_DIR/report_target"
PLATFORM=linux
REPORT_PLATFORM_DIR="$COVERAGE_OUTPUT_DIR/report/$PLATFORM"

# Start every run from a clean output tree.
for d in $DUMPS_DIR $FUZZER_STATS_DIR $LOGS_DIR $REPORT_ROOT_DIR $TEXTCOV_REPORT_DIR\
  $REPORT_PLATFORM_DIR $REPORT_BY_TARGET_ROOT_DIR $FUZZERS_COVERAGE_DUMPS_DIR $MERGED_COVERAGE_DIR; do
  rm -rf $d
  mkdir -p $d
done

PROFILE_FILE="$DUMPS_DIR/merged.profdata"
SUMMARY_FILE="$REPORT_PLATFORM_DIR/summary.json"
COVERAGE_TARGET_FILE="$FUZZER_STATS_DIR/coverage_targets.txt"

# Use path mapping, as $SRC directory from the builder is copied into $OUT/$SRC.
PATH_EQUIVALENCE_ARGS="-path-equivalence=/,$OUT"

# It's important to use $COVERAGE_EXTRA_ARGS as the last argument, because it
# can contain paths to source files / directories which are positional args.
LLVM_COV_COMMON_ARGS="$PATH_EQUIVALENCE_ARGS \
  -ignore-filename-regex=.*src/libfuzzer/.* $COVERAGE_EXTRA_ARGS"

# Options to extract branch coverage.
BRANCH_COV_ARGS="--show-branches=count --show-expansions"

# Timeout for running a single fuzz target.
TIMEOUT=1h

# Accumulates the (-object=) arguments for the final llvm-cov invocation.
objects=""

# Cap parallelism at the CPU count, but never more than 10 jobs at once.
NPROC=$(nproc)
MAX_PARALLEL_COUNT=10

CORPUS_DIR=${CORPUS_DIR:-"/corpus"}
|
| 77 |
+
|
| 78 |
+
function run_fuzz_target {
  # Collect clang source-based coverage for one libFuzzer target: replay the
  # corpus via -merge=1, merge the raw dumps into .profdata, then emit a JSON
  # summary and a textcov report for the target.
  # $1 - fuzz target name (binary in $OUT).
  local target=$1

  # '%1m' will produce separate dump files for every object. For example, if a
  # fuzz target loads a shared library, we will have dumps for both of them.
  local profraw_file="$DUMPS_DIR/$target.%1m.profraw"
  local profraw_file_mask="$DUMPS_DIR/$target.*.profraw"
  local profdata_file="$DUMPS_DIR/$target.profdata"
  local corpus_real="$CORPUS_DIR/${target}"

  # -merge=1 requires an output directory, create a new, empty dir for that.
  local corpus_dummy="$OUT/dummy_corpus_dir_for_${target}"
  rm -rf $corpus_dummy && mkdir -p $corpus_dummy

  # Use -merge=1 instead of -runs=0 because merge is crash resistant and would
  # let to get coverage using all corpus files even if there are crash inputs.
  # Merge should not introduce any significant overhead compared to -runs=0,
  # because (A) corpuses are already minimized; (B) we do not use sancov, and so
  # libFuzzer always finishes merge with an empty output dir.
  # Use 100s timeout instead of 25s as code coverage builds can be very slow.
  local args="-merge=1 -timeout=100 $corpus_dummy $corpus_real"

  export LLVM_PROFILE_FILE=$profraw_file
  timeout $TIMEOUT $OUT/$target $args &> $LOGS_DIR/$target.log
  if (( $? != 0 )); then
    echo "Error occured while running $target:"
    cat $LOGS_DIR/$target.log
  fi

  rm -rf $corpus_dummy
  if (( $(du -c $profraw_file_mask | tail -n 1 | cut -f 1) == 0 )); then
    # Skip fuzz targets that failed to produce profile dumps.
    return 0
  fi

  # If necessary translate to latest profraw version.
  if [[ $target == *"@"* ]]; then
    # Extract fuzztest binary name from fuzztest wrapper script: keep the
    # part before '@'. (The previous array-expansion idiom appended a literal
    # "[0]" and only worked because $target expands to element 0.)
    target=${target%%@*}
  fi
  profraw_update.py $OUT/$target -i $profraw_file_mask
  llvm-profdata merge -j=1 -sparse $profraw_file_mask -o $profdata_file

  # Delete unnecessary and (potentially) large .profraw files.
  rm $profraw_file_mask

  shared_libraries=$(coverage_helper shared_libs -build-dir=$OUT -object=$target)

  llvm-cov export -summary-only -instr-profile=$profdata_file -object=$target \
    $shared_libraries $LLVM_COV_COMMON_ARGS > $FUZZER_STATS_DIR/$target.json

  # For introspector.
  llvm-cov show -instr-profile=$profdata_file -object=$target -line-coverage-gt=0 $shared_libraries $BRANCH_COV_ARGS $LLVM_COV_COMMON_ARGS > ${TEXTCOV_REPORT_DIR}/$target.covreport
}
|
| 132 |
+
|
| 133 |
+
function run_go_fuzz_target {
  # Replay the corpus for a Go fuzz target and convert the resulting Go
  # cover profile into the summary JSON consumed by the report pipeline.
  # $1 - fuzz target name (binary in $OUT).
  local target=$1

  echo "Running go target $target"
  export FUZZ_CORPUS_DIR="$CORPUS_DIR/${target}/"
  export FUZZ_PROFILE_NAME="$DUMPS_DIR/$target.perf"

  # Native Go fuzzers read their seeds from testdata/fuzz/<target>; stage
  # the corpus there before the run.
  cd $OUT
  mkdir -p "testdata/fuzz/${target}"
  cp -r "${FUZZ_CORPUS_DIR}" "testdata/fuzz/"

  # rewrite libFuzzer corpus to Std Go corpus if native fuzzing
  grep "TestFuzzCorpus" $target > /dev/null 2>&1 && $SYSGOPATH/bin/convertcorpus $target "testdata/fuzz/${target}"
  cd -

  timeout $TIMEOUT $OUT/$target -test.coverprofile $DUMPS_DIR/$target.profdata &> $LOGS_DIR/$target.log
  if (( $? != 0 )); then
    echo "Error occured while running $target:"
    cat $LOGS_DIR/$target.log
  fi

  # Remove the seeds staged for native Go fuzzing.
  rm -r "${OUT}/testdata/fuzz/${target}"

  # The Go 1.18 fuzzers are renamed to "*_fuzz_.go" during "infra/helper.py build_fuzzers".
  # They are are therefore refered to as "*_fuzz_.go" in the profdata files.
  # Since the copies named "*_fuzz_.go" do not exist in the file tree during
  # the coverage build, we change the references in the .profdata files
  # to the original file names.
  #sed -i "s/_test.go_fuzz_.go/_test.go/g" $DUMPS_DIR/$target.profdata
  # translate from golangish paths to current absolute paths
  cat $OUT/$target.gocovpath | while read i; do sed -i $i $DUMPS_DIR/$target.profdata; done
  # cf PATH_EQUIVALENCE_ARGS
  sed -i 's=/='$OUT'/=' $DUMPS_DIR/$target.profdata
  $SYSGOPATH/bin/gocovsum $DUMPS_DIR/$target.profdata > $FUZZER_STATS_DIR/$target.json
}
|
| 170 |
+
|
| 171 |
+
function run_python_fuzz_target {
  # Replay the corpus under Atheris and stash the produced .coverage file in
  # $OUT for later merging. $1 - fuzz target name.
  local target=$1
  local zipped_sources="$DUMPS_DIR/$target.deps.zip"
  local corpus_real="$CORPUS_DIR/${target}"
  # Write dummy stats file
  echo "{}" > "$FUZZER_STATS_DIR/$target.json"

  # Run the target once per corpus entry (the run count comes from ls -la,
  # which pads the corpus size with its header lines).
  $OUT/$target $corpus_real -atheris_runs=$(ls -la $corpus_real | wc -l) > $LOGS_DIR/$target.log 2>&1
  if (( $? != 0 )); then
    echo "Error happened getting coverage of $target"
    echo "This is likely because Atheris did not exit gracefully"
    cat $LOGS_DIR/$target.log
    return 0
  fi
  mv .coverage $OUT/.coverage_$target
}
|
| 188 |
+
|
| 189 |
+
function run_java_fuzz_target {
  # Replay the corpus for a JVM target under the JaCoCo agent, then convert
  # the collected .exec data into an llvm-cov style JSON summary.
  # $1 - fuzz target name.
  local target=$1

  local exec_file="$DUMPS_DIR/$target.exec"
  local class_dump_dir="$DUMPS_DIR/${target}_classes/"
  mkdir "$class_dump_dir"
  local corpus_real="$CORPUS_DIR/${target}"

  # -merge=1 requires an output directory, create a new, empty dir for that.
  local corpus_dummy="$OUT/dummy_corpus_dir_for_${target}"
  rm -rf $corpus_dummy && mkdir -p $corpus_dummy

  # Use 100s timeout instead of 25s as code coverage builds can be very slow.
  local jacoco_args="destfile=$exec_file,classdumpdir=$class_dump_dir,excludes=com.code_intelligence.jazzer.*\\:com.sun.tools.attach.VirtualMachine"
  local args="-merge=1 -timeout=100 --nohooks \
    --additional_jvm_args=-javaagent\\:/opt/jacoco-agent.jar=$jacoco_args \
    $corpus_dummy $corpus_real"

  timeout $TIMEOUT $OUT/$target $args &> $LOGS_DIR/$target.log
  if (( $? != 0 )); then
    echo "Error occured while running $target:"
    cat $LOGS_DIR/$target.log
  fi

  if (( $(du -c $exec_file | tail -n 1 | cut -f 1) == 0 )); then
    # Skip fuzz targets that failed to produce .exec files.
    echo "$target failed to produce .exec file."
    return 0
  fi

  # Generate XML report only as input to jacoco_report_converter.
  # Source files are not needed for the summary.
  local xml_report="$DUMPS_DIR/${target}.xml"
  local summary_file="$FUZZER_STATS_DIR/$target.json"
  java -jar /opt/jacoco-cli.jar report $exec_file \
    --xml $xml_report \
    --classfiles $class_dump_dir

  # Write llvm-cov summary file.
  jacoco_report_converter.py $xml_report $summary_file
}
|
| 230 |
+
|
| 231 |
+
function run_javascript_fuzz_target {
  # Collect IstanbulJS/nyc coverage for a Jazzer.js target: first minimize
  # the corpus (dropping crashers), then replay it once with coverage on.
  # $1 - fuzz target name.
  local target=$1
  local corpus_real="$CORPUS_DIR/${target}"

  # -merge=1 requires an output directory, create a new, empty dir for that.
  local corpus_dummy="$OUT/dummy_corpus_dir_for_${target}"
  rm -rf $corpus_dummy && mkdir -p $corpus_dummy

  # IstanbulJS currently does not work when the tested program creates
  # subprocesses. For this reason, we first minimize the corpus removing
  # any crashing inputs so that we can report source-based code coverage
  # with a single sweep over the minimized corpus
  local merge_args="-merge=1 -timeout=100 $corpus_dummy $corpus_real"
  timeout $TIMEOUT $OUT/$target $merge_args &> $LOGS_DIR/$target.log

  # nyc saves the coverage reports in a directory with the default name "coverage"
  local coverage_dir="$DUMPS_DIR/coverage_dir_for_${target}"
  rm -rf $coverage_dir && mkdir -p $coverage_dir

  local nyc_json_coverage_file="$coverage_dir/coverage-final.json"
  local nyc_json_summary_file="$coverage_dir/coverage-summary.json"

  local args="-runs=0 $corpus_dummy"
  local jazzerjs_args="--coverage --coverageDirectory $coverage_dir --coverageReporters json --coverageReporters json-summary"

  JAZZERJS_EXTRA_ARGS=$jazzerjs_args $OUT/$target $args &> $LOGS_DIR/$target.log

  if (( $? != 0 )); then
    echo "Error occured while running $target:"
    cat $LOGS_DIR/$target.log
  fi

  if [ ! -s $nyc_json_coverage_file ]; then
    # Skip fuzz targets that failed to produce coverage-final.json file.
    echo "$target failed to produce coverage-final.json file."
    return 0
  fi

  cp $nyc_json_coverage_file $FUZZERS_COVERAGE_DUMPS_DIR/$target.json

  local summary_file="$FUZZER_STATS_DIR/$target.json"

  nyc_report_converter.py $nyc_json_summary_file $summary_file
}
|
| 275 |
+
|
| 276 |
+
function generate_html {
  # Render the HTML coverage report plus summary.json from a merged profile.
  # $1 - .profdata file; $2 - shared-library object args (currently unused
  # by the llvm-cov invocation below); $3 - object args; $4 - output dir.
  local profdata=$1
  local shared_libraries=$2
  local objects=$3
  local output_dir=$4

  rm -rf "$output_dir"
  mkdir -p "$output_dir/$PLATFORM"

  local llvm_cov_args="-instr-profile=$profdata $objects $LLVM_COV_COMMON_ARGS"
  llvm-cov show -format=html -output-dir=$output_dir -Xdemangler rcfilt $llvm_cov_args

  # Export coverage summary in JSON format.
  local summary_file=$output_dir/$PLATFORM/summary.json
  llvm-cov export -summary-only $llvm_cov_args > $summary_file

  coverage_helper -v post_process -src-root-dir=/ -summary-file=$summary_file \
    -output-dir=$output_dir $PATH_EQUIVALENCE_ARGS
}
|
| 296 |
+
|
| 297 |
+
export SYSGOPATH=$GOPATH
export GOPATH=$OUT/$GOPATH
# Run each fuzz target, generate raw coverage dumps. Per-target collection is
# dispatched by $FUZZING_LANGUAGE and (mostly) run in the background, with the
# number of concurrent jobs throttled below.
for fuzz_target in $FUZZ_TARGETS; do
  if [[ $FUZZING_LANGUAGE == "go" ]]; then
    # Continue if not a fuzz target.
    if [[ $FUZZING_ENGINE != "none" ]]; then
      grep "FUZZ_CORPUS_DIR" $fuzz_target > /dev/null 2>&1 || grep "testing\.T" $fuzz_target > /dev/null 2>&1 || continue
    fi
    # Log the target in the targets file.
    echo ${fuzz_target} >> $COVERAGE_TARGET_FILE

    # Run the coverage collection.
    run_go_fuzz_target $fuzz_target &
  elif [[ $FUZZING_LANGUAGE == "python" ]]; then
    echo "Entering python fuzzing"
    # Log the target in the targets file.
    echo ${fuzz_target} >> $COVERAGE_TARGET_FILE

    # Run the coverage collection (synchronously: it moves a shared
    # .coverage file in the working directory).
    run_python_fuzz_target $fuzz_target
  elif [[ $FUZZING_LANGUAGE == "jvm" ]]; then
    # Continue if not a fuzz target.
    if [[ $FUZZING_ENGINE != "none" ]]; then
      grep "LLVMFuzzerTestOneInput" $fuzz_target > /dev/null 2>&1 || continue
    fi

    echo "Running $fuzz_target"
    # Log the target in the targets file.
    echo ${fuzz_target} >> $COVERAGE_TARGET_FILE

    # Run the coverage collection.
    run_java_fuzz_target $fuzz_target &
  elif [[ $FUZZING_LANGUAGE == "javascript" ]]; then
    # Continue if not a fuzz target.
    if [[ $FUZZING_ENGINE != "none" ]]; then
      grep "LLVMFuzzerTestOneInput" $fuzz_target > /dev/null 2>&1 || continue
    fi

    echo "Running $fuzz_target"
    # Log the target in the targets file.
    echo ${fuzz_target} >> $COVERAGE_TARGET_FILE

    # Run the coverage collection.
    run_javascript_fuzz_target $fuzz_target &
  else
    # Continue if not a fuzz target.
    if [[ $FUZZING_ENGINE != "none" ]]; then
      grep "LLVMFuzzerTestOneInput" $fuzz_target > /dev/null 2>&1 || continue
    fi

    echo "Running $fuzz_target"
    # Log the target in the targets file.
    echo ${fuzz_target} >> $COVERAGE_TARGET_FILE

    # Run the coverage collection.
    run_fuzz_target $fuzz_target &

    # Rewrite object if its a FUZZTEST target: keep the binary name before
    # '@'. (The previous array-expansion idiom appended a literal "[0]" and
    # only worked because $fuzz_target expands to element 0.)
    if [[ $fuzz_target == *"@"* ]]; then
      fuzz_target=${fuzz_target%%@*}
    fi
    if [[ -z $objects ]]; then
      # The first object needs to be passed without -object= flag.
      objects="$fuzz_target"
    else
      objects="$objects -object=$fuzz_target"
    fi
  fi

  # Limit the number of processes to be spawned.
  n_child_proc=$(jobs -rp | wc -l)
  while [[ "$n_child_proc" -eq "$NPROC" || "$n_child_proc" -gt "$MAX_PARALLEL_COUNT" ]]; do
    sleep 4
    n_child_proc=$(jobs -rp | wc -l)
  done
done

# Wait for background processes to finish.
wait
|
| 380 |
+
|
| 381 |
+
if [[ $FUZZING_LANGUAGE == "go" ]]; then
|
| 382 |
+
echo $DUMPS_DIR
|
| 383 |
+
$SYSGOPATH/bin/gocovmerge $DUMPS_DIR/*.profdata > fuzz.cov
|
| 384 |
+
gotoolcover -html=fuzz.cov -o $REPORT_ROOT_DIR/index.html
|
| 385 |
+
$SYSGOPATH/bin/gocovsum fuzz.cov > $SUMMARY_FILE
|
| 386 |
+
cp $REPORT_ROOT_DIR/index.html $REPORT_PLATFORM_DIR/index.html
|
| 387 |
+
$SYSGOPATH/bin/pprof-merge $DUMPS_DIR/*.perf.cpu.prof
|
| 388 |
+
mv merged.data $REPORT_ROOT_DIR/cpu.prof
|
| 389 |
+
$SYSGOPATH/bin/pprof-merge $DUMPS_DIR/*.perf.heap.prof
|
| 390 |
+
mv merged.data $REPORT_ROOT_DIR/heap.prof
|
| 391 |
+
#TODO some proxy for go tool pprof -http=127.0.0.1:8001 $DUMPS_DIR/cpu.prof
|
| 392 |
+
echo "Finished generating code coverage report for Go fuzz targets."
|
| 393 |
+
elif [[ $FUZZING_LANGUAGE == "python" ]]; then
|
| 394 |
+
# Extract source files from all dependency zip folders
|
| 395 |
+
mkdir -p /pythoncovmergedfiles/medio
|
| 396 |
+
PYCOVDIR=/pycovdir/
|
| 397 |
+
mkdir $PYCOVDIR
|
| 398 |
+
for fuzzer in $FUZZ_TARGETS; do
|
| 399 |
+
fuzzer_deps=${fuzzer}.pkg.deps.zip
|
| 400 |
+
unzip $OUT/${fuzzer_deps}
|
| 401 |
+
rsync -r ./medio /pythoncovmergedfiles/medio
|
| 402 |
+
rm -rf ./medio
|
| 403 |
+
|
| 404 |
+
# Translate paths in unzipped folders to paths that we can use
|
| 405 |
+
mv $OUT/.coverage_$fuzzer .coverage
|
| 406 |
+
python3 /usr/local/bin/python_coverage_runner_help.py translate /pythoncovmergedfiles/medio
|
| 407 |
+
cp .new_coverage $PYCOVDIR/.coverage_$fuzzer
|
| 408 |
+
cp .new_coverage $OUT/coverage_d_$fuzzer
|
| 409 |
+
done
|
| 410 |
+
|
| 411 |
+
# Combine coverage
|
| 412 |
+
cd $PYCOVDIR
|
| 413 |
+
python3 /usr/local/bin/python_coverage_runner_help.py combine .coverage_*
|
| 414 |
+
python3 /usr/local/bin/python_coverage_runner_help.py html
|
| 415 |
+
# Produce all_cov file used by fuzz introspector.
|
| 416 |
+
python3 /usr/local/bin/python_coverage_runner_help.py json -o ${TEXTCOV_REPORT_DIR}/all_cov.json
|
| 417 |
+
|
| 418 |
+
# Generate .json with similar format to llvm-cov output.
|
| 419 |
+
python3 /usr/local/bin/python_coverage_runner_help.py \
|
| 420 |
+
convert-to-summary-json ${TEXTCOV_REPORT_DIR}/all_cov.json $SUMMARY_FILE
|
| 421 |
+
|
| 422 |
+
# Copy coverage date out
|
| 423 |
+
cp htmlcov/status.json ${TEXTCOV_REPORT_DIR}/html_status.json
|
| 424 |
+
|
| 425 |
+
mv htmlcov/* $REPORT_PLATFORM_DIR/
|
| 426 |
+
mv .coverage_* $REPORT_PLATFORM_DIR/
|
| 427 |
+
elif [[ $FUZZING_LANGUAGE == "jvm" ]]; then
|
| 428 |
+
|
| 429 |
+
# From this point on the script does not tolerate any errors.
|
| 430 |
+
set -e
|
| 431 |
+
|
| 432 |
+
# Merge .exec files from the individual targets.
|
| 433 |
+
jacoco_merged_exec=$DUMPS_DIR/jacoco.merged.exec
|
| 434 |
+
java -jar /opt/jacoco-cli.jar merge $DUMPS_DIR/*.exec \
|
| 435 |
+
--destfile $jacoco_merged_exec
|
| 436 |
+
|
| 437 |
+
# Prepare classes directory for jacoco process
|
| 438 |
+
classes_dir=$DUMPS_DIR/classes
|
| 439 |
+
mkdir $classes_dir
|
| 440 |
+
|
| 441 |
+
# Only copy class files found in $OUT/$SRC to ensure they are
|
| 442 |
+
# lively compiled from the project, avoiding inclusion of
|
| 443 |
+
# dependency classes. This also includes the fuzzer classes.
|
| 444 |
+
find "$OUT/$SRC" -type f -name "*.class" | while read -r class_file; do
|
| 445 |
+
# Skip module-info.class
|
| 446 |
+
if [[ "$(basename "$class_file")" == "module-info.class" ]]; then
|
| 447 |
+
continue
|
| 448 |
+
fi
|
| 449 |
+
|
| 450 |
+
# Use javap to extract the fully qualified name of the class and copy it to $classes_dir
|
| 451 |
+
fqn=$(javap -verbose "$class_file" 2>/dev/null | grep "this_class:" | grep -oP '(?<=// ).*')
|
| 452 |
+
if [ -n "$fqn" ]; then
|
| 453 |
+
mkdir -p $classes_dir/$(dirname $fqn)
|
| 454 |
+
cp $class_file $classes_dir/$fqn.class
|
| 455 |
+
fi
|
| 456 |
+
done
|
| 457 |
+
|
| 458 |
+
# Heuristically determine source directories based on Maven structure.
|
| 459 |
+
# Always include the $SRC root as it likely contains the fuzzer sources.
|
| 460 |
+
sourcefiles_args=(--sourcefiles $OUT/$SRC)
|
| 461 |
+
source_dirs=$(find $OUT/$SRC -type d -name 'java')
|
| 462 |
+
for source_dir in $source_dirs; do
|
| 463 |
+
sourcefiles_args+=(--sourcefiles "$source_dir")
|
| 464 |
+
done
|
| 465 |
+
|
| 466 |
+
# Generate HTML and XML reports.
|
| 467 |
+
xml_report=$REPORT_PLATFORM_DIR/index.xml
|
| 468 |
+
java -jar /opt/jacoco-cli.jar report $jacoco_merged_exec \
|
| 469 |
+
--html $REPORT_PLATFORM_DIR \
|
| 470 |
+
--xml $xml_report \
|
| 471 |
+
--classfiles $classes_dir \
|
| 472 |
+
"${sourcefiles_args[@]}"
|
| 473 |
+
|
| 474 |
+
# Also serve the raw exec file and XML report, which can be useful for
|
| 475 |
+
# automated analysis.
|
| 476 |
+
cp $jacoco_merged_exec $REPORT_PLATFORM_DIR/jacoco.exec
|
| 477 |
+
cp $xml_report $REPORT_PLATFORM_DIR/jacoco.xml
|
| 478 |
+
cp $xml_report $TEXTCOV_REPORT_DIR/jacoco.xml
|
| 479 |
+
|
| 480 |
+
# Write llvm-cov summary file.
|
| 481 |
+
jacoco_report_converter.py $xml_report $SUMMARY_FILE
|
| 482 |
+
|
| 483 |
+
set +e
|
| 484 |
+
elif [[ $FUZZING_LANGUAGE == "javascript" ]]; then
|
| 485 |
+
|
| 486 |
+
# From this point on the script does not tolerate any errors.
|
| 487 |
+
set -e
|
| 488 |
+
|
| 489 |
+
json_report=$MERGED_COVERAGE_DIR/coverage.json
|
| 490 |
+
nyc merge $FUZZERS_COVERAGE_DUMPS_DIR $json_report
|
| 491 |
+
|
| 492 |
+
nyc report -t $MERGED_COVERAGE_DIR --report-dir $REPORT_PLATFORM_DIR --reporter=html --reporter=json-summary
|
| 493 |
+
|
| 494 |
+
nyc_json_summary_file=$REPORT_PLATFORM_DIR/coverage-summary.json
|
| 495 |
+
|
| 496 |
+
# Write llvm-cov summary file.
|
| 497 |
+
nyc_report_converter.py $nyc_json_summary_file $SUMMARY_FILE
|
| 498 |
+
|
| 499 |
+
set +e
|
| 500 |
+
else
|
| 501 |
+
|
| 502 |
+
# From this point on the script does not tolerate any errors.
|
| 503 |
+
set -e
|
| 504 |
+
|
| 505 |
+
# Merge all dumps from the individual targets.
|
| 506 |
+
rm -f $PROFILE_FILE
|
| 507 |
+
llvm-profdata merge -sparse $DUMPS_DIR/*.profdata -o $PROFILE_FILE
|
| 508 |
+
|
| 509 |
+
# TODO(mmoroz): add script from Chromium for rendering directory view reports.
|
| 510 |
+
# The first path in $objects does not have -object= prefix (llvm-cov format).
|
| 511 |
+
shared_libraries=$(coverage_helper shared_libs -build-dir=$OUT -object=$objects)
|
| 512 |
+
objects="$objects $shared_libraries"
|
| 513 |
+
|
| 514 |
+
generate_html $PROFILE_FILE "$shared_libraries" "$objects" "$REPORT_ROOT_DIR"
|
| 515 |
+
|
| 516 |
+
# Per target reports.
|
| 517 |
+
for fuzz_target in $FUZZ_TARGETS; do
|
| 518 |
+
if [[ $fuzz_target == *"@"* ]]; then
|
| 519 |
+
profdata_path=$DUMPS_DIR/$fuzz_target.profdata
|
| 520 |
+
report_dir=$REPORT_BY_TARGET_ROOT_DIR/$fuzz_target
|
| 521 |
+
# Extract fuzztest binary name from fuzztest wrapper script.
|
| 522 |
+
fuzz_target=(${fuzz_target//@/ }[0])
|
| 523 |
+
else
|
| 524 |
+
profdata_path=$DUMPS_DIR/$fuzz_target.profdata
|
| 525 |
+
report_dir=$REPORT_BY_TARGET_ROOT_DIR/$fuzz_target
|
| 526 |
+
fi
|
| 527 |
+
if [[ ! -f "$profdata_path" ]]; then
|
| 528 |
+
echo "WARNING: $fuzz_target has no profdata generated."
|
| 529 |
+
continue
|
| 530 |
+
fi
|
| 531 |
+
|
| 532 |
+
generate_html $profdata_path "$shared_libraries" "$fuzz_target" "$report_dir"
|
| 533 |
+
done
|
| 534 |
+
|
| 535 |
+
set +e
|
| 536 |
+
fi
|
| 537 |
+
|
| 538 |
+
# Make sure report is readable.
|
| 539 |
+
chmod -R +r $REPORT_ROOT_DIR $REPORT_BY_TARGET_ROOT_DIR
|
| 540 |
+
find $REPORT_ROOT_DIR $REPORT_BY_TARGET_ROOT_DIR -type d -exec chmod +x {} +
|
| 541 |
+
|
| 542 |
+
# HTTP_PORT is optional.
|
| 543 |
+
set +u
|
| 544 |
+
if [[ -n $HTTP_PORT ]]; then
|
| 545 |
+
# Serve the report locally.
|
| 546 |
+
echo "Serving the report on http://127.0.0.1:$HTTP_PORT/linux/index.html"
|
| 547 |
+
cd $REPORT_ROOT_DIR
|
| 548 |
+
python3 -m http.server $HTTP_PORT
|
| 549 |
+
fi
|
local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/coverage_helper
ADDED
|
@@ -0,0 +1,17 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/bin/bash -u
# Copyright 2018 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################

# Thin wrapper that forwards all arguments to the coverage utilities script.
# Fix: quote "$@" so arguments containing whitespace are forwarded as single
# words; an unquoted $@ re-splits and glob-expands them.
python3 $CODE_COVERAGE_SRC/coverage_utils.py "$@"
|
local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/download_corpus
ADDED
|
@@ -0,0 +1,30 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/bin/bash -u
# Copyright 2018 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################

# Download corpus archives. Each argument is a single "destination url" pair,
# e.g.: download_corpus "/corpus/a.zip https://example.com/a.zip" ...

if (( $# < 1 )); then
  echo "Usage: $0 \"path_download_to url_download_from\" (can be repeated)" >&2
  exit 1
fi

for pair in "$@"; do
  # -r: do not let backslashes in the URL act as escape characters.
  read -r path url <<< "$pair"
  # Quote path/url so special characters are not word-split or glob-expanded.
  wget -q -O "$path" "$url"
done

# Always exit with 0 as we do not track wget return codes and should not rely
# on the latest command execution.
exit 0
|
local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/generate_differential_cov_report.py
ADDED
|
@@ -0,0 +1,228 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/usr/bin/env python3
|
| 2 |
+
#
|
| 3 |
+
# Copyright 2023 Google LLC
|
| 4 |
+
#
|
| 5 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 6 |
+
# you may not use this file except in compliance with the License.
|
| 7 |
+
# You may obtain a copy of the License at
|
| 8 |
+
#
|
| 9 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 10 |
+
#
|
| 11 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 12 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 13 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 14 |
+
# See the License for the specific language governing permissions and
|
| 15 |
+
# limitations under the License.
|
| 16 |
+
#
|
| 17 |
+
################################################################################
|
| 18 |
+
"""Script for generating differential coverage reports.
|
| 19 |
+
generate_differential_cov_report.py <profdata-dump-directory> \
|
| 20 |
+
<profdata-directory-to-subtract-from-first> <output-directory>
|
| 21 |
+
"""
|
| 22 |
+
import os
|
| 23 |
+
import shutil
|
| 24 |
+
import subprocess
|
| 25 |
+
import sys
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
class ProfData:
  """In-memory representation of an llvm-profdata text dump.

  The text format is a sequence of per-function records separated by blank
  lines; each record is parsed into a FunctionProf.
  """

  def __init__(self, text):
    # Records are delimited by blank lines; skip empty fragments.
    self.function_profs = [
        FunctionProf(record) for record in text.split('\n\n') if record
    ]

  def to_string(self):
    """Serialize all function profiles back into text form."""
    return '\n'.join(prof.to_string() for prof in self.function_profs)

  def find_function(self, function, idx=None):
    """Return the profile whose hash matches |function|'s, or None.

    |idx| is an optional positional hint: records usually appear in the same
    order in both dumps, so that slot is tried before a full scan.
    """
    if idx is not None:
      try:
        candidate = self.function_profs[idx]
      except IndexError:
        candidate = None
      if candidate is not None and candidate.func_hash == function.func_hash:
        return candidate
    for candidate in self.function_profs:
      if candidate.func_hash == function.func_hash:
        return candidate
    return None

  def subtract(self, subtrahend):
    """Subtract |subtrahend|'s counters from this profdata, in place."""
    for idx, prof in enumerate(self.function_profs):
      prof.subtract(subtrahend.find_function(prof, idx))
|
| 62 |
+
|
| 63 |
+
|
| 64 |
+
class FunctionProf:
  """A single function's record from an llvm-profdata text dump."""

  # Comment lines that structure each record in the text format.
  FUNC_HASH_COMMENT_LINE = '# Func Hash:'
  NUM_COUNTERS_COMMENT_LINE = '# Num Counters:'
  COUNTER_VALUES_COMMENT_LINE = '# Counter Values:'

  def __init__(self, text):
    print(text)
    lines = text.splitlines()
    self.function = lines[0]
    assert lines[1] == self.FUNC_HASH_COMMENT_LINE
    self.func_hash = lines[2]
    assert lines[3] == self.NUM_COUNTERS_COMMENT_LINE
    self.num_counters = int(lines[4])
    assert lines[5] == self.COUNTER_VALUES_COMMENT_LINE
    # Normalize every counter to 0/1: only hit/not-hit matters for diffing.
    self.counter_values = [int(int(value) != 0) for value in lines[6:]]

  def to_string(self):
    """Serialize this record back into the profdata text format."""
    header = [
        self.function,
        self.FUNC_HASH_COMMENT_LINE,
        self.func_hash,
        self.NUM_COUNTERS_COMMENT_LINE,
        str(self.num_counters),
        self.COUNTER_VALUES_COMMENT_LINE,
    ]
    counters = [str(value) for value in self.counter_values]
    return '\n'.join(header + counters)

  def subtract(self, subtrahend_prof):
    """Clamp-subtract |subtrahend_prof|'s counters from ours, in place."""
    if not subtrahend_prof:
      print(self.function, 'has no subtrahend')
      # Nothing to subtract.
      return
    self.counter_values = [
        max(ours - theirs, 0)
        for ours, theirs in zip(self.counter_values,
                                subtrahend_prof.counter_values)
    ]
|
| 103 |
+
|
| 104 |
+
|
| 105 |
+
def get_profdata_files(directory):
  """Returns the paths of all .profdata files directly under |directory|."""
  return [
      os.path.join(directory, name)
      for name in os.listdir(directory)
      if name.endswith('.profdata')
  ]
|
| 113 |
+
|
| 114 |
+
|
| 115 |
+
def convert_profdata_to_text(profdata):
  """Convert a binary profdata file into its text representation.

  Returns the path of the generated text file (<profdata>.txt). Any stale
  output file is deleted first so llvm-profdata writes a fresh one.
  """
  text_path = f'{profdata}.txt'
  if os.path.exists(text_path):
    os.remove(text_path)
  command = [
      'llvm-profdata', 'merge', '-j=1', '-sparse', profdata, '--text', '-o',
      text_path
  ]
  print(command)
  subprocess.run(command, check=True)
  return text_path
|
| 127 |
+
|
| 128 |
+
|
| 129 |
+
def convert_text_profdata_to_bin(profdata_text):
  """Convert a text profdata file back into a binary profdata file.

  Returns the path of the generated binary file.
  """
  # Strip '.txt' and '.profdata' from the name and re-append '.profdata'.
  # NOTE(review): str.replace removes *all* occurrences, which assumes those
  # substrings only ever appear as extensions in the filename — confirm.
  bin_path = profdata_text.replace('.txt', '').replace('.profdata',
                                                       '') + '.profdata'
  print('bin profdata', bin_path)
  if os.path.exists(bin_path):
    os.remove(bin_path)
  command = [
      'llvm-profdata', 'merge', '-j=1', '-sparse', profdata_text, '-o', bin_path
  ]
  print(command)
  subprocess.run(command, check=True)
  return bin_path
|
| 142 |
+
|
| 143 |
+
|
| 144 |
+
def get_difference(minuend_filename, subtrahend_filename):
  """Parse both text profdata files and return minuend minus subtrahend."""
  with open(minuend_filename, 'r', encoding='utf-8') as handle:
    print('minuend', minuend_filename)
    minuend = ProfData(handle.read())
  with open(subtrahend_filename, 'r', encoding='utf-8') as handle:
    print('subtrahend', subtrahend_filename)
    subtrahend = ProfData(handle.read())

  minuend.subtract(subtrahend)
  return minuend
|
| 155 |
+
|
| 156 |
+
|
| 157 |
+
def profdatas_to_objects(profdatas):
  """Map each profdata path to its corresponding binary object name."""

  def object_name(path):
    # foo/bar.profdata -> bar
    stem, _unused_ext = os.path.splitext(os.path.basename(path))
    return stem

  return [object_name(path) for path in profdatas]
|
| 162 |
+
|
| 163 |
+
|
| 164 |
+
def generate_differential_cov_reports(minuend_profdatas, subtrahend_profdatas,
                                      difference_dir):
  """Subtract each subtrahend profdata from its minuend and emit reports.

  The two profdata lists are paired positionally. For every pair, a
  differential text profdata is written under |difference_dir|, converted
  back to binary form, and rendered as an HTML coverage report.
  """
  all_objects = profdatas_to_objects(minuend_profdatas)
  # 'merged' is the combined profile, not a real binary on disk.
  per_target_objects = [name for name in all_objects if name != 'merged']
  for minuend_path, subtrahend_path, object_name in zip(minuend_profdatas,
                                                        subtrahend_profdatas,
                                                        all_objects):
    minuend_text = convert_profdata_to_text(minuend_path)
    subtrahend_text = convert_profdata_to_text(subtrahend_path)
    diff_profdata = get_difference(minuend_text, subtrahend_text)
    diff_text_path = os.path.join(difference_dir,
                                  os.path.basename(minuend_text))
    with open(diff_text_path, 'w', encoding='utf-8') as diff_file:
      diff_file.write(diff_profdata.to_string())
    diff_bin_path = convert_text_profdata_to_bin(diff_text_path)
    if diff_bin_path.endswith('merged.profdata'):
      # The merged profile is reported against every real target object.
      generate_html_report(diff_bin_path, per_target_objects,
                           os.path.join(difference_dir, 'merged'))
    else:
      generate_html_report(diff_bin_path, [object_name],
                           os.path.join(difference_dir, object_name))
|
| 189 |
+
|
| 190 |
+
|
| 191 |
+
def generate_html_report(profdata, objects, directory):
  """Generate an HTML coverage report with `llvm-cov show`.

  Args:
    profdata: path to the (binary) profdata file to report on.
    objects: binary names, relative to $OUT, to collect coverage for.
    directory: output directory; the report is written to <directory>/reports.
  """
  # TODO(metzman): Deal with shared libs.
  html_dir = os.path.join(directory, 'reports')
  if os.path.exists(html_dir):
    # Bug fix: html_dir is a directory, so os.remove() would raise
    # IsADirectoryError; remove the whole tree instead.
    shutil.rmtree(html_dir)
  os.makedirs(html_dir)
  out_dir = os.getenv('OUT', '/out')
  command = [
      'llvm-cov', 'show', f'-path-equivalence=/,{out_dir}', '-format=html',
      '-Xdemangler', 'rcfilt', f'-instr-profile={profdata}'
  ]

  objects = [os.path.join(out_dir, binobject) for binobject in objects]
  command += objects + ['-o', html_dir]
  print(' '.join(command))
  subprocess.run(command, check=True)
|
| 208 |
+
|
| 209 |
+
|
| 210 |
+
def main():
  """Generate differential coverage reports.

  Usage: generate_differential_cov_report.py <minuend_dir> <subtrahend_dir>
      <difference_dir>
  """
  if len(sys.argv) != 4:
    print(
        f'Usage: {sys.argv[0]} <minuend_dir> <subtrahend_dir> <difference_dir>')
    # Bug fix: without exiting here, execution fell through and crashed on
    # sys.argv[1] with an IndexError; exit with an error status instead.
    sys.exit(1)
  minuend_dir = sys.argv[1]
  subtrahend_dir = sys.argv[2]
  difference_dir = sys.argv[3]
  # Start from a clean output directory.
  if os.path.exists(difference_dir):
    shutil.rmtree(difference_dir)
  os.makedirs(difference_dir, exist_ok=True)
  minuend_profdatas = get_profdata_files(minuend_dir)
  subtrahend_profdatas = get_profdata_files(subtrahend_dir)
  generate_differential_cov_reports(minuend_profdatas, subtrahend_profdatas,
                                    difference_dir)
|
| 225 |
+
|
| 226 |
+
|
| 227 |
+
# Run only when executed as a script, not when imported as a module.
if __name__ == '__main__':
  main()
|
local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/install_deps.sh
ADDED
|
@@ -0,0 +1,37 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/bin/bash -eux
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################

# Install dependencies in a platform-aware way.

# Base runner packages. --no-install-recommends keeps the image small by
# skipping optional recommended packages.
apt-get update && apt-get install -y \
    binutils \
    file \
    ca-certificates \
    fonts-dejavu \
    git \
    libcap2 \
    rsync \
    unzip \
    wget \
    zip --no-install-recommends

case $(uname -m) in
  x86_64)
    # We only need to worry about i386 if we are on x86_64.
    apt-get install -y lib32gcc1 libc6-i386
    ;;
esac
|
local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/install_go.sh
ADDED
|
@@ -0,0 +1,41 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/bin/bash -eux
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################

# Install go on x86_64, don't do anything on ARM.

case $(uname -m) in
  x86_64)
    # Download and install Go 1.19.
    wget -q https://storage.googleapis.com/golang/getgo/installer_linux -O $SRC/installer_linux
    chmod +x $SRC/installer_linux
    # The getgo installer consults $SHELL to update shell profiles.
    SHELL="bash" $SRC/installer_linux -version 1.19
    rm $SRC/installer_linux
    # Set up Golang coverage modules.
    printf $(find . -name gocoverage)
    # Build coverage tooling; installer places Go at /root/.go.
    cd $GOPATH/gocoverage && /root/.go/bin/go install ./...
    cd convertcorpus && /root/.go/bin/go install .
    # Build the `cover` tool from the Go source tree as gotoolcover.
    cd /root/.go/src/cmd/cover && /root/.go/bin/go build && mv cover $GOPATH/bin/gotoolcover
    ;;
  aarch64)
    # Don't install go because installer is not provided.
    echo "Not installing go: aarch64."
    ;;
  *)
    echo "Error: unsupported architecture: $(uname -m)"
    exit 1
    ;;
esac
|
local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/install_java.sh
ADDED
|
@@ -0,0 +1,46 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/bin/bash -eux
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################

# Install java in a platform-aware way.

# Map the machine architecture onto the naming used by the JDK downloads.
ARCHITECTURE=
case $(uname -m) in
  x86_64)
    ARCHITECTURE=x64
    ;;
  aarch64)
    ARCHITECTURE=aarch64
    ;;
  *)
    echo "Error: unsupported architecture: $(uname -m)"
    exit 1
    ;;
esac

# Fetch both JDK tarballs up front, then unpack each into its own home.
wget -q https://download.java.net/java/GA/jdk17.0.2/dfd4a8d0985749f896bed50d7138ee7f/8/GPL/openjdk-17.0.2_linux-"$ARCHITECTURE"_bin.tar.gz -O /tmp/openjdk-17.0.2_linux-"$ARCHITECTURE"_bin.tar.gz
wget -q https://download.java.net/java/GA/jdk15.0.2/0d1cfde4252546c6931946de8db48ee2/7/GPL/openjdk-15.0.2_linux-"$ARCHITECTURE"_bin.tar.gz -O /tmp/openjdk-15.0.2_linux-"$ARCHITECTURE"_bin.tar.gz
cd /tmp
# Install OpenJDK 17 as the default JDK; removing jmods and src.zip trims
# components not needed at runtime.
mkdir -p $JAVA_HOME
tar -xz --strip-components=1 -f openjdk-17.0.2_linux-"$ARCHITECTURE"_bin.tar.gz --directory $JAVA_HOME
rm -f openjdk-17.0.2_linux-"$ARCHITECTURE"_bin.tar.gz
rm -rf $JAVA_HOME/jmods $JAVA_HOME/lib/src.zip

# Install OpenJDK 15 and trim its size by removing unused components. Some projects only run with Java 15.
mkdir -p $JAVA_15_HOME
tar -xz --strip-components=1 -f openjdk-15.0.2_linux-"$ARCHITECTURE"_bin.tar.gz --directory $JAVA_15_HOME
rm -f openjdk-15.0.2_linux-"$ARCHITECTURE"_bin.tar.gz
rm -rf $JAVA_15_HOME/jmods $JAVA_15_HOME/lib/src.zip
|
local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/install_javascript.sh
ADDED
|
@@ -0,0 +1,30 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/bin/bash -eux
# Copyright 2023 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################
# see installation instructions: https://github.com/nodesource/distributions#available-architectures
# Prerequisites for adding the NodeSource apt repository over HTTPS.
apt-get update
apt-get install -y ca-certificates curl gnupg
# Import the NodeSource signing key into a dedicated keyring.
mkdir -p /etc/apt/keyrings
curl -fsSL https://deb.nodesource.com/gpgkey/nodesource-repo.gpg.key | gpg --dearmor -o /etc/apt/keyrings/nodesource.gpg

# Pin the Node.js major version installed from NodeSource.
NODE_MAJOR=20
echo "deb [signed-by=/etc/apt/keyrings/nodesource.gpg] https://deb.nodesource.com/node_$NODE_MAJOR.x nodistro main" | tee /etc/apt/sources.list.d/nodesource.list

apt-get update
apt-get install nodejs -y

# Install latest versions of nyc for source-based coverage reporting
npm install --global nyc
|
local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/jacoco_report_converter.py
ADDED
|
@@ -0,0 +1,174 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/usr/bin/env python3
|
| 2 |
+
# Copyright 2021 Google LLC
|
| 3 |
+
#
|
| 4 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 5 |
+
# you may not use this file except in compliance with the License.
|
| 6 |
+
# You may obtain a copy of the License at
|
| 7 |
+
#
|
| 8 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 9 |
+
#
|
| 10 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 11 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 12 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 13 |
+
# See the License for the specific language governing permissions and
|
| 14 |
+
# limitations under the License.
|
| 15 |
+
#
|
| 16 |
+
################################################################################
|
| 17 |
+
"""Helper script for creating an llvm-cov style JSON summary from a JaCoCo XML
|
| 18 |
+
report."""
|
| 19 |
+
import json
|
| 20 |
+
import os
|
| 21 |
+
import sys
|
| 22 |
+
import xml.etree.ElementTree as ET
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
def convert(xml):
  """Converts a JaCoCo XML report into an llvm-cov style JSON summary."""
  report = ET.fromstring(xml)
  files = []

  # Java class files do not record the absolute location of their sources, so
  # coverage info is matched to files under /out/src via the fully qualified
  # class name: class foo.bar.Baz is looked up as a path ending in
  # foo/bar/Baz.java. This assumes a project only ever contains a single
  # version of a class and that no class name is a suffix of another one.
  src_files = list_src_files()

  for clazz in report.findall('./package/class'):
    # Fuzz harness classes are not part of the project under test.
    if is_fuzzer_class(clazz):
      continue

    # Elements without a source file name are not real classes.
    source_name = clazz.attrib.get('sourcefilename')
    if source_name is None:
      continue

    # <class name="foo/Bar" sourcefilename="Bar.java"> maps to 'foo/Bar.java'.
    canonical_path = os.path.join(os.path.dirname(clazz.attrib['name']),
                                  source_name)

    per_class_summary = make_element_summary(clazz)
    files.extend({
        'filename': src_file,
        'summary': per_class_summary,
    } for src_file in relative_to_src_path(src_files, canonical_path))

  summary = {
      'type': 'oss-fuzz.java.coverage.json.export',
      'version': '1.0.0',
      'data': [{
          'totals': make_element_summary(report),
          'files': files,
      }],
  }
  return json.dumps(summary)
|
| 73 |
+
|
| 74 |
+
|
| 75 |
+
def list_src_files():
  """Returns a map from basename to full paths for all files in $OUT/$SRC."""
  filename_to_paths = {}
  out_prefix = os.environ['OUT'] + '/'
  walk_root = out_prefix + os.environ['SRC']
  for parent_dir, _, names in os.walk(walk_root):
    for name in names:
      # Strip the leading $OUT/ so that /out//src/... becomes /src/...
      relative_path = (parent_dir + '/' + name)[len(out_prefix):]
      filename_to_paths.setdefault(name, []).append(relative_path)
  return filename_to_paths
|
| 88 |
+
|
| 89 |
+
|
| 90 |
+
def is_fuzzer_class(class_element):
  """Returns whether the class element represents a fuzz harness.

  A class is treated as a fuzzer if it declares a method named
  fuzzerTestOneInput.

  Args:
    class_element: a <class> Element from the JaCoCo XML report.

  Returns:
    True if the class declares fuzzerTestOneInput, False otherwise.
  """
  # Use an explicit None check: Element truthiness is deprecated and
  # evaluates to False for an element without children, which would
  # mis-classify a harness whose method element carries no counter children.
  method_element = class_element.find('./method[@name="fuzzerTestOneInput"]')
  return method_element is not None
|
| 97 |
+
|
| 98 |
+
|
| 99 |
+
def relative_to_src_path(src_files, canonical_path):
  """Returns every known source path that ends in canonical_path."""
  candidates = src_files.get(os.path.basename(canonical_path))
  if not candidates:
    return []
  suffix = '/' + canonical_path
  return [candidate for candidate in candidates if candidate.endswith(suffix)]
|
| 108 |
+
|
| 109 |
+
|
| 110 |
+
def make_element_summary(element):
  """Builds an llvm-cov style coverage summary for one XML report element."""
  method_counter = element.find("./counter[@type='METHOD']")
  line_counter = element.find("./counter[@type='LINE']")

  # JaCoCo's BRANCH counter tracks covered control-flow edges between
  # llvm-cov's regions rather than the regions themselves; for non-trivial
  # code the difference is negligible. However, a class whose methods contain
  # no branches gets no branch coverage at all even when instructions were
  # executed. To avoid reporting zero region coverage in that case (wrong for
  # CI Fuzz purposes), count one extra covered region whenever any
  # instruction ran.
  instruction_counter = element.find("./counter[@type='INSTRUCTION']")
  adjustment = 0
  if instruction_counter is not None and int(
      instruction_counter.attrib['covered']) > 0:
    adjustment = 1
  branch_counter = element.find("./counter[@type='BRANCH']")

  return {
      'functions': make_counter_summary(method_counter),
      'lines': make_counter_summary(line_counter),
      'regions': make_counter_summary(branch_counter,
                                      covered_adjustment=adjustment),
  }
|
| 137 |
+
|
| 138 |
+
|
| 139 |
+
def make_counter_summary(counter_element, covered_adjustment=0):
  """Converts a JaCoCo <counter> element into an llvm-cov totals entry.

  A missing counter (None) yields a zero entry apart from the adjustment.
  """
  covered = covered_adjustment
  notcovered = 0
  if counter_element is not None:
    covered += int(counter_element.attrib['covered'])
    notcovered += int(counter_element.attrib['missed'])
  total = covered + notcovered
  return {
      'covered': covered,
      'notcovered': notcovered,
      'count': total,
      'percent': (100.0 * covered) / total if total != 0 else 0,
  }
|
| 155 |
+
|
| 156 |
+
|
| 157 |
+
def main():
  """Command-line entry point: JaCoCo XML in, llvm-cov style JSON out."""
  args = sys.argv
  if len(args) != 3:
    sys.stderr.write('Usage: %s <path_to_jacoco_xml> <out_path_json>\n' %
                     args[0])
    return 1

  xml_path, json_path = args[1], args[2]
  with open(xml_path, 'r') as xml_file:
    json_summary = convert(xml_file.read())
  with open(json_path, 'w') as json_file:
    json_file.write(json_summary)
  return 0


if __name__ == '__main__':
  sys.exit(main())
|
local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/nyc_report_converter.py
ADDED
|
@@ -0,0 +1,80 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/usr/bin/env python3
|
| 2 |
+
# Copyright 2023 Google LLC
|
| 3 |
+
#
|
| 4 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 5 |
+
# you may not use this file except in compliance with the License.
|
| 6 |
+
# You may obtain a copy of the License at
|
| 7 |
+
#
|
| 8 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 9 |
+
#
|
| 10 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 11 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 12 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 13 |
+
# See the License for the specific language governing permissions and
|
| 14 |
+
# limitations under the License.
|
| 15 |
+
#
|
| 16 |
+
################################################################################
|
| 17 |
+
"""Helper script for creating a llvm-cov style JSON summary from a nyc
|
| 18 |
+
JSON summary."""
|
| 19 |
+
import json
|
| 20 |
+
import sys
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
def convert(nyc_json_summary):
  """Converts a nyc JSON summary report into an llvm-cov style JSON summary.

  The 'total' key of the nyc report becomes the llvm-cov totals; every other
  key is a source file and becomes a per-file entry.
  """
  per_file_entries = [{
      'filename': src_file,
      'summary': file_summary(nyc_json_summary[src_file]),
  } for src_file in nyc_json_summary if src_file != 'total']

  summary = {
      'type': 'oss-fuzz.javascript.coverage.json.export',
      'version': '1.0.0',
      'data': [{
          'totals': file_summary(nyc_json_summary['total']),
          'files': per_file_entries,
      }],
  }
  return json.dumps(summary)
|
| 41 |
+
|
| 42 |
+
|
| 43 |
+
def file_summary(nyc_file_summary):
  """Maps one nyc per-file summary onto the llvm-cov summary fields.

  nyc's 'branches' counter is reported as llvm-cov 'regions'.
  """
  field_mapping = (('functions', 'functions'), ('lines', 'lines'),
                   ('regions', 'branches'))
  return {
      dst: element_summary(nyc_file_summary[src]) for dst, src in field_mapping
  }
|
| 50 |
+
|
| 51 |
+
|
| 52 |
+
def element_summary(element):
  """Converts one nyc coverage counter into llvm-cov count fields.

  nyc reports 'Unknown' for the percentage when it cannot compute one;
  that is normalized to 0.
  """
  total = element['total']
  covered = element['covered']
  percent = element['pct']
  return {
      'count': total,
      'covered': covered,
      'notcovered': total - covered - element['skipped'],
      'percent': 0 if percent == 'Unknown' else percent,
  }
|
| 61 |
+
|
| 62 |
+
|
| 63 |
+
def main():
  """Command-line entry point: nyc JSON summary in, llvm-cov style JSON out."""
  if len(sys.argv) != 3:
    sys.stderr.write('Usage: %s <path_to_nyc_json_summary> <out_path_json>\n' %
                     sys.argv[0])
    return 1

  input_path, output_path = sys.argv[1], sys.argv[2]
  with open(input_path, 'r') as nyc_json_summary_file:
    converted = convert(json.load(nyc_json_summary_file))
  with open(output_path, 'w') as json_output_file:
    json_output_file.write(converted)
  return 0


if __name__ == '__main__':
  sys.exit(main())
|
local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/parse_options.py
ADDED
|
@@ -0,0 +1,58 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/usr/bin/env python3
|
| 2 |
+
# Copyright 2020 Google LLC
|
| 3 |
+
#
|
| 4 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 5 |
+
# you may not use this file except in compliance with the License.
|
| 6 |
+
# You may obtain a copy of the License at
|
| 7 |
+
#
|
| 8 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 9 |
+
#
|
| 10 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 11 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 12 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 13 |
+
# See the License for the specific language governing permissions and
|
| 14 |
+
# limitations under the License.
|
| 15 |
+
#
|
| 16 |
+
################################################################################
|
| 17 |
+
"""Helper script for parsing custom fuzzing options."""
|
| 18 |
+
import configparser
|
| 19 |
+
import sys
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
def parse_options(options_file_path, options_section):
  """Reads options_file_path and renders the options of options_section.

  libfuzzer options are rendered as '-key=value' pairs joined by spaces;
  every other section is treated as sanitizer options and rendered as
  'key=value' pairs joined by colons. Returns None when the section is
  missing.
  """
  parser = configparser.ConfigParser()
  parser.read(options_file_path)

  if not parser.has_section(options_section):
    return None

  items = parser[options_section].items()
  if options_section == 'libfuzzer':
    return ' '.join('-%s=%s' % pair for pair in items)
  # Sanitizer options.
  return ':'.join('%s=%s' % pair for pair in items)
|
| 41 |
+
|
| 42 |
+
|
| 43 |
+
def main():
  """Parses argv and prints the requested options section, if present."""
  if len(sys.argv) < 3:
    sys.stderr.write('Usage: %s <path_to_options_file> <options_section>\n' %
                     sys.argv[0])
    return 1

  options_string = parse_options(sys.argv[1], sys.argv[2])
  if options_string is not None:
    print(options_string)
  return 0


if __name__ == "__main__":
  sys.exit(main())
|
local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/profraw_update.py
ADDED
|
@@ -0,0 +1,182 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/usr/bin/env python3
|
| 2 |
+
# Copyright 2021 Google LLC
|
| 3 |
+
#
|
| 4 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 5 |
+
# you may not use this file except in compliance with the License.
|
| 6 |
+
# You may obtain a copy of the License at
|
| 7 |
+
#
|
| 8 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 9 |
+
#
|
| 10 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 11 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 12 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 13 |
+
# See the License for the specific language governing permissions and
|
| 14 |
+
# limitations under the License.
|
| 15 |
+
#
|
| 16 |
+
################################################################################
|
| 17 |
+
"""Helper script for upgrading a profraw file to latest version."""
|
| 18 |
+
|
| 19 |
+
from collections import namedtuple
|
| 20 |
+
import struct
|
| 21 |
+
import subprocess
|
| 22 |
+
import sys
|
| 23 |
+
|
| 24 |
+
HeaderGeneric = namedtuple('HeaderGeneric', 'magic version')
|
| 25 |
+
HeaderVersion9 = namedtuple(
|
| 26 |
+
'HeaderVersion9',
|
| 27 |
+
'BinaryIdsSize DataSize PaddingBytesBeforeCounters CountersSize \
|
| 28 |
+
PaddingBytesAfterCounters NumBitmapBytes PaddingBytesAfterBitmapBytes NamesSize CountersDelta BitmapDelta NamesDelta ValueKindLast'
|
| 29 |
+
)
|
| 30 |
+
|
| 31 |
+
PROFRAW_MAGIC = 0xff6c70726f667281
|
| 32 |
+
|
| 33 |
+
|
| 34 |
+
def relativize_address(data, offset, databegin, sect_prf_cnts, sect_prf_data):
  """Rewrites the absolute address at data[offset] into a relative one.

  Only 8-byte values falling inside [sect_prf_cnts, sect_prf_data) are
  rewritten, in place. Returns True when the value was modified, False
  otherwise.
  """
  (address,) = struct.unpack('Q', data[offset:offset + 8])
  if not sect_prf_cnts <= address < sect_prf_data:
    # Not an address in the counters section; leave the bytes untouched.
    return False
  # Wrap to an unsigned 64-bit delta and write it back in place.
  packed = struct.pack('Q', (address - databegin) & 0xffffffffffffffff)
  data[offset:offset + 8] = packed
  return True
|
| 47 |
+
|
| 48 |
+
|
| 49 |
+
def upgrade(data, sect_prf_cnts, sect_prf_data):
  """Upgrades profraw data to the version 9 layout.

  Args:
    data: bytearray holding the raw profile contents.
    sect_prf_cnts: address of the __llvm_prf_cnts section in the binary.
    sect_prf_data: address of the __llvm_prf_data section in the binary.

  Returns:
    The upgraded profile data.

  Raises:
    Exception: on a bad magic value or an unsupported source version.
  """
  generic_header = HeaderGeneric._make(struct.unpack('QQ', data[:16]))
  if generic_header.magic != PROFRAW_MAGIC:
    raise Exception('Bad magic.')
  base_version = generic_header.version

  if base_version >= 9:
    # Nothing to do.
    return data
  if base_version < 5 or base_version == 6:
    raise Exception('Unhandled version.')

  if generic_header.version == 5:
    generic_header = generic_header._replace(version=7)
    # Upgrade from version 5 to 7 by adding the BinaryIdsSize field.
    data = data[:8] + struct.pack('Q', generic_header.version) + struct.pack(
        'Q', 0) + data[16:]
  if generic_header.version == 7:
    # Version 7 -> 8 only bumps the version number in the header.
    # cf https://reviews.llvm.org/D111123
    generic_header = generic_header._replace(version=8)
    data = data[:8] + struct.pack('Q', generic_header.version) + data[16:]
  if generic_header.version == 8:
    # Upgrade from version 8 to 9 by adding the NumBitmapBytes,
    # PaddingBytesAfterBitmapBytes and BitmapDelta header fields.
    # see https://reviews.llvm.org/D138846
    generic_header = generic_header._replace(version=9)
    data = data[:8] + struct.pack(
        'Q', generic_header.version) + data[16:56] + struct.pack(
            'QQ', 0, 0) + data[56:72] + struct.pack('Q', 0) + data[72:]

  v9_header = HeaderVersion9._make(struct.unpack('QQQQQQQQQQQQ', data[16:112]))

  if base_version <= 8 and v9_header.BinaryIdsSize % 8 != 0:
    # Adds padding for binary ids.
    # cf commit b9f547e8e51182d32f1912f97a3e53f4899ea6be
    # cf https://reviews.llvm.org/D110365
    padlen = 8 - (v9_header.BinaryIdsSize % 8)
    # NOTE(review): the padded size is stored in v7_header but the header
    # written back below keeps the unpadded v9_header.BinaryIdsSize, as the
    # original code did — confirm against the LLVM reader before changing.
    v7_header = v9_header._replace(BinaryIdsSize=v9_header.BinaryIdsSize +
                                   padlen)
    data = data[:16] + struct.pack('Q', v9_header.BinaryIdsSize) + data[24:]
    data = data[:112 + v9_header.BinaryIdsSize] + bytes(
        padlen) + data[112 + v9_header.BinaryIdsSize:]

  if base_version <= 8:
    # Widen every ProfrawData record: insert BitmapPtr and an aligned
    # u32(NumBitmapBytes), and rebase the counters pointer by the 16 bytes
    # inserted in front of each earlier record.
    offset = 112 + v9_header.BinaryIdsSize
    for d in range(v9_header.DataSize):
      data = data[:offset + 3 * 8] + struct.pack(
          'Q', 0) + data[offset + 3 * 8:offset + 6 * 8] + struct.pack(
              'Q', 0) + data[offset + 6 * 8:]
      value = struct.unpack('Q',
                            data[offset + 2 * 8:offset + 3 * 8])[0] - 16 * d
      data = data[:offset + 2 * 8] + struct.pack('Q',
                                                 value) + data[offset + 3 * 8:]
      offset += 8 * 8

  if base_version >= 8:
    # Nothing more to do.
    return data

  # The remaining changes relate to the version 7 -> 8 bump, which made
  # CountersPtr relative.
  dataref = sect_prf_data
  # 80 is the offset of CountersDelta in the header.
  if not relativize_address(data, 80, dataref, sect_prf_cnts, sect_prf_data):
    # CountersDelta was not an absolute address; nothing to relativize.
    return data

  offset = 112 + v9_header.BinaryIdsSize
  # This also works for C+Rust binaries compiled with
  # clang-14/rust-nightly-clang-13.
  for _ in range(v9_header.DataSize):
    # 16 is the offset of CounterPtr in the ProfrawData structure.
    relativize_address(data, offset + 16, dataref, sect_prf_cnts, sect_prf_data)
    # We need this because of CountersDelta -= sizeof(*SrcData);
    # seen in __llvm_profile_merge_from_buffer.
    dataref += 44 + 2 * (v9_header.ValueKindLast + 1)
    # BUG FIX: the original code referenced an undefined name `was8` here,
    # raising NameError whenever this loop ran. The loop is only reachable
    # when base_version < 8 (versions >= 8 returned above), so the intended
    # version-8 adjustment could never apply; the dead branch is removed.
    # This is the size of one ProfrawData structure.
    offset += 44 + 2 * (v9_header.ValueKindLast + 1)

  return data
|
| 131 |
+
|
| 132 |
+
|
| 133 |
+
def main():
  """Helper script for upgrading a profraw file to latest version."""
  # Expected argv: <binary> [-i | -o <out_name>] <profraw>...
  if len(sys.argv) < 3:
    sys.stderr.write('Usage: %s <binary> options? <profraw>...\n' % sys.argv[0])
    return 1

  # First find llvm profile sections addresses in the elf, quick and dirty.
  process = subprocess.Popen(['readelf', '-S', sys.argv[1]],
                             stdout=subprocess.PIPE)
  output, err = process.communicate()
  # NOTE(review): stderr is not piped, so `err` is always None here and this
  # branch can never trigger; a readelf failure instead shows up as missing
  # section addresses below.
  if err:
    print('readelf failed')
    return 2
  # NOTE(review): if the readelf output contains neither section name,
  # sect_prf_cnts/sect_prf_data stay unbound and the upgrade() call below
  # raises UnboundLocalError — assumes instrumented ELF inputs; confirm.
  for line in iter(output.split(b'\n')):
    if b'__llvm_prf_cnts' in line:
      # Column 3 of a `readelf -S` row holds the section address (hex).
      sect_prf_cnts = int(line.split()[3], 16)
    elif b'__llvm_prf_data' in line:
      sect_prf_data = int(line.split()[3], 16)

  # Output selection: default file name, `-i` rewrites each input in place,
  # `-o <name>` writes to the given file. `start` is the index of the first
  # profraw argument.
  out_name = "default.profup"
  in_place = False
  start = 2
  if sys.argv[2] == "-i":
    in_place = True
    start = start + 1
  elif sys.argv[2] == "-o":
    out_name = sys.argv[3]
    start = 4

  if len(sys.argv) < start:
    sys.stderr.write('Usage: %s <binary> options <profraw>...\n' % sys.argv[0])
    return 1

  # Upgrade every profraw file named on the command line.
  for i in range(start, len(sys.argv)):
    # Then open and read the input profraw file.
    with open(sys.argv[i], 'rb') as input_file:
      profraw_base = bytearray(input_file.read())
    # Do the upgrade, returning a bytes object.
    profraw_latest = upgrade(profraw_base, sect_prf_cnts, sect_prf_data)
    # Write the output to the file given to the command line.
    if in_place:
      out_name = sys.argv[i]
    with open(out_name, 'wb') as output_file:
      output_file.write(profraw_latest)

  return 0


if __name__ == '__main__':
  sys.exit(main())
|
local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/python_coverage_runner_help.py
ADDED
|
@@ -0,0 +1,181 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2022 Google LLC
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
"""
|
| 15 |
+
Helper to manage coverage.py related operations. Does two main
|
| 16 |
+
things: (1) pass commands into the coverage.py library and (2)
|
| 17 |
+
translate .coverage created from a pyinstaller executable into
|
| 18 |
+
paths that match local files. This is needed for html report creation.
|
| 19 |
+
"""
|
| 20 |
+
import os
|
| 21 |
+
import re
|
| 22 |
+
import json
|
| 23 |
+
import sys
|
| 24 |
+
from coverage.cmdline import main as coverage_main
|
| 25 |
+
from coverage.data import CoverageData
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
def should_exclude_file(filepath):
  """Returns True for paths that should not appear in the coverage report."""
  # Atheris (the fuzzing engine) itself is never project code.
  if 'atheris' in filepath:
    return True

  # Standard-library modules live under /usr/local/lib/python, but
  # third-party packages installed there (site-packages / dist-packages)
  # must be kept.
  in_python_lib = '/usr/local/lib/python' in filepath
  in_package_dir = ('site-packages' in filepath or
                    'dist-packages' in filepath)
  if in_python_lib and not in_package_dir:
    return True

  # PyInstaller bootstrap modules are bundling machinery, not project code.
  return 'PyInstaller' in filepath
|
| 44 |
+
|
| 45 |
+
|
| 46 |
+
def translate_lines(cov_data, new_cov_data, all_file_paths):
  """Copies line data from cov_data into new_cov_data under local paths.

  Measured files recorded by a PyInstaller-built executable point into the
  bundle (/tmp/_MEI.../ or /out/...); each one is matched against the local
  checkout by path suffix so the HTML report can find the sources.
  """
  for measured_path in cov_data.measured_files():
    suffix = measured_path
    if suffix.startswith('/tmp/_MEI'):
      # Drop the /tmp/_MEIxxxx/ bundle prefix (first three components).
      suffix = '/'.join(suffix.split('/')[3:])
    if suffix.startswith('/out/'):
      suffix = suffix.replace('/out/', '')

    # Record line data against every local file matching the suffix.
    for local_path in all_file_paths:
      if should_exclude_file(local_path):
        continue
      if not local_path.endswith(suffix):
        continue
      print('Found matching: %s' % (local_path))
      new_cov_data.add_lines({local_path: cov_data.lines(measured_path)})
|
| 67 |
+
|
| 68 |
+
|
| 69 |
+
def translate_coverage(all_file_paths):
  """Rewrites .coverage paths to local ones, producing .new_coverage.

  The input .coverage database was produced inside a PyInstaller
  executable, so its recorded paths do not exist locally; translate_lines
  maps them onto all_file_paths and the result is written to .new_coverage
  for HTML report creation.
  """
  source = CoverageData('.coverage')
  translated = CoverageData('.new_coverage')

  source.read()
  translate_lines(source, translated, all_file_paths)
  translated.write()
|
| 81 |
+
|
| 82 |
+
|
| 83 |
+
def convert_coveragepy_cov_to_summary_json(src, dst):
  """Converts a coveragepy JSON report into an llvm-cov style summary.json.

  `src` is the source coveragepy JSON file; `dst` is the destination file,
  which is overwritten. Only line coverage is available from coveragepy, so
  the branch/function/instantiation/region totals are emitted as zeros to
  keep the llvm-cov shape.
  """
  files_section = {}
  total_covered = 0
  total_count = 0

  with open(src, "r") as src_f:
    coveragepy_report = json.loads(src_f.read())

  if 'files' in coveragepy_report:
    for path in coveragepy_report.get('files'):
      file_entry = coveragepy_report['files'][path]
      if 'summary' not in file_entry:
        continue
      file_stats = file_entry['summary']
      covered_lines = file_stats['covered_lines']
      missing_lines = file_stats['missing_lines']
      line_count = covered_lines + missing_lines

      # Accumulate overall line coverage across all files.
      total_covered += covered_lines
      total_count += line_count

      files_section[path] = {
          'summary': {
              'lines': {
                  'count': line_count,
                  'covered': covered_lines,
                  'notcovered': missing_lines,
                  'percent': file_stats['percent_covered'],
              }
          }
      }

  # NOTE(review): this overall value is a 0..1 ratio while the per-file
  # percents above are 0..100 as reported by coveragepy; preserved as-is
  # for compatibility with existing consumers.
  overall_ratio = total_covered / total_count if total_count > 0 else 0.0

  dst_dict = {'data': [{'files': files_section}]}
  dst_dict['data'][0]['totals'] = {
      'branches': {
          'count': 0,
          'covered': 0,
          'notcovered': 0,
          'percent': 0.0
      },
      'functions': {
          'count': 0,
          'covered': 0,
          'percent': 0.0
      },
      'instantiations': {
          'count': 0,
          'covered': 0,
          'percent': 0.0
      },
      'lines': {
          'count': total_count,
          'covered': total_covered,
          'percent': overall_ratio
      },
      'regions': {
          'count': 0,
          'covered': 0,
          'notcovered': 0,
          'percent': 0.0
      }
  }

  with open(dst, 'w') as dst_f:
    dst_f.write(json.dumps(dst_dict))
|
| 154 |
+
|
| 155 |
+
|
| 156 |
+
def main():
  """Dispatches on argv[1].

  'translate' rewrites a PyInstaller .coverage file to local paths;
  'convert-to-summary-json' converts a coveragepy JSON report; anything
  else is forwarded to coverage.py's own command line.
  """
  command = sys.argv[1]
  if command == 'translate':
    print('Translating the coverage')
    all_file_paths = []
    for root, _, files in os.walk(sys.argv[2]):
      for relative_file_path in files:
        all_file_paths.append(
            os.path.abspath(os.path.join(root, relative_file_path)))
    print('Done with path walk')
    translate_coverage(all_file_paths)
  elif command == 'convert-to-summary-json':
    convert_coveragepy_cov_to_summary_json(sys.argv[2], sys.argv[3])
  else:
    # Pass commands into coverage package
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(coverage_main())


if __name__ == '__main__':
  main()
|
local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/rcfilt
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/bin/bash -u
# Copyright 2020 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Symbol demangling for both C++ and Rust
#
################################################################################

# Demangle stdin by piping through both demanglers: rustfilt handles Rust
# symbols and passes everything else through unchanged, then c++filt handles
# Itanium C++ mangling (-n: do not strip a leading underscore).
rustfilt | c++filt -n
|
local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/reproduce
ADDED
|
@@ -0,0 +1,34 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/bin/bash -eux
# Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################

# Reproduces a crash: runs <fuzzer_name> once on $TESTCASE via run_fuzzer,
# forwarding any extra arguments to the target.
# Usage: reproduce <fuzzer_name> [extra_fuzzer_args...]

FUZZER=$1
shift

# Default testcase location inside the container; override by exporting
# TESTCASE before invoking this script.
if [ ! -v TESTCASE ]; then
  TESTCASE="/testcase"
fi

# Quote $TESTCASE so a path containing whitespace does not break the test.
if [ ! -f "$TESTCASE" ]; then
  echo "Error: $TESTCASE not found, use: docker run -v <path>:$TESTCASE ..."
  exit 1
fi

export RUN_FUZZER_MODE="interactive"
export FUZZING_ENGINE="libfuzzer"
export SKIP_SEED_CORPUS="1"

# "$@" (quoted) preserves argument boundaries; the previous unquoted $@ form
# re-split arguments containing whitespace.
run_fuzzer "$FUZZER" "$@" "$TESTCASE"
|
local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/run_fuzzer
ADDED
|
@@ -0,0 +1,228 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/bin/bash -eu
|
| 2 |
+
# Copyright 2024 Google LLC
|
| 3 |
+
#
|
| 4 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 5 |
+
# you may not use this file except in compliance with the License.
|
| 6 |
+
# You may obtain a copy of the License at
|
| 7 |
+
#
|
| 8 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 9 |
+
#
|
| 10 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 11 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 12 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 13 |
+
# See the License for the specific language governing permissions and
|
| 14 |
+
# limitations under the License.
|
| 15 |
+
#
|
| 16 |
+
################################################################################
|
| 17 |
+
|
| 18 |
+
# Fuzzer runner. Appends .options arguments and seed corpus to users args.
|
| 19 |
+
# Usage: $0 <fuzzer_name> <fuzzer_args>
|
| 20 |
+
|
| 21 |
+
# Lower ASLR entropy — presumably a workaround for sanitizers failing under
# higher kernel defaults (TODO confirm). Requires a privileged container;
# under `bash -eu` the script aborts if sysctl fails.
sysctl -w vm.mmap_rnd_bits=28

# Make the fuzz target and helper binaries in $OUT directly invokable.
export PATH=$OUT:$PATH
cd $OUT

# Optional debugger prefix (e.g. "gdb --args"), prepended to the final
# command line further below.
DEBUGGER=${DEBUGGER:-}

FUZZER=$1
shift

# This env var is set by CIFuzz. CIFuzz fills this directory with the corpus
# from ClusterFuzz.
CORPUS_DIR=${CORPUS_DIR:-}
if [ -z "$CORPUS_DIR" ]
then
  CORPUS_DIR="/tmp/${FUZZER}_corpus"
  rm -rf $CORPUS_DIR && mkdir -p $CORPUS_DIR
fi

SANITIZER=${SANITIZER:-}
if [ -z $SANITIZER ]; then
  # If $SANITIZER is not specified (e.g. calling from `reproduce` command), it
  # is not important and can be set to any value.
  SANITIZER="default"
fi

# In interactive mode the crash/output dir lives under $OUT (visible to the
# user through the mounted volume); otherwise it is kept in /tmp.
if [[ "$RUN_FUZZER_MODE" = interactive ]]; then
  FUZZER_OUT="$OUT/${FUZZER}_${FUZZING_ENGINE}_${SANITIZER}_out"
else
  FUZZER_OUT="/tmp/${FUZZER}_${FUZZING_ENGINE}_${SANITIZER}_out"
fi
| 52 |
+
|
| 53 |
+
function get_dictionary() {
  # Emits the engine-specific dictionary flag for $FUZZER, or nothing when no
  # dictionary is configured.
  local options_file="$FUZZER.options"
  local dict_file="$FUZZER.dict"
  local dict=""
  # A `dict = <path>` entry in the .options file (last one wins) takes
  # precedence over a bare <fuzzer>.dict file.
  if [[ -f "$options_file" ]]; then
    dict=$(sed -n 's/^\s*dict\s*=\s*\(.*\)/\1/p' "$options_file" | tail -1)
  fi
  if [[ -z "$dict" && -f "$dict_file" ]]; then
    dict="$dict_file"
  fi
  [[ -z "$dict" ]] && return
  # Each engine spells its dictionary option differently.
  case "$FUZZING_ENGINE" in
    libfuzzer)
      printf -- "-dict=%s" "$dict"
      ;;
    afl)
      printf -- "-x %s" "$dict"
      ;;
    honggfuzz)
      printf -- "--dict %s" "$dict"
      ;;
    centipede)
      printf -- "--dictionary %s" "$dict"
      ;;
    *)
      printf "Unexpected FUZZING_ENGINE: $FUZZING_ENGINE, ignoring\n" >&2
      ;;
  esac
}
|
| 76 |
+
|
| 77 |
+
function get_extra_binaries() {
  # Centipede is the only engine that takes sanitized companion binaries.
  [[ "$FUZZING_ENGINE" != "centipede" ]] && return

  local candidate="$OUT/__centipede_${SANITIZER}/${FUZZER}"
  # compgen -G succeeds when the path exists; emit the flag with the path
  # wrapped in literal double quotes, exactly as centipede's CLI expects.
  if compgen -G "$candidate" > /dev/null; then
    printf -- '--extra_binaries "%s"' "$candidate"
  fi

}
|
| 86 |
+
|
| 87 |
+
rm -rf $FUZZER_OUT && mkdir -p $FUZZER_OUT

SEED_CORPUS="${FUZZER}_seed_corpus.zip"

# TODO: Investigate why this code block is skipped
# by all default fuzzers in bad_build_check.
# They all set SKIP_SEED_CORPUS=1.
if [ -f $SEED_CORPUS ] && [ -z ${SKIP_SEED_CORPUS:-} ]; then
  echo "Using seed corpus: $SEED_CORPUS"
  unzip -o -d ${CORPUS_DIR}/ $SEED_CORPUS > /dev/null
fi

OPTIONS_FILE="${FUZZER}.options"
CUSTOM_LIBFUZZER_OPTIONS=""

# Merge per-target sanitizer options from the .options file (if present)
# into the corresponding *SAN_OPTIONS environment variables.
if [ -f $OPTIONS_FILE ]; then
  custom_asan_options=$(parse_options.py $OPTIONS_FILE asan)
  if [ ! -z $custom_asan_options ]; then
    export ASAN_OPTIONS="$ASAN_OPTIONS:$custom_asan_options"
  fi

  custom_msan_options=$(parse_options.py $OPTIONS_FILE msan)
  if [ ! -z $custom_msan_options ]; then
    export MSAN_OPTIONS="$MSAN_OPTIONS:$custom_msan_options"
  fi

  custom_ubsan_options=$(parse_options.py $OPTIONS_FILE ubsan)
  if [ ! -z $custom_ubsan_options ]; then
    export UBSAN_OPTIONS="$UBSAN_OPTIONS:$custom_ubsan_options"
  fi

  CUSTOM_LIBFUZZER_OPTIONS=$(parse_options.py $OPTIONS_FILE libfuzzer)
fi

if [[ "$FUZZING_ENGINE" = afl ]]; then

  # Set afl++ environment options.
  export ASAN_OPTIONS="$ASAN_OPTIONS:abort_on_error=1:symbolize=0:detect_odr_violation=0:"
  export MSAN_OPTIONS="$MSAN_OPTIONS:exit_code=86:symbolize=0"
  export UBSAN_OPTIONS="$UBSAN_OPTIONS:symbolize=0"
  export AFL_I_DONT_CARE_ABOUT_MISSING_CRASHES=1
  export AFL_SKIP_CPUFREQ=1
  export AFL_TRY_AFFINITY=1
  export AFL_FAST_CAL=1
  export AFL_CMPLOG_ONLY_NEW=1
  export AFL_FORKSRV_INIT_TMOUT=30000
  export AFL_IGNORE_PROBLEMS=1
  export AFL_IGNORE_UNKNOWN_ENVS=1

  # If $OUT/afl_cmplog.txt is present this means the target was compiled for
  # CMPLOG. So we have to add the proper parameters to afl-fuzz.
  test -e "$OUT/afl_cmplog.txt" && AFL_FUZZER_ARGS="$AFL_FUZZER_ARGS -c $OUT/$FUZZER"

  # If $OUT/afl++.dict exists we load it as a dictionary for afl-fuzz.
  test -e "$OUT/afl++.dict" && AFL_FUZZER_ARGS="$AFL_FUZZER_ARGS -x $OUT/afl++.dict"

  # Ensure timeout is a bit larger than 1sec as some of the OSS-Fuzz fuzzers
  # are slower than this.
  AFL_FUZZER_ARGS="$AFL_FUZZER_ARGS -t 5000+"

  # AFL expects at least 1 file in the input dir.
  echo input > ${CORPUS_DIR}/input

  CMD_LINE="$OUT/afl-fuzz $AFL_FUZZER_ARGS -i $CORPUS_DIR -o $FUZZER_OUT $(get_dictionary) $* -- $OUT/$FUZZER"

  echo afl++ setup:
  env|grep AFL_
  cat "$OUT/afl_options.txt"

elif [[ "$FUZZING_ENGINE" = honggfuzz ]]; then

  # Honggfuzz expects at least 1 file in the input dir.
  echo input > $CORPUS_DIR/input
  # --exit_upon_crash: exit with the first crash seen
  # -V: verify crashes
  # -R (report): save report file to this location
  # -W (working dir): where the crashes go
  # -v (verbose): don't use VTE UI, just stderr
  # -z: use software-instrumentation of clang (trace-pc-guard....)
  # -P: use persistent mode of fuzzing (i.e. LLVMFuzzerTestOneInput)
  # -f: location of the initial (and destination) file corpus
  # -n: number of fuzzing threads (and processes)
  CMD_LINE="$OUT/honggfuzz -n 1 --exit_upon_crash -V -R /tmp/${FUZZER}_honggfuzz.report -W $FUZZER_OUT -v -z -P -f \"$CORPUS_DIR\" $(get_dictionary) $* -- \"$OUT/$FUZZER\""

  # Netdriver-enabled targets embed this binary signature.
  if [[ $(LC_ALL=C grep -P "\x01_LIBHFUZZ_NETDRIVER_BINARY_SIGNATURE_\x02\xFF" "$FUZZER" ) ]]; then
    # Honggfuzz Netdriver port. This must match the port in Clusterfuzz.
    export HFND_TCP_PORT=8666
  fi
elif [[ "$FUZZING_ENGINE" = centipede ]]; then

  # Create the work and corpus directory for Centipede.
  CENTIPEDE_WORKDIR="${CENTIPEDE_WORKDIR:-$OUT}"

  # Centipede only saves crashes to crashes/ in workdir.
  rm -rf $FUZZER_OUT

  # --workdir: Dir that stores corpus&features in Centipede's own format.
  # --corpus_dir: Location of the initial (and destination) file corpus.
  # --fork_server: Execute the target(s) via the fork server.
  #     Run in fork mode to continue fuzzing indefinitely in case of
  #     OOMs, timeouts, and crashes.
  # --exit_on_crash=1: Stop fuzzing as soon as a crash is found.
  #     (NOTE(review): an earlier comment here claimed exit_on_crash=0 /
  #     "do not exit on crash", contradicting the flag actually passed.)
  # --timeout=1200: The process that executes target binary will abort
  #     if an input runs >= 1200s.
  # --rss_limit_mb=4096: Abort an input if the target's RSS exceeds 4 GiB.
  # --address_space_limit_mb=5120: Limit the target's address space to 5 GiB.
  # --binary: The target binary under test without sanitizer.
  # --extra_binary: The target binaries under test with sanitizers.
  CMD_LINE="$OUT/centipede --workdir=$CENTIPEDE_WORKDIR --corpus_dir=\"$CORPUS_DIR\" --fork_server=1 --exit_on_crash=1 --timeout=1200 --rss_limit_mb=4096 --address_space_limit_mb=5120 $(get_dictionary) --binary=\"$OUT/${FUZZER}\" $(get_extra_binaries) $*"
else

  # Default engine (libFuzzer): the target binary embeds the engine itself.
  CMD_LINE="$OUT/$FUZZER $FUZZER_ARGS $*"

  if [ -z ${SKIP_SEED_CORPUS:-} ]; then
    CMD_LINE="$CMD_LINE $CORPUS_DIR"
  fi

  if [[ ! -z ${CUSTOM_LIBFUZZER_OPTIONS} ]]; then
    CMD_LINE="$CMD_LINE $CUSTOM_LIBFUZZER_OPTIONS"
  fi

  # Append the .dict file unless a -dict= argument is already present.
  if [[ ! "$CMD_LINE" =~ "-dict=" ]]; then
    if [ -f "$FUZZER.dict" ]; then
      CMD_LINE="$CMD_LINE -dict=$FUZZER.dict"
    fi
  fi

  # Keep the target from blocking on stdin.
  CMD_LINE="$CMD_LINE < /dev/null"

fi

echo $CMD_LINE

# Unset OUT so the fuzz target can't rely on it.
unset OUT

# Prepend the optional debugger (e.g. "gdb --args") to the command.
if [ ! -z "$DEBUGGER" ]; then
  CMD_LINE="$DEBUGGER $CMD_LINE"
fi

bash -c "$CMD_LINE"
|
| 228 |
+
|
local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/ruzzy
ADDED
|
@@ -0,0 +1,19 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/usr/bin/env bash
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################

# Runs a Ruby fuzz harness under Ruzzy with ASan preloaded (Ruzzy exposes the
# path to its ASan runtime as Ruzzy::ASAN_PATH).
# "$@" (quoted) preserves argument boundaries; the previous unquoted $@ form
# re-split arguments containing whitespace.
ASAN_OPTIONS="allocator_may_return_null=1:detect_leaks=0:use_sigaltstack=0" LD_PRELOAD=$(ruby -e 'require "ruzzy"; print Ruzzy::ASAN_PATH') \
  ruby "$@"
|
local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/targets_list
ADDED
|
@@ -0,0 +1,10 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/bin/bash

# Prints the basename of every fuzz target in $OUT: executable ELF binaries
# or wrapper shell scripts that reference LLVMFuzzerTestOneInput, excluding
# shared objects and the Jazzer driver.
#
# $OUT is quoted so a path containing whitespace doesn't glob/word-split in
# find. (Individual result paths are still split by the for-loop; OSS-Fuzz
# target names contain no whitespace.)
for binary in $(find "$OUT/" -executable -type f); do
  [[ "$binary" != *.so ]] || continue
  [[ $(basename "$binary") != jazzer_driver* ]] || continue
  file "$binary" | grep -e ELF -e "shell script" > /dev/null 2>&1 || continue
  grep "LLVMFuzzerTestOneInput" "$binary" > /dev/null 2>&1 || continue

  basename "$binary"
done
|
local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/test_all.py
ADDED
|
@@ -0,0 +1,295 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/usr/bin/env python3
|
| 2 |
+
# Copyright 2020 Google LLC
|
| 3 |
+
#
|
| 4 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 5 |
+
# you may not use this file except in compliance with the License.
|
| 6 |
+
# You may obtain a copy of the License at
|
| 7 |
+
#
|
| 8 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 9 |
+
#
|
| 10 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 11 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 12 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 13 |
+
# See the License for the specific language governing permissions and
|
| 14 |
+
# limitations under the License.
|
| 15 |
+
#
|
| 16 |
+
################################################################################
|
| 17 |
+
"""Does bad_build_check on all fuzz targets in $OUT."""
|
| 18 |
+
|
| 19 |
+
import contextlib
|
| 20 |
+
import multiprocessing
|
| 21 |
+
import os
|
| 22 |
+
import re
|
| 23 |
+
import subprocess
|
| 24 |
+
import stat
|
| 25 |
+
import sys
|
| 26 |
+
import tempfile
|
| 27 |
+
|
| 28 |
+
# Fake $OUT location used to catch targets that hardcode the real /out path.
BASE_TMP_FUZZER_DIR = '/tmp/not-out'

# Any-executable permission bits (owner, group, or other).
EXECUTABLE = stat.S_IEXEC | stat.S_IXGRP | stat.S_IXOTH

# Target names whose bad_build_check failures are tolerated.
# NOTE(review): these are regex fragments, so in 'ares_*_fuzzer' the '_*'
# means "zero or more underscores", not a glob wildcard — presumably a glob
# was intended; verify against the target names it should match.
IGNORED_TARGETS = [
    r'do_stuff_fuzzer', r'checksum_fuzzer', r'fuzz_dump', r'fuzz_keyring',
    r'xmltest', r'fuzz_compression_sas_rle', r'ares_*_fuzzer'
]

# Full-match alternation built from IGNORED_TARGETS ('^a$|^b$|...').
IGNORED_TARGETS_RE = re.compile('^' + r'$|^'.join(IGNORED_TARGETS) + '$')
|
| 38 |
+
|
| 39 |
+
|
| 40 |
+
def move_directory_contents(src_directory, dst_directory):
  """Moves contents of |src_directory| to |dst_directory|."""
  # Use mv because mv preserves file permissions. If we don't preserve file
  # permissions that can mess up CheckFuzzerBuildTest in cifuzz_test.py and
  # other cases where one is calling test_all on files not in OSS-Fuzz's real
  # out directory.
  entries = os.listdir(src_directory)
  sources = [os.path.join(src_directory, entry) for entry in entries]
  subprocess.check_call(['mv'] + sources + [dst_directory])
|
| 52 |
+
|
| 53 |
+
|
| 54 |
+
def is_elf(filepath):
  """Returns True if |filepath| is an ELF file."""
  # Ask the `file` utility; an ELF binary's description contains "ELF".
  file_output = subprocess.run(['file', filepath],
                               stdout=subprocess.PIPE,
                               check=False).stdout
  return b'ELF' in file_output
|
| 60 |
+
|
| 61 |
+
|
| 62 |
+
def is_shell_script(filepath):
  """Returns True if |filepath| is a shell script."""
  # Ask the `file` utility; script wrappers are described as "shell script".
  file_output = subprocess.run(['file', filepath],
                               stdout=subprocess.PIPE,
                               check=False).stdout
  return b'shell script' in file_output
|
| 68 |
+
|
| 69 |
+
|
| 70 |
+
def find_fuzz_targets(directory):
  """Returns paths to fuzz targets in |directory|."""
  # TODO(https://github.com/google/oss-fuzz/issues/4585): Use libClusterFuzz
  # for this.
  targets = []
  for entry in os.listdir(directory):
    # Skip helper binaries shipped alongside the targets.
    if entry == 'llvm-symbolizer' or entry.startswith(('afl-', 'jazzer_')):
      continue
    candidate = os.path.join(directory, entry)
    if not os.path.isfile(candidate):
      continue
    if not os.stat(candidate).st_mode & EXECUTABLE:
      continue
    # Fuzz targets can either be ELF binaries or shell scripts (e.g. wrapper
    # scripts for Python and JVM targets or rules_fuzzing builds with
    # runfiles trees).
    if not (is_elf(candidate) or is_shell_script(candidate)):
      continue
    # For real fuzzing engines, require the libFuzzer entry point symbol to
    # appear somewhere in the file.
    if os.getenv('FUZZING_ENGINE') not in {'none', 'wycheproof'}:
      with open(candidate, 'rb') as file_handle:
        if b'LLVMFuzzerTestOneInput' not in file_handle.read():
          continue
    targets.append(candidate)
  return targets
|
| 99 |
+
|
| 100 |
+
|
| 101 |
+
def do_bad_build_check(fuzz_target):
  """Runs bad_build_check on |fuzz_target|. Returns a
  Subprocess.ProcessResult."""
  print('INFO: performing bad build checks for', fuzz_target)
  auxiliary = []
  if centipede_needs_auxiliaries():
    print('INFO: Finding Centipede\'s auxiliary for target', fuzz_target)
    auxiliary_path = find_centipede_auxiliary(fuzz_target)
    print('INFO: Using auxiliary binary:', auxiliary_path)
    auxiliary = [auxiliary_path]

  with tempfile.TemporaryDirectory() as temp_centipede_workdir:
    # Do this so that centipede doesn't fill up the disk during bad build check
    check_env = os.environ.copy()
    check_env['CENTIPEDE_WORKDIR'] = temp_centipede_workdir
    return subprocess.run(['bad_build_check', fuzz_target] + auxiliary,
                          stderr=subprocess.PIPE,
                          stdout=subprocess.PIPE,
                          env=check_env,
                          check=False)
|
| 123 |
+
|
| 124 |
+
|
| 125 |
+
def get_broken_fuzz_targets(bad_build_results, fuzz_targets):
  """Returns a list of broken fuzz targets and their process results in
  |fuzz_targets| where each item in |bad_build_results| is the result of
  bad_build_check on the corresponding element in |fuzz_targets|."""
  # A nonzero returncode from bad_build_check marks the target as broken.
  return [(fuzz_target, result)
          for result, fuzz_target in zip(bad_build_results, fuzz_targets)
          if result.returncode != 0]
|
| 134 |
+
|
| 135 |
+
|
| 136 |
+
def has_ignored_targets(out_dir):
  """Returns True if |out_dir| has any fuzz targets we are supposed to ignore
  bad build checks of."""
  return any(
      re.match(IGNORED_TARGETS_RE, filename)
      for filename in os.listdir(out_dir))
|
| 144 |
+
|
| 145 |
+
|
| 146 |
+
@contextlib.contextmanager
def use_different_out_dir():
  """Context manager that moves OUT to a subdirectory of BASE_TMP_FUZZER_DIR.

  This is useful for catching hardcoding. Note that this sets the environment
  variable OUT and therefore must be run before multiprocessing.Pool is
  created. Resets OUT at the end.
  """
  # Use a fake OUT directory to catch path hardcoding that breaks on
  # ClusterFuzz.
  real_out = os.getenv('OUT')
  os.makedirs(BASE_TMP_FUZZER_DIR, exist_ok=True)
  # A random subdirectory of BASE_TMP_FUZZER_DIR allows running multiple
  # instances of test_all in parallel (useful for integration testing).
  with tempfile.TemporaryDirectory(dir=BASE_TMP_FUZZER_DIR) as fake_out:
    # run_fuzzer (called by bad_build_check) reads OUT, so point it at the
    # fake directory.
    os.environ['OUT'] = fake_out
    # Move the directory's contents rather than the directory itself, because
    # the directory is a mount and cannot be moved.
    move_directory_contents(real_out, fake_out)
    try:
      yield fake_out
    finally:
      move_directory_contents(fake_out, real_out)
      os.environ['OUT'] = real_out
|
| 170 |
+
|
| 171 |
+
|
| 172 |
+
def test_all_outside_out(allowed_broken_targets_percentage):
  """Wrapper around test_all that changes OUT and returns the result."""
  with use_different_out_dir() as fake_out:
    return test_all(fake_out, allowed_broken_targets_percentage)
|
| 176 |
+
|
| 177 |
+
|
| 178 |
+
def centipede_needs_auxiliaries():
  """Checks if auxiliaries are needed for Centipede."""
  # Centipede always requires unsanitized binaries as the main fuzz targets,
  # and separate sanitized binaries as auxiliaries.
  # 1. Building sanitized binaries with helper.py (i.e., local or GitHub CI):
  #    Unsanitized ones are built automatically into the same docker
  #    container; bad_build_check tests both that the main targets run with
  #    the auxiliaries and that the auxiliaries use the right sanitizers.
  # 2. In trial and production builds the two kinds of binaries live in
  #    separate buckets / containers, so the check covers only one side.
  if os.getenv('FUZZING_ENGINE') != 'centipede':
    return False
  return os.getenv('SANITIZER') != 'none' and os.getenv('HELPER') == 'True'
|
| 194 |
+
|
| 195 |
+
|
| 196 |
+
def find_centipede_auxiliary(main_fuzz_target_path):
  """Finds the sanitized binary path that corresponds to |main_fuzz_target| for
  bad_build_check."""
  target_dir, target_name = os.path.split(main_fuzz_target_path)
  # Sanitized companions live in a sibling __centipede_<sanitizer> directory.
  sanitized_dir = os.path.join(target_dir,
                               f'__centipede_{os.getenv("SANITIZER")}')
  sanitized_path = os.path.join(sanitized_dir, target_name)

  if os.path.isfile(sanitized_path):
    return sanitized_path

  # Neither of the following two should ever happen, returns None to indicate
  # an error.
  if os.path.isdir(sanitized_dir):
    print('ERROR: Unable to identify Centipede\'s sanitized target'
          f'{sanitized_path} in {os.listdir(sanitized_dir)}')
  else:
    print('ERROR: Unable to identify Centipede\'s sanitized target directory'
          f'{sanitized_dir} in {os.listdir(target_dir)}')
  return None
|
| 216 |
+
|
| 217 |
+
|
| 218 |
+
def test_all(out, allowed_broken_targets_percentage):  # pylint: disable=too-many-return-statements
  """Does bad_build_check on all fuzz targets in |out|.

  Args:
    out: Directory containing the fuzz targets.
    allowed_broken_targets_percentage: Percentage of broken targets tolerated
        before the whole check fails.

  Returns:
    True if the build passes the check, False otherwise.
  """
  # TODO(metzman): Refactor so that we can convert test_one to python.
  fuzz_targets = find_fuzz_targets(out)
  if not fuzz_targets:
    print('ERROR: No fuzz targets found.')
    return False

  # When Centipede requires sanitized auxiliaries, every target must have one.
  if centipede_needs_auxiliaries():
    for fuzz_target in fuzz_targets:
      if not find_centipede_auxiliary(fuzz_target):
        print(f'ERROR: Couldn\'t find auxiliary for {fuzz_target}.')
        return False

  # First pass: check all targets in parallel.
  pool = multiprocessing.Pool()
  bad_build_results = pool.map(do_bad_build_check, fuzz_targets)
  pool.close()
  pool.join()
  broken_targets = get_broken_fuzz_targets(bad_build_results, fuzz_targets)
  broken_targets_count = len(broken_targets)
  if not broken_targets_count:
    return True

  # Second pass: retry failures sequentially (pool of 1), since parallel runs
  # can fail for resource reasons rather than genuine build problems.
  print('Retrying failed fuzz targets sequentially', broken_targets_count)
  pool = multiprocessing.Pool(1)
  retry_targets = [broken_target for broken_target, _ in broken_targets]
  bad_build_results = pool.map(do_bad_build_check, retry_targets)
  pool.close()
  pool.join()
  # Bug fix: pair the retry results with the retried target *paths*
  # (retry_targets), not with the (target, result) tuples in broken_targets —
  # otherwise the report below prints tuples instead of target names.
  broken_targets = get_broken_fuzz_targets(bad_build_results, retry_targets)
  broken_targets_count = len(broken_targets)
  if not broken_targets_count:
    return True

  print('Broken fuzz targets', broken_targets_count)
  total_targets_count = len(fuzz_targets)
  broken_targets_percentage = 100 * broken_targets_count / total_targets_count
  for broken_target, result in broken_targets:
    print(broken_target)
    # Use write because we can't print binary strings.
    sys.stdout.buffer.write(result.stdout + result.stderr + b'\n')

  if broken_targets_percentage > allowed_broken_targets_percentage:
    print('ERROR: {broken_targets_percentage}% of fuzz targets seem to be '
          'broken. See the list above for a detailed information.'.format(
              broken_targets_percentage=broken_targets_percentage))
    # Projects with known-ignored targets get an automatic pass.
    if has_ignored_targets(out):
      print('Build check automatically passing because of ignored targets.')
      return True
    return False
  print('{total_targets_count} fuzzers total, {broken_targets_count} '
        'seem to be broken ({broken_targets_percentage}%).'.format(
            total_targets_count=total_targets_count,
            broken_targets_count=broken_targets_count,
            broken_targets_percentage=broken_targets_percentage))
  return True
|
| 276 |
+
|
| 277 |
+
|
| 278 |
+
def get_allowed_broken_targets_percentage():
  """Returns the value of the environment variable
  'ALLOWED_BROKEN_TARGETS_PERCENTAGE' as an int, or a reasonable default."""
  # An empty string is falsy, so both an unset and an empty variable fall
  # back to the default of 10.
  configured = os.getenv('ALLOWED_BROKEN_TARGETS_PERCENTAGE')
  return int(configured or '10')
|
| 283 |
+
|
| 284 |
+
|
| 285 |
+
def main():
  """Does bad_build_check on all fuzz targets in parallel. Returns 0 on success.
  Returns 1 on failure."""
  threshold = get_allowed_broken_targets_percentage()
  return 0 if test_all_outside_out(threshold) else 1


if __name__ == '__main__':
  sys.exit(main())
|
local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/test_all_test.py
ADDED
|
@@ -0,0 +1,38 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2020 Google LLC
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
#
|
| 15 |
+
################################################################################
|
| 16 |
+
"""Tests test_all.py"""
|
| 17 |
+
import unittest
|
| 18 |
+
from unittest import mock
|
| 19 |
+
|
| 20 |
+
import test_all
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
class TestTestAll(unittest.TestCase):
  """Tests for the test_all_function."""

  # Decorators apply bottom-up, so the innermost patch (print) arrives as the
  # first mock argument and the find_fuzz_targets mock as the second (unused).
  @mock.patch('test_all.find_fuzz_targets', return_value=[])
  @mock.patch('builtins.print')
  def test_test_all_no_fuzz_targets(self, mock_print, _):
    """Tests that test_all returns False when there are no fuzz targets."""
    outdir = '/out'
    allowed_broken_targets_percentage = 0
    self.assertFalse(
        test_all.test_all(outdir, allowed_broken_targets_percentage))
    mock_print.assert_called_with('ERROR: No fuzz targets found.')


if __name__ == '__main__':
  unittest.main()
|