Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- local-test-commons-compress-delta-03/fuzz-tooling/infra/base-images/base-builder-fuzzbench/Dockerfile +26 -0
- local-test-commons-compress-delta-03/fuzz-tooling/infra/base-images/base-builder-fuzzbench/fuzzbench_build +28 -0
- local-test-commons-compress-delta-03/fuzz-tooling/infra/base-images/base-builder-fuzzbench/fuzzbench_install_dependencies +22 -0
- local-test-commons-compress-delta-03/fuzz-tooling/infra/base-images/base-builder-fuzzbench/fuzzbench_measure +32 -0
- local-test-commons-compress-delta-03/fuzz-tooling/infra/base-images/base-builder-fuzzbench/fuzzbench_run_fuzzer +41 -0
- local-test-commons-compress-delta-03/fuzz-tooling/infra/base-images/base-builder-go/Dockerfile +33 -0
- local-test-commons-compress-delta-03/fuzz-tooling/infra/base-images/base-builder-go/gosigfuzz.c +47 -0
- local-test-commons-compress-delta-03/fuzz-tooling/infra/base-images/base-builder-go/ossfuzz_coverage_runner.go +80 -0
- local-test-commons-compress-delta-03/fuzz-tooling/infra/base-images/base-builder-swift/Dockerfile +22 -0
- local-test-commons-compress-delta-03/fuzz-tooling/infra/base-images/base-builder-swift/precompile_swift +33 -0
- local-test-commons-compress-delta-03/fuzz-tooling/infra/base-images/base-builder/compile +420 -0
- local-test-commons-compress-delta-03/fuzz-tooling/infra/base-images/base-builder/compile_afl +53 -0
- local-test-commons-compress-delta-03/fuzz-tooling/infra/base-images/base-builder/compile_go_fuzzer +69 -0
- local-test-commons-compress-delta-03/fuzz-tooling/infra/base-images/base-builder/detect_repo_test.py +121 -0
- local-test-commons-compress-delta-03/fuzz-tooling/infra/base-images/base-builder/install_go.sh +43 -0
- local-test-commons-compress-delta-03/fuzz-tooling/infra/base-images/base-builder/install_java.sh +31 -0
- local-test-commons-compress-delta-03/fuzz-tooling/infra/base-images/base-runner-debug/Dockerfile +26 -0
- local-test-commons-compress-delta-03/fuzz-tooling/infra/base-images/base-runner/README.md +31 -0
- local-test-commons-compress-delta-03/fuzz-tooling/infra/base-images/base-runner/coverage +549 -0
- local-test-commons-compress-delta-03/fuzz-tooling/infra/base-images/base-runner/coverage_helper +17 -0
- local-test-commons-compress-delta-03/fuzz-tooling/infra/base-images/base-runner/download_corpus +30 -0
- local-test-commons-compress-delta-03/fuzz-tooling/infra/base-images/base-runner/generate_differential_cov_report.py +228 -0
- local-test-commons-compress-delta-03/fuzz-tooling/infra/base-images/base-runner/install_deps.sh +37 -0
- local-test-commons-compress-delta-03/fuzz-tooling/infra/base-images/base-runner/install_go.sh +41 -0
- local-test-commons-compress-delta-03/fuzz-tooling/infra/base-images/base-runner/install_java.sh +46 -0
- local-test-commons-compress-delta-03/fuzz-tooling/infra/base-images/base-runner/jacoco_report_converter.py +174 -0
- local-test-commons-compress-delta-03/fuzz-tooling/infra/base-images/base-runner/nyc_report_converter.py +80 -0
- local-test-commons-compress-delta-03/fuzz-tooling/infra/base-images/base-runner/profraw_update.py +182 -0
- local-test-commons-compress-delta-03/fuzz-tooling/infra/base-images/base-runner/python_coverage_runner_help.py +181 -0
- local-test-commons-compress-delta-03/fuzz-tooling/infra/base-images/base-runner/rcfilt +21 -0
- local-test-commons-compress-delta-03/fuzz-tooling/infra/base-images/base-runner/reproduce +34 -0
- local-test-commons-compress-delta-03/fuzz-tooling/infra/base-images/base-runner/run_fuzzer +228 -0
- local-test-commons-compress-delta-03/fuzz-tooling/infra/base-images/base-runner/ruzzy +19 -0
- local-test-commons-compress-delta-03/fuzz-tooling/infra/base-images/base-runner/targets_list +10 -0
- local-test-commons-compress-delta-03/fuzz-tooling/infra/base-images/base-runner/test_one.py +47 -0
- local-test-commons-compress-delta-03/fuzz-tooling/infra/cifuzz/actions/build_fuzzers/action.yml +55 -0
- local-test-commons-compress-delta-03/fuzz-tooling/infra/cifuzz/actions/run_fuzzers/action.yml +82 -0
- local-test-commons-compress-delta-03/fuzz-tooling/infra/cifuzz/cifuzz-base/Dockerfile +44 -0
- local-test-commons-compress-delta-03/fuzz-tooling/infra/cifuzz/external-actions/build_fuzzers/action.yml +75 -0
- local-test-commons-compress-delta-03/fuzz-tooling/infra/cifuzz/external-actions/run_fuzzers/action.yml +97 -0
- local-test-commons-compress-delta-03/fuzz-tooling/infra/cifuzz/filestore/__init__.py +54 -0
- local-test-commons-compress-delta-03/fuzz-tooling/infra/cifuzz/filestore/filesystem/__init__.py +107 -0
- local-test-commons-compress-delta-03/fuzz-tooling/infra/cifuzz/filestore/git/__init__.py +158 -0
- local-test-commons-compress-delta-03/fuzz-tooling/infra/cifuzz/filestore/git/git_test.py +122 -0
- local-test-commons-compress-delta-03/fuzz-tooling/infra/cifuzz/filestore/github_actions/__init__.py +179 -0
- local-test-commons-compress-delta-03/fuzz-tooling/infra/cifuzz/filestore/github_actions/github_actions_test.py +283 -0
- local-test-commons-compress-delta-03/fuzz-tooling/infra/cifuzz/filestore/github_actions/github_api.py +109 -0
- local-test-commons-compress-delta-03/fuzz-tooling/infra/cifuzz/filestore/github_actions/github_api_test.py +41 -0
- local-test-commons-compress-delta-03/fuzz-tooling/infra/cifuzz/filestore/github_actions/upload.js +45 -0
- local-test-commons-compress-delta-03/fuzz-tooling/infra/cifuzz/filestore/gitlab/__init__.py +133 -0
local-test-commons-compress-delta-03/fuzz-tooling/infra/base-images/base-builder-fuzzbench/Dockerfile
ADDED
|
@@ -0,0 +1,26 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2023 Google LLC
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
#
|
| 15 |
+
################################################################################
|
| 16 |
+
|
| 17 |
+
ARG IMG_TAG=latest
|
| 18 |
+
FROM ghcr.io/aixcc-finals/base-builder:${IMG_TAG}
|
| 19 |
+
|
| 20 |
+
# Copy/Run this now to make the cache more resilient.
|
| 21 |
+
COPY fuzzbench_install_dependencies /usr/local/bin
|
| 22 |
+
RUN fuzzbench_install_dependencies
|
| 23 |
+
|
| 24 |
+
ENV OSS_FUZZ_ON_DEMAND=1
|
| 25 |
+
|
| 26 |
+
COPY fuzzbench_build fuzzbench_run_fuzzer fuzzbench_measure /usr/local/bin/
|
local-test-commons-compress-delta-03/fuzz-tooling/infra/base-images/base-builder-fuzzbench/fuzzbench_build
ADDED
|
@@ -0,0 +1,28 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#! /bin/bash -eux
|
| 2 |
+
# Copyright 2023 Google LLC
|
| 3 |
+
#
|
| 4 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 5 |
+
# you may not use this file except in compliance with the License.
|
| 6 |
+
# You may obtain a copy of the License at
|
| 7 |
+
#
|
| 8 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 9 |
+
#
|
| 10 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 11 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 12 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 13 |
+
# See the License for the specific language governing permissions and
|
| 14 |
+
# limitations under the License.
|
| 15 |
+
#
|
| 16 |
+
################################################################################
|
| 17 |
+
|
| 18 |
+
# TODO(metzman): Do this in a docket image so we don't need to waste time
|
| 19 |
+
# reinstalling.
|
| 20 |
+
PYTHONPATH=$FUZZBENCH_PATH python3 -B -u -c "from fuzzers.$FUZZING_ENGINE import fuzzer; fuzzer.build()"
|
| 21 |
+
|
| 22 |
+
if [ "$FUZZING_ENGINE" = "coverage" ]; then
|
| 23 |
+
cd $OUT
|
| 24 |
+
mkdir -p filestore/oss-fuzz-on-demand/coverage-binaries
|
| 25 |
+
# We expect an error regarding leading slashes. Just assume this step succeeds.
|
| 26 |
+
# TODO(metzman): Fix this when I get a chance.
|
| 27 |
+
tar -czvf filestore/oss-fuzz-on-demand/coverage-binaries/coverage-build-$PROJECT.tar.gz * /src /work || exit 0
|
| 28 |
+
fi
|
local-test-commons-compress-delta-03/fuzz-tooling/infra/base-images/base-builder-fuzzbench/fuzzbench_install_dependencies
ADDED
|
@@ -0,0 +1,22 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#! /bin/bash -eux
|
| 2 |
+
# Copyright 2023 Google LLC
|
| 3 |
+
#
|
| 4 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 5 |
+
# you may not use this file except in compliance with the License.
|
| 6 |
+
# You may obtain a copy of the License at
|
| 7 |
+
#
|
| 8 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 9 |
+
#
|
| 10 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 11 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 12 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 13 |
+
# See the License for the specific language governing permissions and
|
| 14 |
+
# limitations under the License.
|
| 15 |
+
#
|
| 16 |
+
################################################################################
|
| 17 |
+
|
| 18 |
+
apt-get update && apt-get install -y gcc gfortran python-dev libopenblas-dev liblapack-dev cython libpq-dev
|
| 19 |
+
wget -O /tmp/requirements.txt https://raw.githubusercontent.com/google/fuzzbench/master/requirements.txt
|
| 20 |
+
pip3 install pip --upgrade
|
| 21 |
+
CFLAGS= CXXFLAGS= pip3 install -r /tmp/requirements.txt
|
| 22 |
+
rm /tmp/requirements.txt
|
local-test-commons-compress-delta-03/fuzz-tooling/infra/base-images/base-builder-fuzzbench/fuzzbench_measure
ADDED
|
@@ -0,0 +1,32 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#! /bin/bash -eux
|
| 2 |
+
# Copyright 2023 Google LLC
|
| 3 |
+
#
|
| 4 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 5 |
+
# you may not use this file except in compliance with the License.
|
| 6 |
+
# You may obtain a copy of the License at
|
| 7 |
+
#
|
| 8 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 9 |
+
#
|
| 10 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 11 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 12 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 13 |
+
# See the License for the specific language governing permissions and
|
| 14 |
+
# limitations under the License.
|
| 15 |
+
#
|
| 16 |
+
################################################################################
|
| 17 |
+
|
| 18 |
+
# TODO(metzman): Make these configurable.
|
| 19 |
+
export DB_PATH=$OUT/experiment.db
|
| 20 |
+
export SNAPSHOT_PERIOD=30
|
| 21 |
+
export EXPERIMENT_FILESTORE=$OUT/filestore
|
| 22 |
+
export MAX_TOTAL_TIME=120
|
| 23 |
+
export EXPERIMENT=oss-fuzz-on-demand
|
| 24 |
+
|
| 25 |
+
rm -f $DB_PATH
|
| 26 |
+
|
| 27 |
+
# FUZZER=mopt BENCHMARK=skcms
|
| 28 |
+
|
| 29 |
+
export SQL_DATABASE_URL=sqlite:///$DB_PATH
|
| 30 |
+
|
| 31 |
+
cd $FUZZBENCH_PATH
|
| 32 |
+
PYTHONPATH=. python3 -B experiment/measurer/standalone.py $MAX_TOTAL_TIME
|
local-test-commons-compress-delta-03/fuzz-tooling/infra/base-images/base-builder-fuzzbench/fuzzbench_run_fuzzer
ADDED
|
@@ -0,0 +1,41 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#! /bin/bash -eux
|
| 2 |
+
# Copyright 2023 Google LLC
|
| 3 |
+
#
|
| 4 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 5 |
+
# you may not use this file except in compliance with the License.
|
| 6 |
+
# You may obtain a copy of the License at
|
| 7 |
+
#
|
| 8 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 9 |
+
#
|
| 10 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 11 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 12 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 13 |
+
# See the License for the specific language governing permissions and
|
| 14 |
+
# limitations under the License.
|
| 15 |
+
#
|
| 16 |
+
################################################################################
|
| 17 |
+
|
| 18 |
+
export RUNNER_NICENESS="-5"
|
| 19 |
+
export EXPERIMENT_FILESTORE=$OUT/filestore
|
| 20 |
+
export EXPERIMENT=oss-fuzz-on-demand
|
| 21 |
+
export OSS_FUZZ_ON_DEMAND=1
|
| 22 |
+
export OUTPUT_CORPUS_DIR=/output-corpus
|
| 23 |
+
export SEED_CORPUS_DIR=/input-corpus
|
| 24 |
+
mkdir $SEED_CORPUS_DIR
|
| 25 |
+
rm -rf $OUTPUT_CORPUS_DIR
|
| 26 |
+
mkdir $OUTPUT_CORPUS_DIR
|
| 27 |
+
export FUZZER=$FUZZING_ENGINE
|
| 28 |
+
# TODO(metzman): Make this configurable.
|
| 29 |
+
export MAX_TOTAL_TIME=120
|
| 30 |
+
export SNAPSHOT_PERIOD=30
|
| 31 |
+
export TRIAL_ID=1
|
| 32 |
+
export FORCE_LOCAL=1
|
| 33 |
+
|
| 34 |
+
# BENCHMARK, FUZZ_TARGET
|
| 35 |
+
cd $OUT
|
| 36 |
+
|
| 37 |
+
# Prevent permissions issues with pyc files and docker.
|
| 38 |
+
cp -r $FUZZBENCH_PATH /tmp/fuzzbench
|
| 39 |
+
|
| 40 |
+
PYTHONPATH=/tmp/fuzzbench nice -n $RUNNER_NICENESS python3 -B -u /tmp/fuzzbench/experiment/runner.py
|
| 41 |
+
cat $EXPERIMENT_FILESTORE/$EXPERIMENT/experiment-folders/$BENCHMARK-$FUZZER/trial-$TRIAL_ID/results/fuzzer-log.txt
|
local-test-commons-compress-delta-03/fuzz-tooling/infra/base-images/base-builder-go/Dockerfile
ADDED
|
@@ -0,0 +1,33 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2021 Google LLC
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
#
|
| 15 |
+
################################################################################
|
| 16 |
+
|
| 17 |
+
ARG IMG_TAG=latest
|
| 18 |
+
FROM ghcr.io/aixcc-finals/base-builder:${IMG_TAG}
|
| 19 |
+
|
| 20 |
+
# Set up Golang environment variables (copied from /root/.bash_profile).
|
| 21 |
+
ENV GOPATH /root/go
|
| 22 |
+
|
| 23 |
+
# /root/.go/bin is for the standard Go binaries (i.e. go, gofmt, etc).
|
| 24 |
+
# $GOPATH/bin is for the binaries from the dependencies installed via "go get".
|
| 25 |
+
ENV PATH $PATH:/root/.go/bin:$GOPATH/bin
|
| 26 |
+
|
| 27 |
+
COPY gosigfuzz.c $GOPATH/gosigfuzz/
|
| 28 |
+
|
| 29 |
+
RUN install_go.sh
|
| 30 |
+
|
| 31 |
+
# TODO(jonathanmetzman): Install this file using install_go.sh.
|
| 32 |
+
COPY ossfuzz_coverage_runner.go \
|
| 33 |
+
$GOPATH/
|
local-test-commons-compress-delta-03/fuzz-tooling/infra/base-images/base-builder-go/gosigfuzz.c
ADDED
|
@@ -0,0 +1,47 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
/*
|
| 2 |
+
* Copyright 2023 Google LLC
|
| 3 |
+
|
| 4 |
+
* Licensed under the Apache License, Version 2.0 (the "License");
|
| 5 |
+
* you may not use this file except in compliance with the License.
|
| 6 |
+
* You may obtain a copy of the License at
|
| 7 |
+
|
| 8 |
+
* http://www.apache.org/licenses/LICENSE-2.0
|
| 9 |
+
|
| 10 |
+
* Unless required by applicable law or agreed to in writing, software
|
| 11 |
+
* distributed under the License is distributed on an "AS IS" BASIS,
|
| 12 |
+
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 13 |
+
* See the License for the specific language governing permissions and
|
| 14 |
+
* limitations under the License.
|
| 15 |
+
*/
|
| 16 |
+
|
| 17 |
+
#include<stdlib.h>
|
| 18 |
+
#include<signal.h>
|
| 19 |
+
|
| 20 |
+
static void fixSignalHandler(int signum) {
|
| 21 |
+
struct sigaction new_action;
|
| 22 |
+
struct sigaction old_action;
|
| 23 |
+
sigemptyset (&new_action.sa_mask);
|
| 24 |
+
sigaction (signum, NULL, &old_action);
|
| 25 |
+
new_action.sa_flags = old_action.sa_flags | SA_ONSTACK;
|
| 26 |
+
new_action.sa_sigaction = old_action.sa_sigaction;
|
| 27 |
+
new_action.sa_handler = old_action.sa_handler;
|
| 28 |
+
sigaction (signum, &new_action, NULL);
|
| 29 |
+
}
|
| 30 |
+
|
| 31 |
+
static void FixStackSignalHandler() {
|
| 32 |
+
fixSignalHandler(SIGSEGV);
|
| 33 |
+
fixSignalHandler(SIGABRT);
|
| 34 |
+
fixSignalHandler(SIGALRM);
|
| 35 |
+
fixSignalHandler(SIGINT);
|
| 36 |
+
fixSignalHandler(SIGTERM);
|
| 37 |
+
fixSignalHandler(SIGBUS);
|
| 38 |
+
fixSignalHandler(SIGFPE);
|
| 39 |
+
fixSignalHandler(SIGXFSZ);
|
| 40 |
+
fixSignalHandler(SIGUSR1);
|
| 41 |
+
fixSignalHandler(SIGUSR2);
|
| 42 |
+
}
|
| 43 |
+
|
| 44 |
+
int LLVMFuzzerInitialize(int *argc, char ***argv) {
|
| 45 |
+
FixStackSignalHandler();
|
| 46 |
+
return 0;
|
| 47 |
+
}
|
local-test-commons-compress-delta-03/fuzz-tooling/infra/base-images/base-builder-go/ossfuzz_coverage_runner.go
ADDED
|
@@ -0,0 +1,80 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
// Copyright 2020 Google LLC
|
| 2 |
+
//
|
| 3 |
+
// Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
// you may not use this file except in compliance with the License.
|
| 5 |
+
// You may obtain a copy of the License at
|
| 6 |
+
//
|
| 7 |
+
// http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
//
|
| 9 |
+
// Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
// distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
// See the License for the specific language governing permissions and
|
| 13 |
+
// limitations under the License.
|
| 14 |
+
|
| 15 |
+
package mypackagebeingfuzzed
|
| 16 |
+
|
| 17 |
+
import (
|
| 18 |
+
"io/fs"
|
| 19 |
+
"io/ioutil"
|
| 20 |
+
"os"
|
| 21 |
+
"path/filepath"
|
| 22 |
+
"runtime/pprof"
|
| 23 |
+
"testing"
|
| 24 |
+
)
|
| 25 |
+
|
| 26 |
+
func TestFuzzCorpus(t *testing.T) {
|
| 27 |
+
dir := os.Getenv("FUZZ_CORPUS_DIR")
|
| 28 |
+
if dir == "" {
|
| 29 |
+
t.Logf("No fuzzing corpus directory set")
|
| 30 |
+
return
|
| 31 |
+
}
|
| 32 |
+
filename := ""
|
| 33 |
+
defer func() {
|
| 34 |
+
if r := recover(); r != nil {
|
| 35 |
+
t.Error("Fuzz panicked in "+filename, r)
|
| 36 |
+
}
|
| 37 |
+
}()
|
| 38 |
+
profname := os.Getenv("FUZZ_PROFILE_NAME")
|
| 39 |
+
if profname != "" {
|
| 40 |
+
f, err := os.Create(profname + ".cpu.prof")
|
| 41 |
+
if err != nil {
|
| 42 |
+
t.Logf("error creating profile file %s\n", err)
|
| 43 |
+
} else {
|
| 44 |
+
_ = pprof.StartCPUProfile(f)
|
| 45 |
+
}
|
| 46 |
+
}
|
| 47 |
+
_, err := ioutil.ReadDir(dir)
|
| 48 |
+
if err != nil {
|
| 49 |
+
t.Logf("Not fuzzing corpus directory %s", err)
|
| 50 |
+
return
|
| 51 |
+
}
|
| 52 |
+
// recurse for regressions subdirectory
|
| 53 |
+
err = filepath.Walk(dir, func(fname string, info fs.FileInfo, err error) error {
|
| 54 |
+
if info.IsDir() {
|
| 55 |
+
return nil
|
| 56 |
+
}
|
| 57 |
+
data, err := ioutil.ReadFile(fname)
|
| 58 |
+
if err != nil {
|
| 59 |
+
t.Error("Failed to read corpus file", err)
|
| 60 |
+
return err
|
| 61 |
+
}
|
| 62 |
+
filename = fname
|
| 63 |
+
FuzzFunction(data)
|
| 64 |
+
return nil
|
| 65 |
+
})
|
| 66 |
+
if err != nil {
|
| 67 |
+
t.Error("Failed to run corpus", err)
|
| 68 |
+
}
|
| 69 |
+
if profname != "" {
|
| 70 |
+
pprof.StopCPUProfile()
|
| 71 |
+
f, err := os.Create(profname + ".heap.prof")
|
| 72 |
+
if err != nil {
|
| 73 |
+
t.Logf("error creating heap profile file %s\n", err)
|
| 74 |
+
}
|
| 75 |
+
if err = pprof.WriteHeapProfile(f); err != nil {
|
| 76 |
+
t.Logf("error writing heap profile file %s\n", err)
|
| 77 |
+
}
|
| 78 |
+
f.Close()
|
| 79 |
+
}
|
| 80 |
+
}
|
local-test-commons-compress-delta-03/fuzz-tooling/infra/base-images/base-builder-swift/Dockerfile
ADDED
|
@@ -0,0 +1,22 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2021 Google LLC
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
#
|
| 15 |
+
################################################################################
|
| 16 |
+
|
| 17 |
+
ARG IMG_TAG=latest
|
| 18 |
+
FROM ghcr.io/aixcc-finals/base-builder:${IMG_TAG}
|
| 19 |
+
|
| 20 |
+
RUN install_swift.sh
|
| 21 |
+
|
| 22 |
+
COPY precompile_swift /usr/local/bin/
|
local-test-commons-compress-delta-03/fuzz-tooling/infra/base-images/base-builder-swift/precompile_swift
ADDED
|
@@ -0,0 +1,33 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/bin/bash -eu
|
| 2 |
+
# Copyright 2021 Google LLC
|
| 3 |
+
#
|
| 4 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 5 |
+
# you may not use this file except in compliance with the License.
|
| 6 |
+
# You may obtain a copy of the License at
|
| 7 |
+
#
|
| 8 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 9 |
+
#
|
| 10 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 11 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 12 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 13 |
+
# See the License for the specific language governing permissions and
|
| 14 |
+
# limitations under the License.
|
| 15 |
+
#
|
| 16 |
+
################################################################################
|
| 17 |
+
|
| 18 |
+
cp /usr/local/bin/llvm-symbolizer-swift $OUT/llvm-symbolizer
|
| 19 |
+
|
| 20 |
+
export SWIFTFLAGS="-Xswiftc -parse-as-library -Xswiftc -static-stdlib --static-swift-stdlib"
|
| 21 |
+
if [ "$SANITIZER" = "coverage" ]
|
| 22 |
+
then
|
| 23 |
+
export SWIFTFLAGS="$SWIFTFLAGS -Xswiftc -profile-generate -Xswiftc -profile-coverage-mapping -Xswiftc -sanitize=fuzzer"
|
| 24 |
+
else
|
| 25 |
+
export SWIFTFLAGS="$SWIFTFLAGS -Xswiftc -sanitize=fuzzer,$SANITIZER --sanitize=$SANITIZER"
|
| 26 |
+
for f in $CFLAGS; do
|
| 27 |
+
export SWIFTFLAGS="$SWIFTFLAGS -Xcc=$f"
|
| 28 |
+
done
|
| 29 |
+
|
| 30 |
+
for f in $CXXFLAGS; do
|
| 31 |
+
export SWIFTFLAGS="$SWIFTFLAGS -Xcxx=$f"
|
| 32 |
+
done
|
| 33 |
+
fi
|
local-test-commons-compress-delta-03/fuzz-tooling/infra/base-images/base-builder/compile
ADDED
|
@@ -0,0 +1,420 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/bin/bash -eu
|
| 2 |
+
# Copyright 2016 Google Inc.
|
| 3 |
+
#
|
| 4 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 5 |
+
# you may not use this file except in compliance with the License.
|
| 6 |
+
# You may obtain a copy of the License at
|
| 7 |
+
#
|
| 8 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 9 |
+
#
|
| 10 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 11 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 12 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 13 |
+
# See the License for the specific language governing permissions and
|
| 14 |
+
# limitations under the License.
|
| 15 |
+
#
|
| 16 |
+
################################################################################
|
| 17 |
+
|
| 18 |
+
echo "---------------------------------------------------------------"
|
| 19 |
+
|
| 20 |
+
sysctl -w vm.mmap_rnd_bits=28
|
| 21 |
+
|
| 22 |
+
OSS_FUZZ_ON_DEMAND="${OSS_FUZZ_ON_DEMAND:-0}"
|
| 23 |
+
|
| 24 |
+
# Used for Rust introspector builds
|
| 25 |
+
RUST_SANITIZER=$SANITIZER
|
| 26 |
+
|
| 27 |
+
if [ "$FUZZING_LANGUAGE" = "jvm" ]; then
|
| 28 |
+
if [ "$FUZZING_ENGINE" != "libfuzzer" ] && [ "$FUZZING_ENGINE" != "wycheproof" ]; then
|
| 29 |
+
echo "ERROR: JVM projects can be fuzzed with libFuzzer or tested with wycheproof engines only."
|
| 30 |
+
exit 1
|
| 31 |
+
fi
|
| 32 |
+
if [ "$SANITIZER" != "address" ] && [ "$SANITIZER" != "coverage" ] && [ "$SANITIZER" != "undefined" ] && [ "$SANITIZER" != "none" ] && [ "$SANITIZER" != "introspector" ]; then
|
| 33 |
+
echo "ERROR: JVM projects can be fuzzed with AddressSanitizer or UndefinedBehaviorSanitizer or Introspector only."
|
| 34 |
+
exit 1
|
| 35 |
+
fi
|
| 36 |
+
if [ "$ARCHITECTURE" != "x86_64" ]; then
|
| 37 |
+
echo "ERROR: JVM projects can be fuzzed on x86_64 architecture only."
|
| 38 |
+
exit 1
|
| 39 |
+
fi
|
| 40 |
+
fi
|
| 41 |
+
|
| 42 |
+
if [ "$FUZZING_LANGUAGE" = "rust" ]; then
|
| 43 |
+
if [ "$SANITIZER" = "introspector" ]; then
|
| 44 |
+
# introspector sanitizer flag will cause cargo build to fail. Rremove it
|
| 45 |
+
# temporarily, RUST_SANITIZER will hold the original sanitizer.
|
| 46 |
+
export SANITIZER=address
|
| 47 |
+
fi
|
| 48 |
+
fi
|
| 49 |
+
|
| 50 |
+
|
| 51 |
+
if [ "$FUZZING_LANGUAGE" = "javascript" ]; then
|
| 52 |
+
if [ "$FUZZING_ENGINE" != "libfuzzer" ]; then
|
| 53 |
+
echo "ERROR: JavaScript projects can be fuzzed with libFuzzer engine only."
|
| 54 |
+
exit 1
|
| 55 |
+
fi
|
| 56 |
+
if [ "$SANITIZER" != "coverage" ] && [ "$SANITIZER" != "none" ]; then
|
| 57 |
+
echo "ERROR: JavaScript projects cannot be fuzzed with sanitizers."
|
| 58 |
+
exit 1
|
| 59 |
+
fi
|
| 60 |
+
if [ "$ARCHITECTURE" != "x86_64" ]; then
|
| 61 |
+
echo "ERROR: JavaScript projects can be fuzzed on x86_64 architecture only."
|
| 62 |
+
exit 1
|
| 63 |
+
fi
|
| 64 |
+
fi
|
| 65 |
+
|
| 66 |
+
if [ "$FUZZING_LANGUAGE" = "python" ]; then
|
| 67 |
+
if [ "$FUZZING_ENGINE" != "libfuzzer" ]; then
|
| 68 |
+
echo "ERROR: Python projects can be fuzzed with libFuzzer engine only."
|
| 69 |
+
exit 1
|
| 70 |
+
fi
|
| 71 |
+
if [ "$SANITIZER" != "address" ] && [ "$SANITIZER" != "undefined" ] && [ "$SANITIZER" != "coverage" ] && [ "$SANITIZER" != "introspector" ]; then
|
| 72 |
+
echo "ERROR: Python projects can be fuzzed with AddressSanitizer or UndefinedBehaviorSanitizer or Coverage or Fuzz Introspector only."
|
| 73 |
+
exit 1
|
| 74 |
+
fi
|
| 75 |
+
if [ "$ARCHITECTURE" != "x86_64" ]; then
|
| 76 |
+
echo "ERROR: Python projects can be fuzzed on x86_64 architecture only."
|
| 77 |
+
exit 1
|
| 78 |
+
fi
|
| 79 |
+
fi
|
| 80 |
+
|
| 81 |
+
if [ -z "${SANITIZER_FLAGS-}" ]; then
|
| 82 |
+
FLAGS_VAR="SANITIZER_FLAGS_${SANITIZER}"
|
| 83 |
+
export SANITIZER_FLAGS=${!FLAGS_VAR-}
|
| 84 |
+
fi
|
| 85 |
+
|
| 86 |
+
if [[ $ARCHITECTURE == "i386" ]]; then
|
| 87 |
+
export CFLAGS="-m32 $CFLAGS"
|
| 88 |
+
cp -R /usr/i386/lib/* /usr/local/lib
|
| 89 |
+
cp -R /usr/i386/include/* /usr/local/include
|
| 90 |
+
fi
|
| 91 |
+
|
| 92 |
+
# Don't use a fuzzing engine with Jazzer which has libFuzzer built-in or with
|
| 93 |
+
# FuzzBench which will provide the fuzzing engine.
|
| 94 |
+
if [[ $FUZZING_ENGINE != "none" ]] && [[ $FUZZING_LANGUAGE != "jvm" ]] && [[ "${OSS_FUZZ_ON_DEMAND}" == "0" ]] ; then
|
| 95 |
+
# compile script might override environment, use . to call it.
|
| 96 |
+
. compile_${FUZZING_ENGINE}
|
| 97 |
+
fi
|
| 98 |
+
|
| 99 |
+
if [[ $SANITIZER_FLAGS = *sanitize=memory* ]]
|
| 100 |
+
then
|
| 101 |
+
# Take all libraries from lib/msan
|
| 102 |
+
# export CXXFLAGS_EXTRA="-L/usr/msan/lib $CXXFLAGS_EXTRA"
|
| 103 |
+
cp -R /usr/msan/lib/* /usr/local/lib/x86_64-unknown-linux-gnu/
|
| 104 |
+
cp -R /usr/msan/include/* /usr/local/include
|
| 105 |
+
|
| 106 |
+
echo 'Building without MSan instrumented libraries.'
|
| 107 |
+
fi
|
| 108 |
+
|
| 109 |
+
# Coverage flag overrides.
|
| 110 |
+
COVERAGE_FLAGS_VAR="COVERAGE_FLAGS_${SANITIZER}"
|
| 111 |
+
if [[ -n ${!COVERAGE_FLAGS_VAR+x} ]]
|
| 112 |
+
then
|
| 113 |
+
export COVERAGE_FLAGS="${!COVERAGE_FLAGS_VAR}"
|
| 114 |
+
fi
|
| 115 |
+
|
| 116 |
+
# Only need the default coverage instrumentation for libFuzzer or honggfuzz.
|
| 117 |
+
# Other engines bring their own.
|
| 118 |
+
if [ $FUZZING_ENGINE = "none" ] || [ $FUZZING_ENGINE = "afl" ] || [ $FUZZING_ENGINE = "centipede" ] || [ "${OSS_FUZZ_ON_DEMAND}" != "0" ]; then
|
| 119 |
+
export COVERAGE_FLAGS=
|
| 120 |
+
fi
|
| 121 |
+
|
| 122 |
+
# Rust does not support sanitizers and coverage flags via CFLAGS/CXXFLAGS, so
|
| 123 |
+
# use RUSTFLAGS.
|
| 124 |
+
# FIXME: Support code coverage once support is in.
|
| 125 |
+
# See https://github.com/rust-lang/rust/issues/34701.
|
| 126 |
+
if [ "$RUST_SANITIZER" == "introspector" ]; then
|
| 127 |
+
export RUSTFLAGS="-Cdebuginfo=2 -Cforce-frame-pointers"
|
| 128 |
+
elif [ "$SANITIZER" != "undefined" ] && [ "$SANITIZER" != "coverage" ] && [ "$SANITIZER" != "none" ] && [ "$ARCHITECTURE" != 'i386' ]; then
|
| 129 |
+
export RUSTFLAGS="--cfg fuzzing -Zsanitizer=${SANITIZER} -Cdebuginfo=1 -Cforce-frame-pointers"
|
| 130 |
+
else
|
| 131 |
+
export RUSTFLAGS="--cfg fuzzing -Cdebuginfo=1 -Cforce-frame-pointers"
|
| 132 |
+
fi
|
| 133 |
+
if [ "$SANITIZER" = "coverage" ]
|
| 134 |
+
then
|
| 135 |
+
# link to C++ from comment in f5098035eb1a14aa966c8651d88ea3d64323823d
|
| 136 |
+
export RUSTFLAGS="$RUSTFLAGS -Cinstrument-coverage -C link-arg=-lc++"
|
| 137 |
+
fi
|
| 138 |
+
|
| 139 |
+
# Add Rust libfuzzer flags.
|
| 140 |
+
# See https://github.com/rust-fuzz/libfuzzer/blob/master/build.rs#L12.
|
| 141 |
+
export CUSTOM_LIBFUZZER_PATH="$LIB_FUZZING_ENGINE_DEPRECATED"
|
| 142 |
+
export CUSTOM_LIBFUZZER_STD_CXX=c++
|
| 143 |
+
|
| 144 |
+
export CFLAGS="$CFLAGS $SANITIZER_FLAGS $COVERAGE_FLAGS"
|
| 145 |
+
export CXXFLAGS="$CFLAGS $CXXFLAGS_EXTRA"
|
| 146 |
+
|
| 147 |
+
if [ "$SANITIZER" = "undefined" ]; then
|
| 148 |
+
# Disable "function" sanitizer for C code for now, because many projects,
|
| 149 |
+
# possibly via legacy C code are affected.
|
| 150 |
+
# The projects should be fixed and this workaround be removed in the future.
|
| 151 |
+
# TODO(#11778):
|
| 152 |
+
# https://github.com/google/oss-fuzz/issues/11778
|
| 153 |
+
export CFLAGS="$CFLAGS -fno-sanitize=function"
|
| 154 |
+
fi
|
| 155 |
+
|
| 156 |
+
if [ "$FUZZING_LANGUAGE" = "go" ]; then
|
| 157 |
+
# required by Go 1.20
|
| 158 |
+
export CXX="${CXX} -lresolv"
|
| 159 |
+
fi
|
| 160 |
+
|
| 161 |
+
if [ "$FUZZING_LANGUAGE" = "python" ]; then
|
| 162 |
+
sanitizer_with_fuzzer_lib_dir=`python3 -c "import atheris; import os; print(atheris.path())"`
|
| 163 |
+
sanitizer_with_fuzzer_output_lib=$OUT/sanitizer_with_fuzzer.so
|
| 164 |
+
if [ "$SANITIZER" = "address" ]; then
|
| 165 |
+
cp $sanitizer_with_fuzzer_lib_dir/asan_with_fuzzer.so $sanitizer_with_fuzzer_output_lib
|
| 166 |
+
elif [ "$SANITIZER" = "undefined" ]; then
|
| 167 |
+
cp $sanitizer_with_fuzzer_lib_dir/ubsan_with_fuzzer.so $sanitizer_with_fuzzer_output_lib
|
| 168 |
+
fi
|
| 169 |
+
|
| 170 |
+
# Disable leak checking as it is unsupported.
|
| 171 |
+
export CFLAGS="$CFLAGS -fno-sanitize=function,leak,vptr,"
|
| 172 |
+
export CXXFLAGS="$CXXFLAGS -fno-sanitize=function,leak,vptr"
|
| 173 |
+
fi
|
| 174 |
+
|
| 175 |
+
# Copy latest llvm-symbolizer in $OUT for stack symbolization.
|
| 176 |
+
cp $(which llvm-symbolizer) $OUT/
|
| 177 |
+
|
| 178 |
+
# Copy Jazzer to $OUT if needed.
|
| 179 |
+
if [ "$FUZZING_LANGUAGE" = "jvm" ]; then
|
| 180 |
+
cp $(which jazzer_agent_deploy.jar) $(which jazzer_driver) $(which jazzer_junit.jar) $OUT/
|
| 181 |
+
jazzer_driver_with_sanitizer=$OUT/jazzer_driver_with_sanitizer
|
| 182 |
+
if [ "$SANITIZER" = "address" ]; then
|
| 183 |
+
cat > $jazzer_driver_with_sanitizer << 'EOF'
|
| 184 |
+
#!/bin/bash
|
| 185 |
+
this_dir=$(dirname "$0")
|
| 186 |
+
"$this_dir/jazzer_driver" --asan "$@"
|
| 187 |
+
EOF
|
| 188 |
+
elif [ "$SANITIZER" = "undefined" ]; then
|
| 189 |
+
cat > $jazzer_driver_with_sanitizer << 'EOF'
|
| 190 |
+
#!/bin/bash
|
| 191 |
+
this_dir=$(dirname "$0")
|
| 192 |
+
"$this_dir/jazzer_driver" --ubsan "$@"
|
| 193 |
+
EOF
|
| 194 |
+
elif [ "$SANITIZER" = "coverage" ] || [ "$SANITIZER" = "introspector" ]; then
|
| 195 |
+
# Coverage & introspector builds require no instrumentation.
|
| 196 |
+
cp $(which jazzer_driver) $jazzer_driver_with_sanitizer
|
| 197 |
+
fi
|
| 198 |
+
chmod +x $jazzer_driver_with_sanitizer
|
| 199 |
+
|
| 200 |
+
# Disable leak checking since the JVM triggers too many false positives.
|
| 201 |
+
export CFLAGS="$CFLAGS -fno-sanitize=leak"
|
| 202 |
+
export CXXFLAGS="$CXXFLAGS -fno-sanitize=leak"
|
| 203 |
+
fi
|
| 204 |
+
|
| 205 |
+
if [ "$SANITIZER" = "introspector" ] || [ "$RUST_SANITIZER" = "introspector" ]; then
|
| 206 |
+
export AR=llvm-ar
|
| 207 |
+
export NM=llvm-nm
|
| 208 |
+
export RANLIB=llvm-ranlib
|
| 209 |
+
|
| 210 |
+
export CFLAGS="$CFLAGS -g"
|
| 211 |
+
export CXXFLAGS="$CXXFLAGS -g"
|
| 212 |
+
export FI_BRANCH_PROFILE=1
|
| 213 |
+
export FUZZ_INTROSPECTOR=1
|
| 214 |
+
export FUZZ_INTROSPECTOR_AUTO_FUZZ=1
|
| 215 |
+
|
| 216 |
+
# Move ar and ranlib
|
| 217 |
+
mv /usr/bin/ar /usr/bin/old-ar
|
| 218 |
+
mv /usr/bin/nm /usr/bin/old-nm
|
| 219 |
+
mv /usr/bin/ranlib /usr/bin/old-ranlib
|
| 220 |
+
|
| 221 |
+
ln -sf /usr/local/bin/llvm-ar /usr/bin/ar
|
| 222 |
+
ln -sf /usr/local/bin/llvm-nm /usr/bin/nm
|
| 223 |
+
ln -sf /usr/local/bin/llvm-ranlib /usr/bin/ranlib
|
| 224 |
+
|
| 225 |
+
apt-get install -y libjpeg-dev zlib1g-dev libyaml-dev
|
| 226 |
+
python3 -m pip install --upgrade pip setuptools
|
| 227 |
+
python3 -m pip install cxxfilt pyyaml beautifulsoup4 lxml soupsieve rust-demangler
|
| 228 |
+
python3 -m pip install --prefer-binary matplotlib
|
| 229 |
+
|
| 230 |
+
# Install Fuzz-Introspector
|
| 231 |
+
pushd /fuzz-introspector/src
|
| 232 |
+
python3 -m pip install -e .
|
| 233 |
+
popd
|
| 234 |
+
|
| 235 |
+
if [ "$FUZZING_LANGUAGE" = "python" ]; then
|
| 236 |
+
python3 /fuzz-introspector/src/main.py light --language=python
|
| 237 |
+
cp -rf $SRC/inspector/ /tmp/inspector-saved
|
| 238 |
+
elif [ "$FUZZING_LANGUAGE" = "jvm" ]; then
|
| 239 |
+
python3 /fuzz-introspector/src/main.py light --language=jvm
|
| 240 |
+
cp -rf $SRC/inspector/ /tmp/inspector-saved
|
| 241 |
+
elif [ "$FUZZING_LANGUAGE" = "rust" ]; then
|
| 242 |
+
python3 /fuzz-introspector/src/main.py light --language=rust
|
| 243 |
+
cp -rf $SRC/inspector/ /tmp/inspector-saved
|
| 244 |
+
else
|
| 245 |
+
python3 /fuzz-introspector/src/main.py light
|
| 246 |
+
|
| 247 |
+
# Make a copy of the light. This is needed because we run two versions of
|
| 248 |
+
# introspector: one based on pure statis analysis and one based on
|
| 249 |
+
# regular LTO.
|
| 250 |
+
cp -rf $SRC/inspector/ /tmp/inspector-saved
|
| 251 |
+
|
| 252 |
+
|
| 253 |
+
# Move coverage report.
|
| 254 |
+
if [ -d "$OUT/textcov_reports" ]
|
| 255 |
+
then
|
| 256 |
+
find $OUT/textcov_reports/ -name "*.covreport" -exec cp {} $SRC/inspector/ \;
|
| 257 |
+
find $OUT/textcov_reports/ -name "*.json" -exec cp {} $SRC/inspector/ \;
|
| 258 |
+
fi
|
| 259 |
+
|
| 260 |
+
# Make fuzz-introspector HTML report using light approach.
|
| 261 |
+
REPORT_ARGS="--name=$PROJECT_NAME"
|
| 262 |
+
|
| 263 |
+
# Only pass coverage_url when COVERAGE_URL is set (in cloud builds)
|
| 264 |
+
if [[ ! -z "${COVERAGE_URL+x}" ]]; then
|
| 265 |
+
REPORT_ARGS="$REPORT_ARGS --coverage-url=${COVERAGE_URL}"
|
| 266 |
+
fi
|
| 267 |
+
|
| 268 |
+
# Run pure static analysis fuzz introspector
|
| 269 |
+
fuzz-introspector full --target-dir=$SRC \
|
| 270 |
+
--language=${FUZZING_LANGUAGE} \
|
| 271 |
+
--out-dir=$SRC/inspector \
|
| 272 |
+
${REPORT_ARGS}
|
| 273 |
+
fi
|
| 274 |
+
|
| 275 |
+
rsync -avu --delete "$SRC/inspector/" "$OUT/inspector"
|
| 276 |
+
fi
|
| 277 |
+
|
| 278 |
+
echo "---------------------------------------------------------------"
|
| 279 |
+
echo "CC=$CC"
|
| 280 |
+
echo "CXX=$CXX"
|
| 281 |
+
echo "CFLAGS=$CFLAGS"
|
| 282 |
+
echo "CXXFLAGS=$CXXFLAGS"
|
| 283 |
+
echo "RUSTFLAGS=$RUSTFLAGS"
|
| 284 |
+
echo "---------------------------------------------------------------"
|
| 285 |
+
|
| 286 |
+
if [ "${OSS_FUZZ_ON_DEMAND}" != "0" ]; then
|
| 287 |
+
fuzzbench_build
|
| 288 |
+
cp $(which llvm-symbolizer) $OUT/
|
| 289 |
+
exit 0
|
| 290 |
+
fi
|
| 291 |
+
|
| 292 |
+
|
| 293 |
+
if [[ ! -z "${CAPTURE_REPLAY_SCRIPT-}" ]]; then
|
| 294 |
+
# Capture a replaying build script which can be used for replaying the build
|
| 295 |
+
# after a vanilla build. This script is meant to be used in a cached
|
| 296 |
+
# container.
|
| 297 |
+
python3 -m pip install bashlex
|
| 298 |
+
python3 /usr/local/bin/bash_parser.py $SRC/build.sh
|
| 299 |
+
fi
|
| 300 |
+
|
| 301 |
+
# Prepare the build command to run the project's build script.
|
| 302 |
+
if [[ ! -z "${REPLAY_ENABLED-}" ]]; then
|
| 303 |
+
# If this is a replay, then use replay_build.sh. This is expected to be
|
| 304 |
+
# running in a cached container where a build has already happened prior.
|
| 305 |
+
BUILD_CMD="bash -eux $SRC/replay_build.sh"
|
| 306 |
+
else
|
| 307 |
+
BUILD_CMD="bash -eux $SRC/build.sh"
|
| 308 |
+
fi
|
| 309 |
+
|
| 310 |
+
# Set +u temporarily to continue even if GOPATH and OSSFUZZ_RUSTPATH are undefined.
|
| 311 |
+
set +u
|
| 312 |
+
# We need to preserve source code files for generating a code coverage report.
|
| 313 |
+
# We need exact files that were compiled, so copy both $SRC and $WORK dirs.
|
| 314 |
+
COPY_SOURCES_CMD="cp -rL --parents $SRC $WORK /usr/include /usr/local/include $GOPATH $OSSFUZZ_RUSTPATH /rustc $OUT"
|
| 315 |
+
set -u
|
| 316 |
+
|
| 317 |
+
if [ "$FUZZING_LANGUAGE" = "rust" ]; then
|
| 318 |
+
# Copy rust std lib to its path with a hash.
|
| 319 |
+
export rustch=`rustc --version --verbose | grep commit-hash | cut -d' ' -f2`
|
| 320 |
+
mkdir -p /rustc/$rustch/
|
| 321 |
+
export rustdef=`rustup toolchain list | grep default | cut -d' ' -f1`
|
| 322 |
+
cp -r /rust/rustup/toolchains/$rustdef/lib/rustlib/src/rust/library/ /rustc/$rustch/
|
| 323 |
+
fi
|
| 324 |
+
|
| 325 |
+
if [ "${BUILD_UID-0}" -ne "0" ]; then
|
| 326 |
+
adduser -u $BUILD_UID --disabled-password --gecos '' builder
|
| 327 |
+
chown -R builder $SRC $OUT $WORK
|
| 328 |
+
su -c "$BUILD_CMD" builder
|
| 329 |
+
if [ "$SANITIZER" = "coverage" ]; then
|
| 330 |
+
# Some directories have broken symlinks (e.g. honggfuzz), ignore the errors.
|
| 331 |
+
su -c "$COPY_SOURCES_CMD" builder 2>/dev/null || true
|
| 332 |
+
fi
|
| 333 |
+
else
|
| 334 |
+
$BUILD_CMD
|
| 335 |
+
if [ "$SANITIZER" = "coverage" ]; then
|
| 336 |
+
# Some directories have broken symlinks (e.g. honggfuzz), ignore the errors.
|
| 337 |
+
$COPY_SOURCES_CMD 2>/dev/null || true
|
| 338 |
+
fi
|
| 339 |
+
fi
|
| 340 |
+
|
| 341 |
+
if [ "$SANITIZER" = "introspector" ] || [ "$RUST_SANITIZER" = "introspector" ]; then
|
| 342 |
+
unset CXXFLAGS
|
| 343 |
+
unset CFLAGS
|
| 344 |
+
export G_ANALYTICS_TAG="G-8WTFM1Y62J"
|
| 345 |
+
|
| 346 |
+
# If we get to here, it means the e.g. LTO had no problems and succeeded.
|
| 347 |
+
# TO this end, we wlil restore the original light analysis and used the
|
| 348 |
+
# LTO processing itself.
|
| 349 |
+
rm -rf $SRC/inspector
|
| 350 |
+
cp -rf /tmp/inspector-saved $SRC/inspector
|
| 351 |
+
|
| 352 |
+
cd /fuzz-introspector/src
|
| 353 |
+
python3 -m pip install -e .
|
| 354 |
+
cd /src/
|
| 355 |
+
|
| 356 |
+
if [ "$FUZZING_LANGUAGE" = "rust" ]; then
|
| 357 |
+
# Restore the sanitizer flag for rust
|
| 358 |
+
export SANITIZER="introspector"
|
| 359 |
+
fi
|
| 360 |
+
|
| 361 |
+
mkdir -p $SRC/inspector
|
| 362 |
+
find $SRC/ -name "fuzzerLogFile-*.data" -exec cp {} $SRC/inspector/ \;
|
| 363 |
+
find $SRC/ -name "fuzzerLogFile-*.data.yaml" -exec cp {} $SRC/inspector/ \;
|
| 364 |
+
find $SRC/ -name "fuzzerLogFile-*.data.debug_*" -exec cp {} $SRC/inspector/ \;
|
| 365 |
+
find $SRC/ -name "allFunctionsWithMain-*.yaml" -exec cp {} $SRC/inspector/ \;
|
| 366 |
+
|
| 367 |
+
# Move coverage report.
|
| 368 |
+
if [ -d "$OUT/textcov_reports" ]
|
| 369 |
+
then
|
| 370 |
+
find $OUT/textcov_reports/ -name "*.covreport" -exec cp {} $SRC/inspector/ \;
|
| 371 |
+
find $OUT/textcov_reports/ -name "*.json" -exec cp {} $SRC/inspector/ \;
|
| 372 |
+
fi
|
| 373 |
+
|
| 374 |
+
cd $SRC/inspector
|
| 375 |
+
|
| 376 |
+
# Make fuzz-introspector HTML report.
|
| 377 |
+
REPORT_ARGS="--name=$PROJECT_NAME"
|
| 378 |
+
# Only pass coverage_url when COVERAGE_URL is set (in cloud builds)
|
| 379 |
+
if [[ ! -z "${COVERAGE_URL+x}" ]]; then
|
| 380 |
+
REPORT_ARGS="$REPORT_ARGS --coverage-url=${COVERAGE_URL}"
|
| 381 |
+
fi
|
| 382 |
+
|
| 383 |
+
# Do different things depending on languages
|
| 384 |
+
if [ "$FUZZING_LANGUAGE" = "python" ]; then
|
| 385 |
+
echo "GOING python route"
|
| 386 |
+
set -x
|
| 387 |
+
REPORT_ARGS="$REPORT_ARGS --target-dir=$SRC/inspector"
|
| 388 |
+
REPORT_ARGS="$REPORT_ARGS --language=python"
|
| 389 |
+
fuzz-introspector report $REPORT_ARGS
|
| 390 |
+
rsync -avu --delete "$SRC/inspector/" "$OUT/inspector"
|
| 391 |
+
elif [ "$FUZZING_LANGUAGE" = "jvm" ]; then
|
| 392 |
+
echo "GOING jvm route"
|
| 393 |
+
set -x
|
| 394 |
+
find $OUT/ -name "jacoco.xml" -exec cp {} $SRC/inspector/ \;
|
| 395 |
+
REPORT_ARGS="$REPORT_ARGS --target-dir=$SRC --out-dir=$SRC/inspector"
|
| 396 |
+
REPORT_ARGS="$REPORT_ARGS --language=jvm"
|
| 397 |
+
fuzz-introspector full $REPORT_ARGS
|
| 398 |
+
rsync -avu --delete "$SRC/inspector/" "$OUT/inspector"
|
| 399 |
+
elif [ "$FUZZING_LANGUAGE" = "rust" ]; then
|
| 400 |
+
echo "GOING rust route"
|
| 401 |
+
REPORT_ARGS="$REPORT_ARGS --target-dir=$SRC --out-dir=$SRC/inspector"
|
| 402 |
+
REPORT_ARGS="$REPORT_ARGS --language=rust"
|
| 403 |
+
fuzz-introspector full $REPORT_ARGS
|
| 404 |
+
rsync -avu --delete "$SRC/inspector/" "$OUT/inspector"
|
| 405 |
+
else
|
| 406 |
+
# C/C++
|
| 407 |
+
mkdir -p $SRC/inspector
|
| 408 |
+
# Correlate fuzzer binaries to fuzz-introspector's raw data
|
| 409 |
+
fuzz-introspector correlate --binaries-dir=$OUT/
|
| 410 |
+
|
| 411 |
+
# Generate fuzz-introspector HTML report, this generates
|
| 412 |
+
# the file exe_to_fuzz_introspector_logs.yaml
|
| 413 |
+
REPORT_ARGS="$REPORT_ARGS --target-dir=$SRC/inspector"
|
| 414 |
+
# Use the just-generated correlation file
|
| 415 |
+
REPORT_ARGS="$REPORT_ARGS --correlation-file=exe_to_fuzz_introspector_logs.yaml"
|
| 416 |
+
fuzz-introspector report $REPORT_ARGS
|
| 417 |
+
|
| 418 |
+
rsync -avu --delete "$SRC/inspector/" "$OUT/inspector"
|
| 419 |
+
fi
|
| 420 |
+
fi
|
local-test-commons-compress-delta-03/fuzz-tooling/infra/base-images/base-builder/compile_afl
ADDED
|
@@ -0,0 +1,53 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/bin/bash -eu
# Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################

# Escape hatch kept around for future LLVM instrumentation breakage:
#export AFL_LLVM_INSTRUMENT=LLVM-NATIVE

# Stage the precompiled AFL++ toolchain for this build.
echo "Copying precompiled AFL++"

pushd $SRC/aflplusplus > /dev/null

# The AFL driver archive doubles as the generic fuzzing-engine library.
cp -f libAFLDriver.a $LIB_FUZZING_ENGINE

# Ship every afl-* tool to $OUT, plus any bundled libraries/objects some
# important projects rely on even though they don't start with "afl-".
# "sort -u" avoids a cp warning about duplicate arguments.
ls afl-* *.txt *.a *.o *.so | sort -u | xargs cp -t $OUT
# Route all compilation through the AFL++ instrumenting compiler wrappers.
export CC="$SRC/aflplusplus/afl-clang-fast"
export CXX="$SRC/aflplusplus/afl-clang-fast++"

# Sane AFL++ environment defaults for the build phase:
# keep the tools quiet so chatty output cannot break configure-style builds,
export AFL_QUIET=1
# don't let ASan abort the build over leaks / ODR noise,
export ASAN_OPTIONS="detect_leaks=0:symbolize=0:detect_odr_violation=0:abort_on_error=1"
# tolerate instrumentation problems (acceptable while merely compiling),
export AFL_IGNORE_PROBLEMS=1
# and accept AFL_* variables this AFL++ version doesn't recognize.
export AFL_IGNORE_UNKNOWN_ENVS=1

# Record the AFL++ options used, both on stdout and next to the artifacts.
echo
echo AFL++ target compilation setup:
env | egrep '^AFL_' | tee "$OUT/afl_options.txt"
echo

popd > /dev/null

echo " done."
|
local-test-commons-compress-delta-03/fuzz-tooling/infra/base-images/base-builder/compile_go_fuzzer
ADDED
|
@@ -0,0 +1,69 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/bin/bash -eu
# Copyright 2020 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################

# Usage: compile_go_fuzzer <import-path> <FuzzFunc> <output-name> [build-tags]
path=$1
function=$2
fuzzer=$3
tags="-tags gofuzz"
if [[ $# -eq 4 ]]; then
  tags="-tags $4"
fi

# Run in a subshell so the directory changes below don't leak to the caller.
(
cd $GOPATH/src/$path || true
# Refresh go.sum in case the project ships a go.mod without one.
go mod tidy || true
# If "go list" fails, the project was fetched via "go get": hop into the
# matching module-cache entry instead.
go list $tags $path || { cd $GOPATH/pkg/mod/ && cd $(echo $path | cut -d/ -f1-3 | awk '{print $1"@*"}'); } || cd -
# Still failing means there is no go.mod at all, so create one on the fly.
go list $tags $path || { go mod init $path && go mod tidy ;}

if [[ $SANITIZER = *coverage* ]]; then
  # Coverage build: materialize a *_test.go harness from the template runner
  # and compile it with "go test -c".
  fuzzed_package=$(go list $tags -f '{{.Name}}' $path)
  abspath=$(go list $tags -f {{.Dir}} $path)
  cd $abspath
  cp $GOPATH/ossfuzz_coverage_runner.go ./"${function,,}"_test.go
  sed -i -e 's/FuzzFunction/'$function'/' ./"${function,,}"_test.go
  sed -i -e 's/mypackagebeingfuzzed/'$fuzzed_package'/' ./"${function,,}"_test.go
  sed -i -e 's/TestFuzzCorpus/Test'$function'Corpus/' ./"${function,,}"_test.go

  # The repo is the module path/name, which is already created above in case
  # it doesn't exist, but not always the same as the module path. This is
  # necessary to handle SIV properly.
  fuzzed_repo=$(go list $tags -f {{.Module}} "$path")
  abspath_repo=$(go list -m $tags -f {{.Dir}} $fuzzed_repo || go list $tags -f {{.Dir}} $fuzzed_repo)
  # Record module-path -> absolute-path equivalences in a sidecar file, since
  # "go test -cover" reports golangish pkg.Dir paths that must be mapped back.
  echo "s=$fuzzed_repo"="$abspath_repo"= > $OUT/$fuzzer.gocovpath
  # Additional packages for which to collect coverage, if requested.
  pkgaddcov=""
  # Default to empty so "set -u" doesn't trip over an unset variable.
  GO_COV_ADD_PKG_SET=${GO_COV_ADD_PKG:-}
  if [[ -n "${GO_COV_ADD_PKG_SET}" ]]; then
    pkgaddcov=","$GO_COV_ADD_PKG
    abspath_repo=$(go list -m $tags -f {{.Dir}} $GO_COV_ADD_PKG || go list $tags -f {{.Dir}} $GO_COV_ADD_PKG)
    echo "s=^$GO_COV_ADD_PKG"="$abspath_repo"= >> $OUT/$fuzzer.gocovpath
  fi
  go test -run Test${function}Corpus -v $tags -coverpkg $fuzzed_repo/...$pkgaddcov -c -o $OUT/$fuzzer $path
else
  # Regular build: instrument all Go files relevant to this target with
  # go-fuzz, then link the archive against the fuzzing engine.
  echo "Running go-fuzz $tags -func $function -o $fuzzer.a $path"
  go-fuzz $tags -func $function -o $fuzzer.a $path
  $CXX $CXXFLAGS $LIB_FUZZING_ENGINE $fuzzer.a -o $OUT/$fuzzer
fi
)
|
local-test-commons-compress-delta-03/fuzz-tooling/infra/base-images/base-builder/detect_repo_test.py
ADDED
|
@@ -0,0 +1,121 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2019 Google LLC
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
"""Test the functionality of the detect_repo module.
|
| 15 |
+
This will consist of the following functional test:
|
| 16 |
+
1. Determine if an OSS-Fuzz projects main repo can be detected from example
|
| 17 |
+
commits.
|
| 18 |
+
2. Determine if an OSS-Fuzz project main repo can be detected from a
|
| 19 |
+
repo name.
|
| 20 |
+
"""
|
| 21 |
+
import os
|
| 22 |
+
import re
|
| 23 |
+
import sys
|
| 24 |
+
import tempfile
|
| 25 |
+
import unittest
|
| 26 |
+
from unittest import mock
|
| 27 |
+
|
| 28 |
+
import detect_repo
|
| 29 |
+
|
| 30 |
+
# Appending to path for access to repo_manager module.
|
| 31 |
+
# pylint: disable=wrong-import-position
|
| 32 |
+
sys.path.append(
|
| 33 |
+
os.path.dirname(os.path.dirname(os.path.dirname(
|
| 34 |
+
os.path.abspath(__file__)))))
|
| 35 |
+
import repo_manager
|
| 36 |
+
import test_repos
|
| 37 |
+
# pylint: enable=wrong-import-position
|
| 38 |
+
|
| 39 |
+
|
| 40 |
+
class TestCheckForRepoName(unittest.TestCase):
  """Unit tests for detect_repo.check_for_repo_name.

  Each case stubs out the filesystem check and the git-remote lookup so that
  only the URL-to-repo-name matching logic is exercised.
  """

  @mock.patch('os.path.exists', return_value=True)
  @mock.patch('detect_repo.execute',
              return_value=('https://github.com/google/syzkaller/', None))
  def test_go_get_style_url(self, mock_execute, mock_exists):
    """A remote URL with a trailing slash (go get download layout) matches."""
    self.assertTrue(detect_repo.check_for_repo_name('fake-path', 'syzkaller'))

  @mock.patch('os.path.exists', return_value=True)
  @mock.patch('detect_repo.execute',
              return_value=('https://github.com/google/syzkaller', None))
  def test_missing_git_and_slash_url(self, mock_execute, mock_exists):
    """A remote URL ending in neither ".git" nor "/" still matches."""
    self.assertTrue(detect_repo.check_for_repo_name('fake-path', 'syzkaller'))

  @mock.patch('os.path.exists', return_value=True)
  @mock.patch('detect_repo.execute',
              return_value=('https://github.com/google/syzkaller.git', None))
  def test_normal_style_repo_url(self, mock_execute, mock_exists):
    """A conventionally cloned repo (URL ending in ".git") matches."""
    self.assertTrue(detect_repo.check_for_repo_name('fake-path', 'syzkaller'))
|
| 65 |
+
|
| 66 |
+
|
| 67 |
+
@unittest.skipIf(not os.getenv('INTEGRATION_TESTS'),
                 'INTEGRATION_TESTS=1 not set')
class DetectRepoIntegrationTest(unittest.TestCase):
  """End-to-end checks of the detect_repo module against real clones."""

  def test_infer_main_repo_from_commit(self):
    """The main repo can be inferred from an example commit hash."""
    with tempfile.TemporaryDirectory() as tmp_dir:
      # Clone each example repo so detect_repo has something to search.
      for test_repo in test_repos.TEST_REPOS:
        repo_manager.clone_repo_and_get_manager(test_repo.git_url, tmp_dir)
        self.check_with_repo(test_repo.git_url,
                             test_repo.git_repo_name,
                             tmp_dir,
                             commit=test_repo.old_commit)

  def test_infer_main_repo_from_name(self):
    """The main project repo can be inferred from its repo name alone."""
    with tempfile.TemporaryDirectory() as tmp_dir:
      for test_repo in test_repos.TEST_REPOS:
        repo_manager.clone_repo_and_get_manager(test_repo.git_url, tmp_dir)
        self.check_with_repo(test_repo.git_url, test_repo.git_repo_name,
                             tmp_dir)

  def check_with_repo(self, repo_origin, repo_name, tmp_dir, commit=None):
    """Runs detect_repo.py and asserts on its reported repo origin/path.

    Args:
      repo_origin: URL of the git repo.
      repo_name: The name of the directory it is cloned to.
      tmp_dir: The location of the directory of git repos to be searched.
      commit: The commit that should be used to look up the repo; when unset,
        the lookup is done by repo name instead.
    """
    command = ['python3', 'detect_repo.py', '--src_dir', tmp_dir]
    if commit:
      command.extend(['--example_commit', commit])
    else:
      command.extend(['--repo_name', repo_name])

    out, _ = detect_repo.execute(
        command, location=os.path.dirname(os.path.realpath(__file__)))
    match = re.search(r'\bDetected repo: ([^ ]+) ([^ ]+)', out.rstrip())
    if match and match.group(1) and match.group(2):
      self.assertEqual(match.group(1), repo_origin)
      self.assertEqual(match.group(2), os.path.join(tmp_dir, repo_name))
    else:
      # No detection output: only acceptable when nothing was expected.
      self.assertIsNone(repo_origin)
      self.assertIsNone(repo_name)
|
| 118 |
+
|
| 119 |
+
|
| 120 |
+
# Allow running this test module directly: python3 detect_repo_test.py
if __name__ == '__main__':
  unittest.main()
|
local-test-commons-compress-delta-03/fuzz-tooling/infra/base-images/base-builder/install_go.sh
ADDED
|
@@ -0,0 +1,43 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/bin/bash -eux
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################

cd /tmp

# Download the Go toolchain and unpack it into /root/.go.
GO_TARBALL="go1.23.4.linux-amd64.tar.gz"
wget https://go.dev/dl/${GO_TARBALL}
mkdir temp-go
tar -C temp-go/ -xzf ${GO_TARBALL}

mkdir /root/.go/
mv temp-go/go/* /root/.go/
rm -rf temp-go

# The environment itself is expected to be set by the image; just announce it.
echo 'Set "GOPATH=/root/go"'
echo 'Set "PATH=$PATH:/root/.go/bin:$GOPATH/bin"'

# go114-fuzz-build provides the libFuzzer bridge; expose it as "go-fuzz".
go install github.com/mdempsky/go114-fuzz-build@latest
ln -s $GOPATH/bin/go114-fuzz-build $GOPATH/bin/go-fuzz

# Precompile the C signal-handler shim when it has been staged.
if [ -f "$GOPATH/gosigfuzz/gosigfuzz.c" ]; then
  clang -c $GOPATH/gosigfuzz/gosigfuzz.c -o $GOPATH/gosigfuzz/gosigfuzz.o
fi

# Build the bridge for native Go fuzzing (go test -fuzz style harnesses).
cd /tmp
git clone https://github.com/AdamKorcz/go-118-fuzz-build
cd go-118-fuzz-build
go build
mv go-118-fuzz-build $GOPATH/bin/
|
local-test-commons-compress-delta-03/fuzz-tooling/infra/base-images/base-builder/install_java.sh
ADDED
|
@@ -0,0 +1,31 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/bin/bash -eux
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################

cd /tmp

# Install OpenJDK 17 and trim its size by removing unused components
# (jmods, bundled sources). This enables using Jazzer's mutation framework.
# NOTE: the script runs with -e, so any failed download/extract aborts here.
JDK17_ARCHIVE=openjdk-17.0.2_linux-x64_bin.tar.gz
curl --silent -L -O https://download.java.net/java/GA/jdk17.0.2/dfd4a8d0985749f896bed50d7138ee7f/8/GPL/${JDK17_ARCHIVE}
mkdir -p $JAVA_HOME
tar -xz --strip-components=1 -f ${JDK17_ARCHIVE} --directory $JAVA_HOME
rm -f ${JDK17_ARCHIVE}
rm -rf $JAVA_HOME/jmods $JAVA_HOME/lib/src.zip

# Install OpenJDK 15 the same way; some projects only run with Java 15.
JDK15_ARCHIVE=openjdk-15.0.2_linux-x64_bin.tar.gz
curl --silent -L -O https://download.java.net/java/GA/jdk15.0.2/0d1cfde4252546c6931946de8db48ee2/7/GPL/${JDK15_ARCHIVE}
mkdir -p $JAVA_15_HOME
tar -xz --strip-components=1 -f ${JDK15_ARCHIVE} --directory $JAVA_15_HOME
rm -f ${JDK15_ARCHIVE}
rm -rf $JAVA_15_HOME/jmods $JAVA_15_HOME/lib/src.zip
|
local-test-commons-compress-delta-03/fuzz-tooling/infra/base-images/base-runner-debug/Dockerfile
ADDED
|
@@ -0,0 +1,26 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2016 Google Inc.
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
#
|
| 15 |
+
################################################################################
|
| 16 |
+
|
| 17 |
+
ARG IMG_TAG=latest
|
| 18 |
+
FROM ghcr.io/aixcc-finals/base-runner:${IMG_TAG}
|
| 19 |
+
RUN apt-get update && apt-get install -y valgrind zip
|
| 20 |
+
|
| 21 |
+
# Installing GDB 12, re https://github.com/google/oss-fuzz/issues/7513.
|
| 22 |
+
RUN apt-get install -y build-essential libgmp-dev && \
|
| 23 |
+
wget https://ftp.gnu.org/gnu/gdb/gdb-12.1.tar.xz && \
|
| 24 |
+
tar -xf gdb-12.1.tar.xz && cd gdb-12.1 && ./configure && \
|
| 25 |
+
make -j $(expr $(nproc) / 2) && make install && cd .. && \
|
| 26 |
+
rm -rf gdb-12.1* && apt-get remove --purge -y build-essential libgmp-dev
|
local-test-commons-compress-delta-03/fuzz-tooling/infra/base-images/base-runner/README.md
ADDED
|
@@ -0,0 +1,31 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# base-runner
|
| 2 |
+
> Base image for fuzzer runners.
|
| 3 |
+
|
| 4 |
+
```bash
|
| 5 |
+
docker run -ti ghcr.io/aixcc-finals/base-runner <command> <args>
|
| 6 |
+
```
|
| 7 |
+
|
| 8 |
+
## Commands
|
| 9 |
+
|
| 10 |
+
| Command | Description |
|
| 11 |
+
|---------|-------------|
|
| 12 |
+
| `reproduce <fuzzer_name> <fuzzer_options>` | build all fuzz targets and run specified one with testcase `/testcase` and given options.
|
| 13 |
+
| `run_fuzzer <fuzzer_name> <fuzzer_options>` | runs specified fuzzer combining options with `.options` file |
|
| 14 |
+
| `test_all.py` | runs every binary in `/out` as a fuzzer for a while to ensure it works. |
|
| 15 |
+
| `coverage <fuzzer_name>` | generate a coverage report for the given fuzzer. |
|
| 16 |
+
|
| 17 |
+
# Examples
|
| 18 |
+
|
| 19 |
+
- *Reproduce using latest OSS-Fuzz build:*
|
| 20 |
+
|
| 21 |
+
<pre>
|
| 22 |
+
docker run --rm -ti -v <b><i><testcase_path></i></b>:/testcase gcr.io/oss-fuzz/<b><i>$PROJECT_NAME</i></b> reproduce <b><i><fuzzer_name></i></b>
|
| 23 |
+
</pre>
|
| 24 |
+
|
| 25 |
+
- *Reproduce using local source checkout:*
|
| 26 |
+
|
| 27 |
+
<pre>
|
| 28 |
+
docker run --rm -ti -v <b><i><source_path></i></b>:/src/<b><i>$PROJECT_NAME</i></b> \
|
| 29 |
+
-v <b><i><testcase_path></i></b>:/testcase gcr.io/oss-fuzz/<b><i>$PROJECT_NAME</i></b> \
|
| 30 |
+
reproduce <b><i><fuzzer_name></i></b>
|
| 31 |
+
</pre>
|
local-test-commons-compress-delta-03/fuzz-tooling/infra/base-images/base-runner/coverage
ADDED
|
@@ -0,0 +1,549 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/bin/bash -u
|
| 2 |
+
# Copyright 2018 Google Inc.
|
| 3 |
+
#
|
| 4 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 5 |
+
# you may not use this file except in compliance with the License.
|
| 6 |
+
# You may obtain a copy of the License at
|
| 7 |
+
#
|
| 8 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 9 |
+
#
|
| 10 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 11 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 12 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 13 |
+
# See the License for the specific language governing permissions and
|
| 14 |
+
# limitations under the License.
|
| 15 |
+
#
|
| 16 |
+
################################################################################
|
| 17 |
+
cd $OUT
|
| 18 |
+
|
| 19 |
+
if (( $# > 0 )); then
|
| 20 |
+
FUZZ_TARGETS="$@"
|
| 21 |
+
else
|
| 22 |
+
FUZZ_TARGETS="$(find . -maxdepth 1 -type f -executable -printf '%P\n' | \
|
| 23 |
+
grep -v -x -F \
|
| 24 |
+
-e 'llvm-symbolizer' \
|
| 25 |
+
-e 'jazzer_agent_deploy.jar' \
|
| 26 |
+
-e 'jazzer_driver' \
|
| 27 |
+
-e 'jazzer_driver_with_sanitizer' \
|
| 28 |
+
-e 'sanitizer_with_fuzzer.so')"
|
| 29 |
+
fi
|
| 30 |
+
|
| 31 |
+
COVERAGE_OUTPUT_DIR=${COVERAGE_OUTPUT_DIR:-$OUT}
|
| 32 |
+
|
| 33 |
+
DUMPS_DIR="$COVERAGE_OUTPUT_DIR/dumps"
|
| 34 |
+
FUZZERS_COVERAGE_DUMPS_DIR="$DUMPS_DIR/fuzzers_coverage"
|
| 35 |
+
MERGED_COVERAGE_DIR="$COVERAGE_OUTPUT_DIR/merged_coverage"
|
| 36 |
+
FUZZER_STATS_DIR="$COVERAGE_OUTPUT_DIR/fuzzer_stats"
|
| 37 |
+
TEXTCOV_REPORT_DIR="$COVERAGE_OUTPUT_DIR/textcov_reports"
|
| 38 |
+
LOGS_DIR="$COVERAGE_OUTPUT_DIR/logs"
|
| 39 |
+
REPORT_ROOT_DIR="$COVERAGE_OUTPUT_DIR/report"
|
| 40 |
+
REPORT_BY_TARGET_ROOT_DIR="$COVERAGE_OUTPUT_DIR/report_target"
|
| 41 |
+
PLATFORM=linux
|
| 42 |
+
REPORT_PLATFORM_DIR="$COVERAGE_OUTPUT_DIR/report/$PLATFORM"
|
| 43 |
+
|
| 44 |
+
for directory in $DUMPS_DIR $FUZZER_STATS_DIR $LOGS_DIR $REPORT_ROOT_DIR $TEXTCOV_REPORT_DIR\
|
| 45 |
+
$REPORT_PLATFORM_DIR $REPORT_BY_TARGET_ROOT_DIR $FUZZERS_COVERAGE_DUMPS_DIR $MERGED_COVERAGE_DIR; do
|
| 46 |
+
rm -rf $directory
|
| 47 |
+
mkdir -p $directory
|
| 48 |
+
done
|
| 49 |
+
|
| 50 |
+
PROFILE_FILE="$DUMPS_DIR/merged.profdata"
|
| 51 |
+
SUMMARY_FILE="$REPORT_PLATFORM_DIR/summary.json"
|
| 52 |
+
COVERAGE_TARGET_FILE="$FUZZER_STATS_DIR/coverage_targets.txt"
|
| 53 |
+
|
| 54 |
+
# Use path mapping, as $SRC directory from the builder is copied into $OUT/$SRC.
|
| 55 |
+
PATH_EQUIVALENCE_ARGS="-path-equivalence=/,$OUT"
|
| 56 |
+
|
| 57 |
+
# It's important to use $COVERAGE_EXTRA_ARGS as the last argument, because it
|
| 58 |
+
# can contain paths to source files / directories which are positional args.
|
| 59 |
+
LLVM_COV_COMMON_ARGS="$PATH_EQUIVALENCE_ARGS \
|
| 60 |
+
-ignore-filename-regex=.*src/libfuzzer/.* $COVERAGE_EXTRA_ARGS"
|
| 61 |
+
|
| 62 |
+
# Options to extract branch coverage.
|
| 63 |
+
BRANCH_COV_ARGS="--show-branches=count --show-expansions"
|
| 64 |
+
|
| 65 |
+
# Timeout for running a single fuzz target.
|
| 66 |
+
TIMEOUT=1h
|
| 67 |
+
|
| 68 |
+
# This will be used by llvm-cov command to generate the actual report.
|
| 69 |
+
objects=""
|
| 70 |
+
|
| 71 |
+
# Number of CPUs available, this is needed for running tests in parallel.
|
| 72 |
+
# Set the max number of parallel jobs to be the CPU count and a max of 10.
|
| 73 |
+
NPROC=$(nproc)
|
| 74 |
+
MAX_PARALLEL_COUNT=10
|
| 75 |
+
|
| 76 |
+
CORPUS_DIR=${CORPUS_DIR:-"/corpus"}
|
| 77 |
+
|
| 78 |
+
function run_fuzz_target {
|
| 79 |
+
local target=$1
|
| 80 |
+
|
| 81 |
+
# '%1m' will produce separate dump files for every object. For example, if a
|
| 82 |
+
# fuzz target loads a shared library, we will have dumps for both of them.
|
| 83 |
+
local profraw_file="$DUMPS_DIR/$target.%1m.profraw"
|
| 84 |
+
local profraw_file_mask="$DUMPS_DIR/$target.*.profraw"
|
| 85 |
+
local profdata_file="$DUMPS_DIR/$target.profdata"
|
| 86 |
+
local corpus_real="$CORPUS_DIR/${target}"
|
| 87 |
+
|
| 88 |
+
# -merge=1 requires an output directory, create a new, empty dir for that.
|
| 89 |
+
local corpus_dummy="$OUT/dummy_corpus_dir_for_${target}"
|
| 90 |
+
rm -rf $corpus_dummy && mkdir -p $corpus_dummy
|
| 91 |
+
|
| 92 |
+
# Use -merge=1 instead of -runs=0 because merge is crash resistant and would
|
| 93 |
+
# let to get coverage using all corpus files even if there are crash inputs.
|
| 94 |
+
# Merge should not introduce any significant overhead compared to -runs=0,
|
| 95 |
+
# because (A) corpuses are already minimized; (B) we do not use sancov, and so
|
| 96 |
+
# libFuzzer always finishes merge with an empty output dir.
|
| 97 |
+
# Use 100s timeout instead of 25s as code coverage builds can be very slow.
|
| 98 |
+
local args="-merge=1 -timeout=100 $corpus_dummy $corpus_real"
|
| 99 |
+
|
| 100 |
+
export LLVM_PROFILE_FILE=$profraw_file
|
| 101 |
+
timeout $TIMEOUT $OUT/$target $args &> $LOGS_DIR/$target.log
|
| 102 |
+
if (( $? != 0 )); then
|
| 103 |
+
echo "Error occured while running $target:"
|
| 104 |
+
cat $LOGS_DIR/$target.log
|
| 105 |
+
fi
|
| 106 |
+
|
| 107 |
+
rm -rf $corpus_dummy
|
| 108 |
+
if (( $(du -c $profraw_file_mask | tail -n 1 | cut -f 1) == 0 )); then
|
| 109 |
+
# Skip fuzz targets that failed to produce profile dumps.
|
| 110 |
+
return 0
|
| 111 |
+
fi
|
| 112 |
+
|
| 113 |
+
# If necessary translate to latest profraw version.
|
| 114 |
+
if [[ $target == *"@"* ]]; then
|
| 115 |
+
# Extract fuzztest binary name from fuzztest wrapper script.
|
| 116 |
+
target=(${target//@/ }[0])
|
| 117 |
+
fi
|
| 118 |
+
profraw_update.py $OUT/$target -i $profraw_file_mask
|
| 119 |
+
llvm-profdata merge -j=1 -sparse $profraw_file_mask -o $profdata_file
|
| 120 |
+
|
| 121 |
+
# Delete unnecessary and (potentially) large .profraw files.
|
| 122 |
+
rm $profraw_file_mask
|
| 123 |
+
|
| 124 |
+
shared_libraries=$(coverage_helper shared_libs -build-dir=$OUT -object=$target)
|
| 125 |
+
|
| 126 |
+
llvm-cov export -summary-only -instr-profile=$profdata_file -object=$target \
|
| 127 |
+
$shared_libraries $LLVM_COV_COMMON_ARGS > $FUZZER_STATS_DIR/$target.json
|
| 128 |
+
|
| 129 |
+
# For introspector.
|
| 130 |
+
llvm-cov show -instr-profile=$profdata_file -object=$target -line-coverage-gt=0 $shared_libraries $BRANCH_COV_ARGS $LLVM_COV_COMMON_ARGS > ${TEXTCOV_REPORT_DIR}/$target.covreport
|
| 131 |
+
}
|
| 132 |
+
|
| 133 |
+
function run_go_fuzz_target {
|
| 134 |
+
local target=$1
|
| 135 |
+
|
| 136 |
+
echo "Running go target $target"
|
| 137 |
+
export FUZZ_CORPUS_DIR="$CORPUS_DIR/${target}/"
|
| 138 |
+
export FUZZ_PROFILE_NAME="$DUMPS_DIR/$target.perf"
|
| 139 |
+
|
| 140 |
+
# setup for native go fuzzers
|
| 141 |
+
cd $OUT
|
| 142 |
+
mkdir -p "testdata/fuzz/${target}"
|
| 143 |
+
cp -r "${FUZZ_CORPUS_DIR}" "testdata/fuzz/"
|
| 144 |
+
|
| 145 |
+
# rewrite libFuzzer corpus to Std Go corpus if native fuzzing
|
| 146 |
+
grep "TestFuzzCorpus" $target > /dev/null 2>&1 && $SYSGOPATH/bin/convertcorpus $target "testdata/fuzz/${target}"
|
| 147 |
+
cd -
|
| 148 |
+
|
| 149 |
+
timeout $TIMEOUT $OUT/$target -test.coverprofile $DUMPS_DIR/$target.profdata &> $LOGS_DIR/$target.log
|
| 150 |
+
if (( $? != 0 )); then
|
| 151 |
+
echo "Error occured while running $target:"
|
| 152 |
+
cat $LOGS_DIR/$target.log
|
| 153 |
+
fi
|
| 154 |
+
|
| 155 |
+
# cleanup after native go fuzzers
|
| 156 |
+
rm -r "${OUT}/testdata/fuzz/${target}"
|
| 157 |
+
|
| 158 |
+
# The Go 1.18 fuzzers are renamed to "*_fuzz_.go" during "infra/helper.py build_fuzzers".
|
| 159 |
+
# They are are therefore refered to as "*_fuzz_.go" in the profdata files.
|
| 160 |
+
# Since the copies named "*_fuzz_.go" do not exist in the file tree during
|
| 161 |
+
# the coverage build, we change the references in the .profdata files
|
| 162 |
+
# to the original file names.
|
| 163 |
+
#sed -i "s/_test.go_fuzz_.go/_test.go/g" $DUMPS_DIR/$target.profdata
|
| 164 |
+
# translate from golangish paths to current absolute paths
|
| 165 |
+
cat $OUT/$target.gocovpath | while read i; do sed -i $i $DUMPS_DIR/$target.profdata; done
|
| 166 |
+
# cf PATH_EQUIVALENCE_ARGS
|
| 167 |
+
sed -i 's=/='$OUT'/=' $DUMPS_DIR/$target.profdata
|
| 168 |
+
$SYSGOPATH/bin/gocovsum $DUMPS_DIR/$target.profdata > $FUZZER_STATS_DIR/$target.json
|
| 169 |
+
}
|
| 170 |
+
|
| 171 |
+
function run_python_fuzz_target {
|
| 172 |
+
local target=$1
|
| 173 |
+
local zipped_sources="$DUMPS_DIR/$target.deps.zip"
|
| 174 |
+
local corpus_real="$CORPUS_DIR/${target}"
|
| 175 |
+
# Write dummy stats file
|
| 176 |
+
echo "{}" > "$FUZZER_STATS_DIR/$target.json"
|
| 177 |
+
|
| 178 |
+
# Run fuzzer
|
| 179 |
+
$OUT/$target $corpus_real -atheris_runs=$(ls -la $corpus_real | wc -l) > $LOGS_DIR/$target.log 2>&1
|
| 180 |
+
if (( $? != 0 )); then
|
| 181 |
+
echo "Error happened getting coverage of $target"
|
| 182 |
+
echo "This is likely because Atheris did not exit gracefully"
|
| 183 |
+
cat $LOGS_DIR/$target.log
|
| 184 |
+
return 0
|
| 185 |
+
fi
|
| 186 |
+
mv .coverage $OUT/.coverage_$target
|
| 187 |
+
}
|
| 188 |
+
|
| 189 |
+
function run_java_fuzz_target {
|
| 190 |
+
local target=$1
|
| 191 |
+
|
| 192 |
+
local exec_file="$DUMPS_DIR/$target.exec"
|
| 193 |
+
local class_dump_dir="$DUMPS_DIR/${target}_classes/"
|
| 194 |
+
mkdir "$class_dump_dir"
|
| 195 |
+
local corpus_real="$CORPUS_DIR/${target}"
|
| 196 |
+
|
| 197 |
+
# -merge=1 requires an output directory, create a new, empty dir for that.
|
| 198 |
+
local corpus_dummy="$OUT/dummy_corpus_dir_for_${target}"
|
| 199 |
+
rm -rf $corpus_dummy && mkdir -p $corpus_dummy
|
| 200 |
+
|
| 201 |
+
# Use 100s timeout instead of 25s as code coverage builds can be very slow.
|
| 202 |
+
local jacoco_args="destfile=$exec_file,classdumpdir=$class_dump_dir,excludes=com.code_intelligence.jazzer.*\\:com.sun.tools.attach.VirtualMachine"
|
| 203 |
+
local args="-merge=1 -timeout=100 --nohooks \
|
| 204 |
+
--additional_jvm_args=-javaagent\\:/opt/jacoco-agent.jar=$jacoco_args \
|
| 205 |
+
$corpus_dummy $corpus_real"
|
| 206 |
+
|
| 207 |
+
timeout $TIMEOUT $OUT/$target $args &> $LOGS_DIR/$target.log
|
| 208 |
+
if (( $? != 0 )); then
|
| 209 |
+
echo "Error occured while running $target:"
|
| 210 |
+
cat $LOGS_DIR/$target.log
|
| 211 |
+
fi
|
| 212 |
+
|
| 213 |
+
if (( $(du -c $exec_file | tail -n 1 | cut -f 1) == 0 )); then
|
| 214 |
+
# Skip fuzz targets that failed to produce .exec files.
|
| 215 |
+
echo "$target failed to produce .exec file."
|
| 216 |
+
return 0
|
| 217 |
+
fi
|
| 218 |
+
|
| 219 |
+
# Generate XML report only as input to jacoco_report_converter.
|
| 220 |
+
# Source files are not needed for the summary.
|
| 221 |
+
local xml_report="$DUMPS_DIR/${target}.xml"
|
| 222 |
+
local summary_file="$FUZZER_STATS_DIR/$target.json"
|
| 223 |
+
java -jar /opt/jacoco-cli.jar report $exec_file \
|
| 224 |
+
--xml $xml_report \
|
| 225 |
+
--classfiles $class_dump_dir
|
| 226 |
+
|
| 227 |
+
# Write llvm-cov summary file.
|
| 228 |
+
jacoco_report_converter.py $xml_report $summary_file
|
| 229 |
+
}
|
| 230 |
+
|
| 231 |
+
function run_javascript_fuzz_target {
|
| 232 |
+
local target=$1
|
| 233 |
+
local corpus_real="$CORPUS_DIR/${target}"
|
| 234 |
+
|
| 235 |
+
# -merge=1 requires an output directory, create a new, empty dir for that.
|
| 236 |
+
local corpus_dummy="$OUT/dummy_corpus_dir_for_${target}"
|
| 237 |
+
rm -rf $corpus_dummy && mkdir -p $corpus_dummy
|
| 238 |
+
|
| 239 |
+
# IstanbulJS currently does not work when the tested program creates
|
| 240 |
+
# subprocesses. For this reason, we first minimize the corpus removing
|
| 241 |
+
# any crashing inputs so that we can report source-based code coverage
|
| 242 |
+
# with a single sweep over the minimized corpus
|
| 243 |
+
local merge_args="-merge=1 -timeout=100 $corpus_dummy $corpus_real"
|
| 244 |
+
timeout $TIMEOUT $OUT/$target $merge_args &> $LOGS_DIR/$target.log
|
| 245 |
+
|
| 246 |
+
# nyc saves the coverage reports in a directory with the default name "coverage"
|
| 247 |
+
local coverage_dir="$DUMPS_DIR/coverage_dir_for_${target}"
|
| 248 |
+
rm -rf $coverage_dir && mkdir -p $coverage_dir
|
| 249 |
+
|
| 250 |
+
local nyc_json_coverage_file="$coverage_dir/coverage-final.json"
|
| 251 |
+
local nyc_json_summary_file="$coverage_dir/coverage-summary.json"
|
| 252 |
+
|
| 253 |
+
local args="-runs=0 $corpus_dummy"
|
| 254 |
+
local jazzerjs_args="--coverage --coverageDirectory $coverage_dir --coverageReporters json --coverageReporters json-summary"
|
| 255 |
+
|
| 256 |
+
JAZZERJS_EXTRA_ARGS=$jazzerjs_args $OUT/$target $args &> $LOGS_DIR/$target.log
|
| 257 |
+
|
| 258 |
+
if (( $? != 0 )); then
|
| 259 |
+
echo "Error occured while running $target:"
|
| 260 |
+
cat $LOGS_DIR/$target.log
|
| 261 |
+
fi
|
| 262 |
+
|
| 263 |
+
if [ ! -s $nyc_json_coverage_file ]; then
|
| 264 |
+
# Skip fuzz targets that failed to produce coverage-final.json file.
|
| 265 |
+
echo "$target failed to produce coverage-final.json file."
|
| 266 |
+
return 0
|
| 267 |
+
fi
|
| 268 |
+
|
| 269 |
+
cp $nyc_json_coverage_file $FUZZERS_COVERAGE_DUMPS_DIR/$target.json
|
| 270 |
+
|
| 271 |
+
local summary_file="$FUZZER_STATS_DIR/$target.json"
|
| 272 |
+
|
| 273 |
+
nyc_report_converter.py $nyc_json_summary_file $summary_file
|
| 274 |
+
}
|
| 275 |
+
|
| 276 |
+
function generate_html {
|
| 277 |
+
local profdata=$1
|
| 278 |
+
local shared_libraries=$2
|
| 279 |
+
local objects=$3
|
| 280 |
+
local output_dir=$4
|
| 281 |
+
|
| 282 |
+
rm -rf "$output_dir"
|
| 283 |
+
mkdir -p "$output_dir/$PLATFORM"
|
| 284 |
+
|
| 285 |
+
local llvm_cov_args="-instr-profile=$profdata $objects $LLVM_COV_COMMON_ARGS"
|
| 286 |
+
llvm-cov show -format=html -output-dir=$output_dir -Xdemangler rcfilt $llvm_cov_args
|
| 287 |
+
|
| 288 |
+
# Export coverage summary in JSON format.
|
| 289 |
+
local summary_file=$output_dir/$PLATFORM/summary.json
|
| 290 |
+
|
| 291 |
+
llvm-cov export -summary-only $llvm_cov_args > $summary_file
|
| 292 |
+
|
| 293 |
+
coverage_helper -v post_process -src-root-dir=/ -summary-file=$summary_file \
|
| 294 |
+
-output-dir=$output_dir $PATH_EQUIVALENCE_ARGS
|
| 295 |
+
}
|
| 296 |
+
|
| 297 |
+
export SYSGOPATH=$GOPATH
|
| 298 |
+
export GOPATH=$OUT/$GOPATH
|
| 299 |
+
# Run each fuzz target, generate raw coverage dumps.
|
| 300 |
+
for fuzz_target in $FUZZ_TARGETS; do
|
| 301 |
+
# Test if fuzz target is a golang one.
|
| 302 |
+
if [[ $FUZZING_LANGUAGE == "go" ]]; then
|
| 303 |
+
# Continue if not a fuzz target.
|
| 304 |
+
if [[ $FUZZING_ENGINE != "none" ]]; then
|
| 305 |
+
grep "FUZZ_CORPUS_DIR" $fuzz_target > /dev/null 2>&1 || grep "testing\.T" $fuzz_target > /dev/null 2>&1 || continue
|
| 306 |
+
fi
|
| 307 |
+
# Log the target in the targets file.
|
| 308 |
+
echo ${fuzz_target} >> $COVERAGE_TARGET_FILE
|
| 309 |
+
|
| 310 |
+
# Run the coverage collection.
|
| 311 |
+
run_go_fuzz_target $fuzz_target &
|
| 312 |
+
elif [[ $FUZZING_LANGUAGE == "python" ]]; then
|
| 313 |
+
echo "Entering python fuzzing"
|
| 314 |
+
# Log the target in the targets file.
|
| 315 |
+
echo ${fuzz_target} >> $COVERAGE_TARGET_FILE
|
| 316 |
+
|
| 317 |
+
# Run the coverage collection.
|
| 318 |
+
run_python_fuzz_target $fuzz_target
|
| 319 |
+
elif [[ $FUZZING_LANGUAGE == "jvm" ]]; then
|
| 320 |
+
# Continue if not a fuzz target.
|
| 321 |
+
if [[ $FUZZING_ENGINE != "none" ]]; then
|
| 322 |
+
grep "LLVMFuzzerTestOneInput" $fuzz_target > /dev/null 2>&1 || continue
|
| 323 |
+
fi
|
| 324 |
+
|
| 325 |
+
echo "Running $fuzz_target"
|
| 326 |
+
# Log the target in the targets file.
|
| 327 |
+
echo ${fuzz_target} >> $COVERAGE_TARGET_FILE
|
| 328 |
+
|
| 329 |
+
# Run the coverage collection.
|
| 330 |
+
run_java_fuzz_target $fuzz_target &
|
| 331 |
+
elif [[ $FUZZING_LANGUAGE == "javascript" ]]; then
|
| 332 |
+
# Continue if not a fuzz target.
|
| 333 |
+
if [[ $FUZZING_ENGINE != "none" ]]; then
|
| 334 |
+
grep "LLVMFuzzerTestOneInput" $fuzz_target > /dev/null 2>&1 || continue
|
| 335 |
+
fi
|
| 336 |
+
|
| 337 |
+
echo "Running $fuzz_target"
|
| 338 |
+
# Log the target in the targets file.
|
| 339 |
+
echo ${fuzz_target} >> $COVERAGE_TARGET_FILE
|
| 340 |
+
|
| 341 |
+
# Run the coverage collection.
|
| 342 |
+
run_javascript_fuzz_target $fuzz_target &
|
| 343 |
+
else
|
| 344 |
+
# Continue if not a fuzz target.
|
| 345 |
+
if [[ $FUZZING_ENGINE != "none" ]]; then
|
| 346 |
+
grep "LLVMFuzzerTestOneInput" $fuzz_target > /dev/null 2>&1 || continue
|
| 347 |
+
fi
|
| 348 |
+
|
| 349 |
+
echo "Running $fuzz_target"
|
| 350 |
+
# Log the target in the targets file.
|
| 351 |
+
echo ${fuzz_target} >> $COVERAGE_TARGET_FILE
|
| 352 |
+
|
| 353 |
+
# Run the coverage collection.
|
| 354 |
+
run_fuzz_target $fuzz_target &
|
| 355 |
+
|
| 356 |
+
# Rewrite object if its a FUZZTEST target
|
| 357 |
+
if [[ $fuzz_target == *"@"* ]]; then
|
| 358 |
+
# Extract fuzztest binary name from fuzztest wrapper script.
|
| 359 |
+
fuzz_target=(${fuzz_target//@/ }[0])
|
| 360 |
+
fi
|
| 361 |
+
if [[ -z $objects ]]; then
|
| 362 |
+
# The first object needs to be passed without -object= flag.
|
| 363 |
+
objects="$fuzz_target"
|
| 364 |
+
else
|
| 365 |
+
objects="$objects -object=$fuzz_target"
|
| 366 |
+
fi
|
| 367 |
+
fi
|
| 368 |
+
|
| 369 |
+
|
| 370 |
+
# Limit the number of processes to be spawned.
|
| 371 |
+
n_child_proc=$(jobs -rp | wc -l)
|
| 372 |
+
while [[ "$n_child_proc" -eq "$NPROC" || "$n_child_proc" -gt "$MAX_PARALLEL_COUNT" ]]; do
|
| 373 |
+
sleep 4
|
| 374 |
+
n_child_proc=$(jobs -rp | wc -l)
|
| 375 |
+
done
|
| 376 |
+
done
|
| 377 |
+
|
| 378 |
+
# Wait for background processes to finish.
|
| 379 |
+
wait
|
| 380 |
+
|
| 381 |
+
if [[ $FUZZING_LANGUAGE == "go" ]]; then
|
| 382 |
+
echo $DUMPS_DIR
|
| 383 |
+
$SYSGOPATH/bin/gocovmerge $DUMPS_DIR/*.profdata > fuzz.cov
|
| 384 |
+
gotoolcover -html=fuzz.cov -o $REPORT_ROOT_DIR/index.html
|
| 385 |
+
$SYSGOPATH/bin/gocovsum fuzz.cov > $SUMMARY_FILE
|
| 386 |
+
cp $REPORT_ROOT_DIR/index.html $REPORT_PLATFORM_DIR/index.html
|
| 387 |
+
$SYSGOPATH/bin/pprof-merge $DUMPS_DIR/*.perf.cpu.prof
|
| 388 |
+
mv merged.data $REPORT_ROOT_DIR/cpu.prof
|
| 389 |
+
$SYSGOPATH/bin/pprof-merge $DUMPS_DIR/*.perf.heap.prof
|
| 390 |
+
mv merged.data $REPORT_ROOT_DIR/heap.prof
|
| 391 |
+
#TODO some proxy for go tool pprof -http=127.0.0.1:8001 $DUMPS_DIR/cpu.prof
|
| 392 |
+
echo "Finished generating code coverage report for Go fuzz targets."
|
| 393 |
+
elif [[ $FUZZING_LANGUAGE == "python" ]]; then
|
| 394 |
+
# Extract source files from all dependency zip folders
|
| 395 |
+
mkdir -p /pythoncovmergedfiles/medio
|
| 396 |
+
PYCOVDIR=/pycovdir/
|
| 397 |
+
mkdir $PYCOVDIR
|
| 398 |
+
for fuzzer in $FUZZ_TARGETS; do
|
| 399 |
+
fuzzer_deps=${fuzzer}.pkg.deps.zip
|
| 400 |
+
unzip $OUT/${fuzzer_deps}
|
| 401 |
+
rsync -r ./medio /pythoncovmergedfiles/medio
|
| 402 |
+
rm -rf ./medio
|
| 403 |
+
|
| 404 |
+
# Translate paths in unzipped folders to paths that we can use
|
| 405 |
+
mv $OUT/.coverage_$fuzzer .coverage
|
| 406 |
+
python3 /usr/local/bin/python_coverage_runner_help.py translate /pythoncovmergedfiles/medio
|
| 407 |
+
cp .new_coverage $PYCOVDIR/.coverage_$fuzzer
|
| 408 |
+
cp .new_coverage $OUT/coverage_d_$fuzzer
|
| 409 |
+
done
|
| 410 |
+
|
| 411 |
+
# Combine coverage
|
| 412 |
+
cd $PYCOVDIR
|
| 413 |
+
python3 /usr/local/bin/python_coverage_runner_help.py combine .coverage_*
|
| 414 |
+
python3 /usr/local/bin/python_coverage_runner_help.py html
|
| 415 |
+
# Produce all_cov file used by fuzz introspector.
|
| 416 |
+
python3 /usr/local/bin/python_coverage_runner_help.py json -o ${TEXTCOV_REPORT_DIR}/all_cov.json
|
| 417 |
+
|
| 418 |
+
# Generate .json with similar format to llvm-cov output.
|
| 419 |
+
python3 /usr/local/bin/python_coverage_runner_help.py \
|
| 420 |
+
convert-to-summary-json ${TEXTCOV_REPORT_DIR}/all_cov.json $SUMMARY_FILE
|
| 421 |
+
|
| 422 |
+
# Copy coverage date out
|
| 423 |
+
cp htmlcov/status.json ${TEXTCOV_REPORT_DIR}/html_status.json
|
| 424 |
+
|
| 425 |
+
mv htmlcov/* $REPORT_PLATFORM_DIR/
|
| 426 |
+
mv .coverage_* $REPORT_PLATFORM_DIR/
|
| 427 |
+
elif [[ $FUZZING_LANGUAGE == "jvm" ]]; then
|
| 428 |
+
|
| 429 |
+
# From this point on the script does not tolerate any errors.
|
| 430 |
+
set -e
|
| 431 |
+
|
| 432 |
+
# Merge .exec files from the individual targets.
|
| 433 |
+
jacoco_merged_exec=$DUMPS_DIR/jacoco.merged.exec
|
| 434 |
+
java -jar /opt/jacoco-cli.jar merge $DUMPS_DIR/*.exec \
|
| 435 |
+
--destfile $jacoco_merged_exec
|
| 436 |
+
|
| 437 |
+
# Prepare classes directory for jacoco process
|
| 438 |
+
classes_dir=$DUMPS_DIR/classes
|
| 439 |
+
mkdir $classes_dir
|
| 440 |
+
|
| 441 |
+
# Only copy class files found in $OUT/$SRC to ensure they are
|
| 442 |
+
# lively compiled from the project, avoiding inclusion of
|
| 443 |
+
# dependency classes. This also includes the fuzzer classes.
|
| 444 |
+
find "$OUT/$SRC" -type f -name "*.class" | while read -r class_file; do
|
| 445 |
+
# Skip module-info.class
|
| 446 |
+
if [[ "$(basename "$class_file")" == "module-info.class" ]]; then
|
| 447 |
+
continue
|
| 448 |
+
fi
|
| 449 |
+
|
| 450 |
+
# Use javap to extract the fully qualified name of the class and copy it to $classes_dir
|
| 451 |
+
fqn=$(javap -verbose "$class_file" 2>/dev/null | grep "this_class:" | grep -oP '(?<=// ).*')
|
| 452 |
+
if [ -n "$fqn" ]; then
|
| 453 |
+
mkdir -p $classes_dir/$(dirname $fqn)
|
| 454 |
+
cp $class_file $classes_dir/$fqn.class
|
| 455 |
+
fi
|
| 456 |
+
done
|
| 457 |
+
|
| 458 |
+
# Heuristically determine source directories based on Maven structure.
|
| 459 |
+
# Always include the $SRC root as it likely contains the fuzzer sources.
|
| 460 |
+
sourcefiles_args=(--sourcefiles $OUT/$SRC)
|
| 461 |
+
source_dirs=$(find $OUT/$SRC -type d -name 'java')
|
| 462 |
+
for source_dir in $source_dirs; do
|
| 463 |
+
sourcefiles_args+=(--sourcefiles "$source_dir")
|
| 464 |
+
done
|
| 465 |
+
|
| 466 |
+
# Generate HTML and XML reports.
|
| 467 |
+
xml_report=$REPORT_PLATFORM_DIR/index.xml
|
| 468 |
+
java -jar /opt/jacoco-cli.jar report $jacoco_merged_exec \
|
| 469 |
+
--html $REPORT_PLATFORM_DIR \
|
| 470 |
+
--xml $xml_report \
|
| 471 |
+
--classfiles $classes_dir \
|
| 472 |
+
"${sourcefiles_args[@]}"
|
| 473 |
+
|
| 474 |
+
# Also serve the raw exec file and XML report, which can be useful for
|
| 475 |
+
# automated analysis.
|
| 476 |
+
cp $jacoco_merged_exec $REPORT_PLATFORM_DIR/jacoco.exec
|
| 477 |
+
cp $xml_report $REPORT_PLATFORM_DIR/jacoco.xml
|
| 478 |
+
cp $xml_report $TEXTCOV_REPORT_DIR/jacoco.xml
|
| 479 |
+
|
| 480 |
+
# Write llvm-cov summary file.
|
| 481 |
+
jacoco_report_converter.py $xml_report $SUMMARY_FILE
|
| 482 |
+
|
| 483 |
+
set +e
|
| 484 |
+
elif [[ $FUZZING_LANGUAGE == "javascript" ]]; then
|
| 485 |
+
|
| 486 |
+
# From this point on the script does not tolerate any errors.
|
| 487 |
+
set -e
|
| 488 |
+
|
| 489 |
+
json_report=$MERGED_COVERAGE_DIR/coverage.json
|
| 490 |
+
nyc merge $FUZZERS_COVERAGE_DUMPS_DIR $json_report
|
| 491 |
+
|
| 492 |
+
nyc report -t $MERGED_COVERAGE_DIR --report-dir $REPORT_PLATFORM_DIR --reporter=html --reporter=json-summary
|
| 493 |
+
|
| 494 |
+
nyc_json_summary_file=$REPORT_PLATFORM_DIR/coverage-summary.json
|
| 495 |
+
|
| 496 |
+
# Write llvm-cov summary file.
|
| 497 |
+
nyc_report_converter.py $nyc_json_summary_file $SUMMARY_FILE
|
| 498 |
+
|
| 499 |
+
set +e
|
| 500 |
+
else
|
| 501 |
+
|
| 502 |
+
# From this point on the script does not tolerate any errors.
|
| 503 |
+
set -e
|
| 504 |
+
|
| 505 |
+
# Merge all dumps from the individual targets.
|
| 506 |
+
rm -f $PROFILE_FILE
|
| 507 |
+
llvm-profdata merge -sparse $DUMPS_DIR/*.profdata -o $PROFILE_FILE
|
| 508 |
+
|
| 509 |
+
# TODO(mmoroz): add script from Chromium for rendering directory view reports.
|
| 510 |
+
# The first path in $objects does not have -object= prefix (llvm-cov format).
|
| 511 |
+
shared_libraries=$(coverage_helper shared_libs -build-dir=$OUT -object=$objects)
|
| 512 |
+
objects="$objects $shared_libraries"
|
| 513 |
+
|
| 514 |
+
generate_html $PROFILE_FILE "$shared_libraries" "$objects" "$REPORT_ROOT_DIR"
|
| 515 |
+
|
| 516 |
+
# Per target reports.
|
| 517 |
+
for fuzz_target in $FUZZ_TARGETS; do
|
| 518 |
+
if [[ $fuzz_target == *"@"* ]]; then
|
| 519 |
+
profdata_path=$DUMPS_DIR/$fuzz_target.profdata
|
| 520 |
+
report_dir=$REPORT_BY_TARGET_ROOT_DIR/$fuzz_target
|
| 521 |
+
# Extract fuzztest binary name from fuzztest wrapper script.
|
| 522 |
+
fuzz_target=(${fuzz_target//@/ }[0])
|
| 523 |
+
else
|
| 524 |
+
profdata_path=$DUMPS_DIR/$fuzz_target.profdata
|
| 525 |
+
report_dir=$REPORT_BY_TARGET_ROOT_DIR/$fuzz_target
|
| 526 |
+
fi
|
| 527 |
+
if [[ ! -f "$profdata_path" ]]; then
|
| 528 |
+
echo "WARNING: $fuzz_target has no profdata generated."
|
| 529 |
+
continue
|
| 530 |
+
fi
|
| 531 |
+
|
| 532 |
+
generate_html $profdata_path "$shared_libraries" "$fuzz_target" "$report_dir"
|
| 533 |
+
done
|
| 534 |
+
|
| 535 |
+
set +e
|
| 536 |
+
fi
|
| 537 |
+
|
| 538 |
+
# Make sure report is readable.
|
| 539 |
+
chmod -R +r $REPORT_ROOT_DIR $REPORT_BY_TARGET_ROOT_DIR
|
| 540 |
+
find $REPORT_ROOT_DIR $REPORT_BY_TARGET_ROOT_DIR -type d -exec chmod +x {} +
|
| 541 |
+
|
| 542 |
+
# HTTP_PORT is optional.
|
| 543 |
+
set +u
|
| 544 |
+
if [[ -n $HTTP_PORT ]]; then
|
| 545 |
+
# Serve the report locally.
|
| 546 |
+
echo "Serving the report on http://127.0.0.1:$HTTP_PORT/linux/index.html"
|
| 547 |
+
cd $REPORT_ROOT_DIR
|
| 548 |
+
python3 -m http.server $HTTP_PORT
|
| 549 |
+
fi
|
local-test-commons-compress-delta-03/fuzz-tooling/infra/base-images/base-runner/coverage_helper
ADDED
|
@@ -0,0 +1,17 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/bin/bash -u
# Copyright 2018 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################

# Thin wrapper forwarding all arguments to the coverage utilities script.
# BUGFIX: quote "$@" (and the script path) so arguments containing spaces or
# glob characters are passed through intact instead of being re-split.
python3 "$CODE_COVERAGE_SRC/coverage_utils.py" "$@"
|
local-test-commons-compress-delta-03/fuzz-tooling/infra/base-images/base-runner/download_corpus
ADDED
|
@@ -0,0 +1,30 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/bin/bash -u
# Copyright 2018 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################

# Each argument is a "path url" pair; the file at url is fetched into path.

if (( $# < 1 )); then
  echo "Usage: $0 \"path_download_to url_download_from\" (can be repeated)" >&2
  exit 1
fi

for pair in "$@"; do
  read path url <<< "$pair"
  # BUGFIX: quote the expansions so paths/URLs containing spaces or shell
  # metacharacters are not word-split or glob-expanded.
  wget -q -O "$path" "$url"
done

# Always exit with 0 as we do not track wget return codes and should not rely
# on the latest command execution.
exit 0
|
local-test-commons-compress-delta-03/fuzz-tooling/infra/base-images/base-runner/generate_differential_cov_report.py
ADDED
|
@@ -0,0 +1,228 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/usr/bin/env python3
|
| 2 |
+
#
|
| 3 |
+
# Copyright 2023 Google LLC
|
| 4 |
+
#
|
| 5 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 6 |
+
# you may not use this file except in compliance with the License.
|
| 7 |
+
# You may obtain a copy of the License at
|
| 8 |
+
#
|
| 9 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 10 |
+
#
|
| 11 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 12 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 13 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 14 |
+
# See the License for the specific language governing permissions and
|
| 15 |
+
# limitations under the License.
|
| 16 |
+
#
|
| 17 |
+
################################################################################
|
| 18 |
+
"""Script for generating differential coverage reports.
|
| 19 |
+
generate_differential_cov_report.py <profdata-dump-directory> \
|
| 20 |
+
<profdata-directory-to-subtract-from-first> <output-directory>
|
| 21 |
+
"""
|
| 22 |
+
import os
|
| 23 |
+
import shutil
|
| 24 |
+
import subprocess
|
| 25 |
+
import sys
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
class ProfData:
  """Parsed representation of an llvm-profdata text dump.

  The text format is a sequence of per-function records separated by blank
  lines; each record is parsed into a FunctionProf.
  """

  def __init__(self, text):
    # Records are blank-line separated; skip empty chunks (e.g. trailing).
    self.function_profs = [
        FunctionProf(chunk) for chunk in text.split('\n\n') if chunk
    ]

  def to_string(self):
    """Serialize all function records back into a single string."""
    return '\n'.join(prof.to_string() for prof in self.function_profs)

  def find_function(self, function, idx=None):
    """Return the record whose hash matches |function|'s, or None.

    When |idx| is given, that position is tried first as a fast path before
    falling back to a linear scan.
    """
    if idx is not None and 0 <= idx < len(self.function_profs):
      candidate = self.function_profs[idx]
      if candidate.func_hash == function.func_hash:
        return candidate
    for candidate in self.function_profs:
      if candidate.func_hash == function.func_hash:
        return candidate
    return None

  def subtract(self, subtrahend):
    """Subtract |subtrahend|'s counters from this profile, in place."""
    for idx, prof in enumerate(self.function_profs):
      prof.subtract(subtrahend.find_function(prof, idx))
|
| 62 |
+
|
| 63 |
+
|
| 64 |
+
class FunctionProf:
  """Profile of a single function from an llvm-profdata text dump.

  Counter values are binarized on parse (any nonzero count becomes 1) so
  that subtraction yields pure coverage-difference information.
  """
  FUNC_HASH_COMMENT_LINE = '# Func Hash:'
  NUM_COUNTERS_COMMENT_LINE = '# Num Counters:'
  COUNTER_VALUES_COMMENT_LINE = '# Counter Values:'

  def __init__(self, text):
    print(text)
    lines = text.splitlines()
    self.function = lines[0]
    assert lines[1] == self.FUNC_HASH_COMMENT_LINE
    self.func_hash = lines[2]
    assert lines[3] == self.NUM_COUNTERS_COMMENT_LINE
    self.num_counters = int(lines[4])
    assert lines[5] == self.COUNTER_VALUES_COMMENT_LINE
    # Binarize: we only care whether a counter was hit, not how often.
    self.counter_values = [int(bool(int(value))) for value in lines[6:]]

  def to_string(self):
    """Serialize back into llvm-profdata text record form."""
    header = [
        self.function,
        self.FUNC_HASH_COMMENT_LINE,
        self.func_hash,
        self.NUM_COUNTERS_COMMENT_LINE,
        str(self.num_counters),
        self.COUNTER_VALUES_COMMENT_LINE,
    ]
    return '\n'.join(header + [str(value) for value in self.counter_values])

  def subtract(self, subtrahend_prof):
    """Remove counters also covered by |subtrahend_prof| (clamped at 0)."""
    if not subtrahend_prof:
      print(self.function, 'has no subtrahend')
      # Nothing to subtract.
      return
    self.counter_values = [
        max(mine - theirs, 0)
        for mine, theirs in zip(self.counter_values,
                                subtrahend_prof.counter_values)
    ]
|
| 103 |
+
|
| 104 |
+
|
| 105 |
+
def get_profdata_files(directory):
  """Returns the full paths of all .profdata files directly in |directory|."""
  return [
      os.path.join(directory, name)
      for name in os.listdir(directory)
      if name.endswith('.profdata')
  ]
|
| 113 |
+
|
| 114 |
+
|
| 115 |
+
def convert_profdata_to_text(profdata):
  """Converts a binary profdata file into its text representation.

  Returns the path of the generated text file (<profdata>.txt).
  """
  profdata_text = f'{profdata}.txt'
  # Delete any stale output so llvm-profdata writes a fresh file.
  if os.path.exists(profdata_text):
    os.remove(profdata_text)
  command = ['llvm-profdata', 'merge', '-j=1', '-sparse', profdata]
  command += ['--text', '-o', profdata_text]
  print(command)
  subprocess.run(command, check=True)
  return profdata_text
|
| 127 |
+
|
| 128 |
+
|
| 129 |
+
def convert_text_profdata_to_bin(profdata_text):
  """Converts a text profdata file back into binary form.

  Returns the path of the generated binary .profdata file.
  """
  # NOTE(review): replace() strips every occurrence of these substrings, not
  # just suffixes — fine for the filenames produced by this script.
  base = profdata_text.replace('.txt', '').replace('.profdata', '')
  profdata = base + '.profdata'
  print('bin profdata', profdata)
  # Delete any stale output so llvm-profdata writes a fresh file.
  if os.path.exists(profdata):
    os.remove(profdata)
  command = ['llvm-profdata', 'merge', '-j=1', '-sparse', profdata_text]
  command += ['-o', profdata]
  print(command)
  subprocess.run(command, check=True)
  return profdata
|
| 142 |
+
|
| 143 |
+
|
| 144 |
+
def get_difference(minuend_filename, subtrahend_filename):
  """Subtracts the profile in |subtrahend_filename| from |minuend_filename|.

  Returns the resulting ProfData; the minuend object is mutated in place.
  """
  with open(minuend_filename, 'r', encoding='utf-8') as handle:
    print('minuend', minuend_filename)
    minuend = ProfData(handle.read())
  with open(subtrahend_filename, 'r', encoding='utf-8') as handle:
    print('subtrahend', subtrahend_filename)
    subtrahend = ProfData(handle.read())

  minuend.subtract(subtrahend)
  return minuend
|
| 155 |
+
|
| 156 |
+
|
| 157 |
+
def profdatas_to_objects(profdatas):
  """Maps each profdata path to its binary's name (basename, no extension)."""
  objects = []
  for profdata in profdatas:
    name, _ = os.path.splitext(os.path.basename(profdata))
    objects.append(name)
  return objects
|
| 162 |
+
|
| 163 |
+
|
| 164 |
+
def generate_differential_cov_reports(minuend_profdatas, subtrahend_profdatas,
                                      difference_dir):
  """Computes minuend - subtrahend for every profdata pair and writes an
  HTML differential coverage report for each result into |difference_dir|.

  NOTE(review): pairing relies on the two profdata lists being in matching
  order — confirm callers pass consistently ordered lists.
  """
  profdata_objects = profdatas_to_objects(minuend_profdatas)
  # 'merged' is a combined profile, not a real binary on disk.
  real_profdata_objects = [
      name for name in profdata_objects if name != 'merged'
  ]
  pairs = zip(minuend_profdatas, subtrahend_profdatas, profdata_objects)
  for minuend, subtrahend, binobject in pairs:
    minuend_text = convert_profdata_to_text(minuend)
    subtrahend_text = convert_profdata_to_text(subtrahend)
    difference = get_difference(minuend_text, subtrahend_text)
    difference_text = os.path.join(difference_dir,
                                   os.path.basename(minuend_text))
    with open(difference_text, 'w', encoding='utf-8') as file_handle:
      file_handle.write(difference.to_string())
    difference_profdata = convert_text_profdata_to_bin(difference_text)
    if difference_profdata.endswith('merged.profdata'):
      # The merged profile is reported against every real binary.
      generate_html_report(difference_profdata, real_profdata_objects,
                           os.path.join(difference_dir, 'merged'))
    else:
      generate_html_report(difference_profdata, [binobject],
                           os.path.join(difference_dir, binobject))
|
| 189 |
+
|
| 190 |
+
|
| 191 |
+
def generate_html_report(profdata, objects, directory):
  """Generates an llvm-cov HTML report for |profdata| under |directory|.

  Args:
    profdata: path to the binary profdata file to report on.
    objects: binary names (relative to $OUT) to include in the report.
    directory: output directory; the report lands in <directory>/reports.
  """
  # TODO(metzman): Deal with shared libs.
  html_dir = os.path.join(directory, 'reports')
  if os.path.exists(html_dir):
    # BUGFIX: os.remove() raises on directories; use shutil.rmtree() to
    # clear a stale report tree before regenerating it.
    shutil.rmtree(html_dir)
  os.makedirs(html_dir)
  out_dir = os.getenv('OUT', '/out')
  command = [
      'llvm-cov', 'show', f'-path-equivalence=/,{out_dir}', '-format=html',
      '-Xdemangler', 'rcfilt', f'-instr-profile={profdata}'
  ]

  objects = [os.path.join(out_dir, binobject) for binobject in objects]
  command += objects + ['-o', html_dir]
  print(' '.join(command))
  subprocess.run(command, check=True)
|
| 208 |
+
|
| 209 |
+
|
| 210 |
+
def main():
  """Generates differential coverage reports.

  Usage: generate_differential_cov_report.py <minuend_dir> <subtrahend_dir>
      <difference_dir>
  """
  if len(sys.argv) != 4:
    print(
        f'Usage: {sys.argv[0]} <minuend_dir> <subtrahend_dir> <difference_dir>')
    # BUGFIX: previously execution fell through after printing usage and
    # crashed with an IndexError on sys.argv[1]; bail out explicitly.
    sys.exit(1)
  minuend_dir = sys.argv[1]
  subtrahend_dir = sys.argv[2]
  difference_dir = sys.argv[3]
  # Start from an empty output directory.
  if os.path.exists(difference_dir):
    shutil.rmtree(difference_dir)
  os.makedirs(difference_dir, exist_ok=True)
  minuend_profdatas = get_profdata_files(minuend_dir)
  subtrahend_profdatas = get_profdata_files(subtrahend_dir)
  generate_differential_cov_reports(minuend_profdatas, subtrahend_profdatas,
                                    difference_dir)
|
| 225 |
+
|
| 226 |
+
|
| 227 |
+
if __name__ == '__main__':
|
| 228 |
+
main()
|
local-test-commons-compress-delta-03/fuzz-tooling/infra/base-images/base-runner/install_deps.sh
ADDED
|
@@ -0,0 +1,37 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/bin/bash -eux
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################

# Install dependencies in a platform-aware way.

# -e aborts the script if the update fails, so the install never runs on a
# stale package index.
apt-get update
apt-get install -y --no-install-recommends \
  binutils \
  ca-certificates \
  file \
  fonts-dejavu \
  git \
  libcap2 \
  rsync \
  unzip \
  wget \
  zip

# 32-bit compatibility libraries are only relevant on x86_64.
if [ "$(uname -m)" = "x86_64" ]; then
  apt-get install -y lib32gcc1 libc6-i386
fi
|
local-test-commons-compress-delta-03/fuzz-tooling/infra/base-images/base-runner/install_go.sh
ADDED
|
@@ -0,0 +1,41 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/bin/bash -eux
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################

# Install go on x86_64, don't do anything on ARM.

case $(uname -m) in
  x86_64)
    # Download and install Go 1.19.
    wget -q https://storage.googleapis.com/golang/getgo/installer_linux -O $SRC/installer_linux
    chmod +x $SRC/installer_linux
    SHELL="bash" $SRC/installer_linux -version 1.19
    rm $SRC/installer_linux
    # Set up Golang coverage modules.
    # BUGFIX: never pass data as the printf FORMAT string; a '%' in the found
    # path would be interpreted as a format directive. Also quote the command
    # substitution to avoid word splitting.
    printf '%s' "$(find . -name gocoverage)"
    cd $GOPATH/gocoverage && /root/.go/bin/go install ./...
    cd convertcorpus && /root/.go/bin/go install .
    cd /root/.go/src/cmd/cover && /root/.go/bin/go build && mv cover $GOPATH/bin/gotoolcover
    ;;
  aarch64)
    # Don't install go because installer is not provided.
    echo "Not installing go: aarch64."
    ;;
  *)
    echo "Error: unsupported architecture: $(uname -m)"
    exit 1
    ;;
esac
|
local-test-commons-compress-delta-03/fuzz-tooling/infra/base-images/base-runner/install_java.sh
ADDED
|
@@ -0,0 +1,46 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/bin/bash -eux
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################

# Install java in a platform-aware way.

# Map the machine type to the architecture label used in JDK tarball names.
if [ "$(uname -m)" = "x86_64" ]; then
  ARCHITECTURE=x64
elif [ "$(uname -m)" = "aarch64" ]; then
  ARCHITECTURE=aarch64
else
  echo "Error: unsupported architecture: $(uname -m)"
  exit 1
fi

JDK17_TAR=openjdk-17.0.2_linux-"$ARCHITECTURE"_bin.tar.gz
JDK15_TAR=openjdk-15.0.2_linux-"$ARCHITECTURE"_bin.tar.gz

wget -q https://download.java.net/java/GA/jdk17.0.2/dfd4a8d0985749f896bed50d7138ee7f/8/GPL/"$JDK17_TAR" -O /tmp/"$JDK17_TAR"
wget -q https://download.java.net/java/GA/jdk15.0.2/0d1cfde4252546c6931946de8db48ee2/7/GPL/"$JDK15_TAR" -O /tmp/"$JDK15_TAR"
cd /tmp

# Install OpenJDK 17 and trim its size by removing unused components.
mkdir -p $JAVA_HOME
tar -xz --strip-components=1 -f "$JDK17_TAR" --directory $JAVA_HOME
rm -f "$JDK17_TAR"
rm -rf $JAVA_HOME/jmods $JAVA_HOME/lib/src.zip

# Install OpenJDK 15 and trim its size by removing unused components. Some projects only run with Java 15.
mkdir -p $JAVA_15_HOME
tar -xz --strip-components=1 -f "$JDK15_TAR" --directory $JAVA_15_HOME
rm -f "$JDK15_TAR"
rm -rf $JAVA_15_HOME/jmods $JAVA_15_HOME/lib/src.zip
|
local-test-commons-compress-delta-03/fuzz-tooling/infra/base-images/base-runner/jacoco_report_converter.py
ADDED
|
@@ -0,0 +1,174 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/usr/bin/env python3
|
| 2 |
+
# Copyright 2021 Google LLC
|
| 3 |
+
#
|
| 4 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 5 |
+
# you may not use this file except in compliance with the License.
|
| 6 |
+
# You may obtain a copy of the License at
|
| 7 |
+
#
|
| 8 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 9 |
+
#
|
| 10 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 11 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 12 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 13 |
+
# See the License for the specific language governing permissions and
|
| 14 |
+
# limitations under the License.
|
| 15 |
+
#
|
| 16 |
+
################################################################################
|
| 17 |
+
"""Helper script for creating an llvm-cov style JSON summary from a JaCoCo XML
|
| 18 |
+
report."""
|
| 19 |
+
import json
|
| 20 |
+
import os
|
| 21 |
+
import sys
|
| 22 |
+
import xml.etree.ElementTree as ET
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
def convert(xml):
  """Turns a JaCoCo XML report into an llvm-cov JSON summary."""
  report = ET.fromstring(xml)
  summary = {
      'type': 'oss-fuzz.java.coverage.json.export',
      'version': '1.0.0',
      'data': [{
          'totals': make_element_summary(report),
          'files': [],
      }],
  }

  # Since Java compilation does not track source file location, we match
  # coverage info to source files via the full class name, e.g. we search for
  # a path in /out/src ending in foo/bar/Baz.java for the class foo.bar.Baz.
  # Under the assumptions that a given project only ever contains a single
  # version of a class and that no class name appears as a suffix of another
  # class name, we can assign coverage info to every source file matched in
  # that way.
  src_files = list_src_files()

  files = summary['data'][0]['files']
  for class_element in report.findall('./package/class'):
    # Skip fuzzer classes: harnesses are not interesting coverage targets.
    if is_fuzzer_class(class_element):
      continue

    # Skip elements that are not backed by a real source file.
    if 'sourcefilename' not in class_element.attrib:
      continue

    class_name = class_element.attrib['name']
    # E.g. 'foo/Bar.java' for <class name="foo/Bar" sourcefilename="Bar.java">.
    canonical_path = os.path.join(os.path.dirname(class_name),
                                  class_element.attrib['sourcefilename'])

    class_summary = make_element_summary(class_element)
    for src_file in relative_to_src_path(src_files, canonical_path):
      files.append({
          'filename': src_file,
          'summary': class_summary,
      })

  return json.dumps(summary)
|
| 73 |
+
|
| 74 |
+
|
| 75 |
+
def list_src_files():
  """Returns a map from basename to full path for all files in $OUT/$SRC."""
  filename_to_paths = {}
  out_path = os.environ['OUT'] + '/'
  src_in_out = out_path + os.environ['SRC']
  for dirpath, _, filenames in os.walk(src_in_out):
    for filename in filenames:
      # Map /out//src/... back to /src/... by stripping the $OUT prefix.
      relative = (dirpath + '/' + filename)[len(out_path):]
      filename_to_paths.setdefault(filename, []).append(relative)
  return filename_to_paths
|
| 88 |
+
|
| 89 |
+
|
| 90 |
+
def is_fuzzer_class(class_element):
  """Returns True if |class_element| is a fuzz harness class.

  Harnesses are identified by the presence of a fuzzerTestOneInput method.
  """
  method_element = class_element.find('./method[@name="fuzzerTestOneInput"]')
  # BUGFIX: an ElementTree Element's truth value is based on its child count,
  # so a matching but childless <method> element used to be treated as "not
  # found". Compare against None explicitly instead.
  return method_element is not None
|
| 97 |
+
|
| 98 |
+
|
| 99 |
+
def relative_to_src_path(src_files, canonical_path):
  """Returns all paths in src_files ending in canonical_path."""
  candidates = src_files.get(os.path.basename(canonical_path))
  if candidates is None:
    return []
  suffix = "/" + canonical_path
  return [path for path in candidates if path.endswith(suffix)]
|
| 108 |
+
|
| 109 |
+
|
| 110 |
+
def make_element_summary(element):
  """Returns a coverage summary for an element in the XML report."""
  function_counter = element.find('./counter[@type=\'METHOD\']')
  line_counter = element.find('./counter[@type=\'LINE\']')
  summary = {
      'functions': make_counter_summary(function_counter),
      'lines': make_counter_summary(line_counter),
  }

  # JaCoCo tracks branch coverage, which counts the covered control-flow edges
  # between llvm-cov's regions instead of the covered regions themselves. For
  # non-trivial code parts, the difference is usually negligible. However, if
  # all methods of a class consist of a single region only (no branches),
  # JaCoCo does not report any branch coverage even if there is instruction
  # coverage. Since this would give incorrect results for CI Fuzz purposes, we
  # increase the regions counter by 1 if there is any amount of instruction
  # coverage.
  instruction_counter = element.find('./counter[@type=\'INSTRUCTION\']')
  has_some_coverage = instruction_counter is not None and int(
      instruction_counter.attrib["covered"]) > 0
  region_counter = element.find('./counter[@type=\'BRANCH\']')
  summary['regions'] = make_counter_summary(
      region_counter, covered_adjustment=1 if has_some_coverage else 0)

  return summary
|
| 137 |
+
|
| 138 |
+
|
| 139 |
+
def make_counter_summary(counter_element, covered_adjustment=0):
  """Turns a JaCoCo <counter> element into an llvm-cov totals entry."""
  covered = covered_adjustment
  missed = 0
  if counter_element is not None:
    covered += int(counter_element.attrib['covered'])
    missed += int(counter_element.attrib['missed'])
  total = covered + missed
  return {
      'covered': covered,
      'notcovered': missed,
      'count': total,
      # Avoid a ZeroDivisionError when the counter is entirely empty.
      'percent': (100.0 * covered) / total if total != 0 else 0,
  }
|
| 155 |
+
|
| 156 |
+
|
| 157 |
+
def main():
  """Produces an llvm-cov style JSON summary from a JaCoCo XML report."""
  if len(sys.argv) != 3:
    sys.stderr.write('Usage: %s <path_to_jacoco_xml> <out_path_json>\n' %
                     sys.argv[0])
    return 1

  with open(sys.argv[1], 'r') as xml_file:
    json_summary = convert(xml_file.read())
  with open(sys.argv[2], 'w') as json_file:
    json_file.write(json_summary)

  return 0
|
| 171 |
+
|
| 172 |
+
|
| 173 |
+
if __name__ == '__main__':
|
| 174 |
+
sys.exit(main())
|
local-test-commons-compress-delta-03/fuzz-tooling/infra/base-images/base-runner/nyc_report_converter.py
ADDED
|
@@ -0,0 +1,80 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/usr/bin/env python3
|
| 2 |
+
# Copyright 2023 Google LLC
|
| 3 |
+
#
|
| 4 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 5 |
+
# you may not use this file except in compliance with the License.
|
| 6 |
+
# You may obtain a copy of the License at
|
| 7 |
+
#
|
| 8 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 9 |
+
#
|
| 10 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 11 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 12 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 13 |
+
# See the License for the specific language governing permissions and
|
| 14 |
+
# limitations under the License.
|
| 15 |
+
#
|
| 16 |
+
################################################################################
|
| 17 |
+
"""Helper script for creating a llvm-cov style JSON summary from a nyc
|
| 18 |
+
JSON summary."""
|
| 19 |
+
import json
|
| 20 |
+
import sys
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
def convert(nyc_json_summary):
  """Turns a nyc JSON report into a llvm-cov JSON summary."""
  # Every key except 'total' is a source file; 'total' holds the roll-up.
  per_file = [{
      'filename': src_file,
      'summary': file_summary(nyc_json_summary[src_file]),
  } for src_file in nyc_json_summary if src_file != 'total']

  summary = {
      'type': 'oss-fuzz.javascript.coverage.json.export',
      'version': '1.0.0',
      'data': [{
          'totals': file_summary(nyc_json_summary['total']),
          'files': per_file,
      }],
  }

  return json.dumps(summary)
|
| 41 |
+
|
| 42 |
+
|
| 43 |
+
def file_summary(nyc_file_summary):
  """Returns a summary for a given file in the nyc JSON summary report.

  Maps llvm-cov element names to their nyc counterparts; note that
  llvm-cov 'regions' corresponds to nyc 'branches'.
  """
  key_mapping = (('functions', 'functions'), ('lines', 'lines'),
                 ('regions', 'branches'))
  return {
      llvm_key: element_summary(nyc_file_summary[nyc_key])
      for llvm_key, nyc_key in key_mapping
  }
|
| 50 |
+
|
| 51 |
+
|
| 52 |
+
def element_summary(element):
  """Returns a summary of a coverage element in the nyc JSON summary
  of the file."""
  total = element['total']
  covered = element['covered']
  pct = element['pct']
  return {
      'count': total,
      'covered': covered,
      # Skipped elements count neither as covered nor as uncovered.
      'notcovered': total - covered - element['skipped'],
      # nyc reports 'Unknown' when there is nothing to measure; use 0 then.
      'percent': 0 if pct == 'Unknown' else pct,
  }
|
| 61 |
+
|
| 62 |
+
|
| 63 |
+
def main():
  """Produces a llvm-cov style JSON summary from a nyc JSON summary.

  argv[1] is the input nyc JSON summary path, argv[2] the output path.
  Returns a process exit code (0 on success, 1 on usage error).
  """
  if len(sys.argv) != 3:
    sys.stderr.write('Usage: %s <path_to_nyc_json_summary> <out_path_json>\n' %
                     sys.argv[0])
    return 1

  in_path, out_path = sys.argv[1], sys.argv[2]
  with open(in_path, 'r') as nyc_file:
    converted = convert(json.load(nyc_file))
  with open(out_path, 'w') as out_file:
    out_file.write(converted)

  return 0
|
| 77 |
+
|
| 78 |
+
|
| 79 |
+
# Propagate main()'s return value as the process exit status.
if __name__ == '__main__':
  sys.exit(main())
|
local-test-commons-compress-delta-03/fuzz-tooling/infra/base-images/base-runner/profraw_update.py
ADDED
|
@@ -0,0 +1,182 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/usr/bin/env python3
|
| 2 |
+
# Copyright 2021 Google LLC
|
| 3 |
+
#
|
| 4 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 5 |
+
# you may not use this file except in compliance with the License.
|
| 6 |
+
# You may obtain a copy of the License at
|
| 7 |
+
#
|
| 8 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 9 |
+
#
|
| 10 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 11 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 12 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 13 |
+
# See the License for the specific language governing permissions and
|
| 14 |
+
# limitations under the License.
|
| 15 |
+
#
|
| 16 |
+
################################################################################
|
| 17 |
+
"""Helper script for upgrading a profraw file to latest version."""
|
| 18 |
+
|
| 19 |
+
from collections import namedtuple
|
| 20 |
+
import struct
|
| 21 |
+
import subprocess
|
| 22 |
+
import sys
|
| 23 |
+
|
| 24 |
+
# First 16 bytes of any profraw file: 64-bit magic followed by the format
# version.
HeaderGeneric = namedtuple('HeaderGeneric', 'magic version')
# Header layout for profraw format version 9: twelve little-endian u64
# fields, in the exact on-disk order (field order matters for unpacking).
HeaderVersion9 = namedtuple(
    'HeaderVersion9',
    'BinaryIdsSize DataSize PaddingBytesBeforeCounters CountersSize \
PaddingBytesAfterCounters NumBitmapBytes PaddingBytesAfterBitmapBytes NamesSize CountersDelta BitmapDelta NamesDelta ValueKindLast'
)

# Magic number identifying a (little-endian) profraw file.
PROFRAW_MAGIC = 0xff6c70726f667281
|
| 32 |
+
|
| 33 |
+
|
| 34 |
+
def relativize_address(data, offset, databegin, sect_prf_cnts, sect_prf_data):
  """Turns an absolute address into a relative one, rewriting it in place.

  Reads the u64 at `offset` in `data`; when it falls inside
  [sect_prf_cnts, sect_prf_data) it is replaced by its offset from
  `databegin` (mod 2**64). Returns True when data was rewritten,
  False when left untouched.
  """
  (absolute,) = struct.unpack('Q', data[offset:offset + 8])
  if not sect_prf_cnts <= absolute < sect_prf_data:
    # Not an address inside the profile sections; nothing to rewrite.
    return False
  relative = (absolute - databegin) & 0xffffffffffffffff
  data[offset:offset + 8] = struct.pack('Q', relative)
  return True
|
| 47 |
+
|
| 48 |
+
|
| 49 |
+
def upgrade(data, sect_prf_cnts, sect_prf_data):
  """Upgrades profraw data to the version-9 layout, knowing the sections
  addresses.

  Args:
    data: bytearray holding the raw profile file contents.
    sect_prf_cnts: load address of the __llvm_prf_cnts ELF section.
    sect_prf_data: load address of the __llvm_prf_data ELF section.

  Returns:
    The upgraded profile contents.

  Raises:
    Exception: on a bad magic number or an unhandled source version.
  """
  generic_header = HeaderGeneric._make(struct.unpack('QQ', data[:16]))
  if generic_header.magic != PROFRAW_MAGIC:
    raise Exception('Bad magic.')
  base_version = generic_header.version
  # BUGFIX: `was8` was used in the relativization loop below without ever
  # being defined, raising a NameError for version 5/7 inputs. Record the
  # original version here. (Version-8 inputs return before that loop, so
  # the flag is False whenever the loop actually runs.)
  was8 = base_version == 8

  if base_version >= 9:
    # Nothing to do.
    return data
  if base_version < 5 or base_version == 6:
    raise Exception('Unhandled version.')

  if generic_header.version == 5:
    generic_header = generic_header._replace(version=7)
    # Upgrade from version 5 to 7 by adding binaryids field.
    data = data[:8] + struct.pack('Q', generic_header.version) + struct.pack(
        'Q', 0) + data[16:]
  if generic_header.version == 7:
    # cf https://reviews.llvm.org/D111123
    generic_header = generic_header._replace(version=8)
    data = data[:8] + struct.pack('Q', generic_header.version) + data[16:]
  if generic_header.version == 8:
    # see https://reviews.llvm.org/D138846
    generic_header = generic_header._replace(version=9)
    # Upgrade from version 8 to 9 by adding NumBitmapBytes,
    # PaddingBytesAfterBitmapBytes and BitmapDelta fields.
    data = data[:8] + struct.pack(
        'Q', generic_header.version) + data[16:56] + struct.pack(
            'QQ', 0, 0) + data[56:72] + struct.pack('Q', 0) + data[72:]

  v9_header = HeaderVersion9._make(struct.unpack('QQQQQQQQQQQQ', data[16:112]))

  if base_version <= 8 and v9_header.BinaryIdsSize % 8 != 0:
    # Adds padding for binary ids.
    # cf commit b9f547e8e51182d32f1912f97a3e53f4899ea6be
    # cf https://reviews.llvm.org/D110365
    padlen = 8 - (v9_header.BinaryIdsSize % 8)
    # NOTE(review): the padded size is computed into v7_header but the
    # unpadded size is what gets written back to the header (a no-op write);
    # v7_header is otherwise unused — confirm intent before changing.
    v7_header = v9_header._replace(BinaryIdsSize=v9_header.BinaryIdsSize +
                                   padlen)
    data = data[:16] + struct.pack('Q', v9_header.BinaryIdsSize) + data[24:]
    data = data[:112 + v9_header.BinaryIdsSize] + bytes(
        padlen) + data[112 + v9_header.BinaryIdsSize:]

  if base_version <= 8:
    # Widen every per-function record from the pre-9 layout to the v9 one.
    offset = 112 + v9_header.BinaryIdsSize
    for d in range(v9_header.DataSize):
      # Add BitmapPtr and aligned u32(NumBitmapBytes)
      data = data[:offset + 3 * 8] + struct.pack(
          'Q', 0) + data[offset + 3 * 8:offset + 6 * 8] + struct.pack(
              'Q', 0) + data[offset + 6 * 8:]
      # Each earlier record grew by 16 bytes, so rebase this record's
      # counter pointer accordingly.
      value = struct.unpack('Q',
                            data[offset + 2 * 8:offset + 3 * 8])[0] - 16 * d
      data = data[:offset + 2 * 8] + struct.pack('Q',
                                                 value) + data[offset + 3 * 8:]
      offset += 8 * 8

  if base_version >= 8:
    # Nothing more to do.
    return data

  # Last changes are related to the bump from 7 to version 8 making
  # CountersPtr relative.
  dataref = sect_prf_data
  # 80 is offset of CountersDelta.
  if not relativize_address(data, 80, dataref, sect_prf_cnts, sect_prf_data):
    return data

  offset = 112 + v9_header.BinaryIdsSize
  # This also works for C+Rust binaries compiled with
  # clang-14/rust-nightly-clang-13.
  for _ in range(v9_header.DataSize):
    # 16 is the offset of CounterPtr in ProfrawData structure.
    relativize_address(data, offset + 16, dataref, sect_prf_cnts, sect_prf_data)
    # We need this because of CountersDelta -= sizeof(*SrcData);
    # seen in __llvm_profile_merge_from_buffer.
    dataref += 44 + 2 * (v9_header.ValueKindLast + 1)
    if was8:
      # profraw9 added RelativeBitmapPtr and NumBitmapBytes
      # (8+4 rounded up to 16).
      dataref -= 16
    # This is the size of one ProfrawData structure.
    offset += 44 + 2 * (v9_header.ValueKindLast + 1)

  return data
|
| 131 |
+
|
| 132 |
+
|
| 133 |
+
def main():
  """Helper script for upgrading a profraw file to latest version.

  Usage: profraw_update.py <binary> [-i | -o <out>] <profraw>...
  With -i each input file is rewritten in place; with -o the (single)
  output path is <out>; otherwise output goes to default.profup.
  Returns a process exit code (0 on success).
  """
  if len(sys.argv) < 3:
    sys.stderr.write('Usage: %s <binary> options? <profraw>...\n' % sys.argv[0])
    return 1

  # First find llvm profile sections addresses in the elf, quick and dirty.
  process = subprocess.Popen(['readelf', '-S', sys.argv[1]],
                             stdout=subprocess.PIPE)
  output, _ = process.communicate()
  # BUGFIX: stderr was not piped, so communicate() always returned None for
  # it and the old `if err:` check could never fire. Use the exit status.
  if process.returncode != 0:
    print('readelf failed')
    return 2
  sect_prf_cnts = None
  sect_prf_data = None
  for line in output.split(b'\n'):
    if b'__llvm_prf_cnts' in line:
      sect_prf_cnts = int(line.split()[3], 16)
    elif b'__llvm_prf_data' in line:
      sect_prf_data = int(line.split()[3], 16)
  if sect_prf_cnts is None or sect_prf_data is None:
    # BUGFIX: these were previously left unbound when readelf output did not
    # contain the sections, causing a NameError further down.
    print('could not find llvm profile sections')
    return 2

  out_name = "default.profup"
  in_place = False
  start = 2
  if sys.argv[2] == "-i":
    in_place = True
    start = start + 1
  elif sys.argv[2] == "-o":
    out_name = sys.argv[3]
    start = 4

  if len(sys.argv) < start:
    sys.stderr.write('Usage: %s <binary> options <profraw>...\n' % sys.argv[0])
    return 1

  for i in range(start, len(sys.argv)):
    # Then open and read the input profraw file.
    with open(sys.argv[i], 'rb') as input_file:
      profraw_base = bytearray(input_file.read())
    # Do the upgrade, returning a bytes object.
    profraw_latest = upgrade(profraw_base, sect_prf_cnts, sect_prf_data)
    # Write the output to the file given to the command line.
    if in_place:
      out_name = sys.argv[i]
    with open(out_name, 'wb') as output_file:
      output_file.write(profraw_latest)

  return 0
|
| 179 |
+
|
| 180 |
+
|
| 181 |
+
# Propagate main()'s return value as the process exit status.
if __name__ == '__main__':
  sys.exit(main())
|
local-test-commons-compress-delta-03/fuzz-tooling/infra/base-images/base-runner/python_coverage_runner_help.py
ADDED
|
@@ -0,0 +1,181 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2022 Google LLC
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
"""
|
| 15 |
+
Helper to manage coverage.py related operations. Does two main
|
| 16 |
+
things: (1) pass commands into the coverage.py library and (2)
|
| 17 |
+
translate .coverage created from a pyinstaller executable into
|
| 18 |
+
paths that match local files. This is needed for html report creation.
|
| 19 |
+
"""
|
| 20 |
+
import os
|
| 21 |
+
import re
|
| 22 |
+
import json
|
| 23 |
+
import sys
|
| 24 |
+
from coverage.cmdline import main as coverage_main
|
| 25 |
+
from coverage.data import CoverageData
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
def should_exclude_file(filepath):
  """Returns whether the path should be excluded from the coverage report."""
  # Skip all atheris code and all PyInstaller modules.
  if 'atheris' in filepath or 'PyInstaller' in filepath:
    return True

  # Filter out the standard python libraries, while still keeping
  # installed packages (site-packages / dist-packages).
  in_stdlib = ('/usr/local/lib/python' in filepath and
               'site-packages' not in filepath and
               'dist-packages' not in filepath)
  return in_stdlib
|
| 44 |
+
|
| 45 |
+
|
| 46 |
+
def translate_lines(cov_data, new_cov_data, all_file_paths):
  """
  Translate lines in a .coverage file created by coverage.py such that
  the file paths points to local files instead. This is needed when
  collecting coverage from executables created by pyinstaller.
  """
  for measured_path in cov_data.measured_files():
    relative_path = measured_path
    # Strip the pyinstaller /tmp/_MEIxxxx extraction prefix, if present.
    if relative_path.startswith('/tmp/_MEI'):
      relative_path = '/'.join(relative_path.split('/')[3:])
    if relative_path.startswith('/out/'):
      relative_path = relative_path.replace('/out/', '')

    # Check if this file exists in our file paths:
    for candidate_path in all_file_paths:
      if should_exclude_file(candidate_path):
        continue
      if candidate_path.endswith(relative_path):
        print('Found matching: %s' % (candidate_path))
        new_cov_data.add_lines(
            {candidate_path: cov_data.lines(measured_path)})
|
| 67 |
+
|
| 68 |
+
|
| 69 |
+
def translate_coverage(all_file_paths):
  """
  Translate pyinstaller-generated file paths in .coverage (produced by
  coverage.py) into local file paths. Place result in .new_coverage.
  """
  source_data = CoverageData('.coverage')
  translated_data = CoverageData('.new_coverage')

  # Load the raw data, rewrite the paths, then persist the result.
  source_data.read()
  translate_lines(source_data, translated_data, all_file_paths)
  translated_data.write()
|
| 81 |
+
|
| 82 |
+
|
| 83 |
+
def convert_coveragepy_cov_to_summary_json(src, dst):
  """
  Converts a json file produced by coveragepy into a summary.json file
  similar to llvm-cov output. `src` is the source coveragepy json file,
  `dst` is the destination json file, which will be overwritten.

  Only line coverage is available from coveragepy; branch/function/region
  totals are emitted as zeros to keep the llvm-cov schema.
  """
  dst_dict = {'data': [{'files': {}}]}
  lines_covered = 0
  lines_count = 0
  with open(src, 'r') as src_f:
    src_json = json.load(src_f)
    if 'files' in src_json:
      for elem in src_json.get('files'):
        if 'summary' not in src_json['files'][elem]:
          continue
        src_dict = src_json['files'][elem]['summary']
        count = src_dict['covered_lines'] + src_dict['missing_lines']
        covered = src_dict['covered_lines']
        notcovered = src_dict['missing_lines']
        # coveragepy reports percent_covered on a 0..100 scale.
        percent = src_dict['percent_covered']

        # Accumulate line coverage
        lines_covered += covered
        lines_count += count

        dst_dict['data'][0]['files'][elem] = {
            'summary': {
                'lines': {
                    'count': count,
                    'covered': covered,
                    'notcovered': notcovered,
                    'percent': percent
                }
            }
        }
  if lines_count > 0:
    # BUGFIX: this was a 0..1 fraction while the per-file 'percent' values
    # are on a 0..100 scale; scale the total to match.
    lines_covered_percent = 100.0 * lines_covered / lines_count
  else:
    lines_covered_percent = 0.0
  dst_dict['data'][0]['totals'] = {
      'branches': {
          'count': 0,
          'covered': 0,
          'notcovered': 0,
          'percent': 0.0
      },
      'functions': {
          'count': 0,
          'covered': 0,
          'percent': 0.0
      },
      'instantiations': {
          'count': 0,
          'covered': 0,
          'percent': 0.0
      },
      'lines': {
          'count': lines_count,
          'covered': lines_covered,
          'percent': lines_covered_percent
      },
      'regions': {
          'count': 0,
          'covered': 0,
          'notcovered': 0,
          'percent': 0.0
      }
  }

  with open(dst, 'w') as dst_f:
    dst_f.write(json.dumps(dst_dict))
|
| 154 |
+
|
| 155 |
+
|
| 156 |
+
def main():
  """
  Main handler. Dispatches on argv[1]: 'translate' rewrites .coverage
  paths to local files; 'convert-to-summary-json' converts a coveragepy
  json report; anything else is forwarded to coverage.py's own CLI.
  """
  command = sys.argv[1]
  if command == 'translate':
    print('Translating the coverage')
    files_path = sys.argv[2]
    all_file_paths = [
        os.path.abspath(os.path.join(walk_root, file_name))
        for walk_root, _, file_names in os.walk(files_path)
        for file_name in file_names
    ]
    print('Done with path walk')
    translate_coverage(all_file_paths)
  elif command == 'convert-to-summary-json':
    convert_coveragepy_cov_to_summary_json(sys.argv[2], sys.argv[3])
  else:
    # Pass commands into coverage package
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(coverage_main())
|
| 178 |
+
|
| 179 |
+
|
| 180 |
+
# Allow use both as a script and as an importable module.
if __name__ == '__main__':
  main()
|
local-test-commons-compress-delta-03/fuzz-tooling/infra/base-images/base-runner/rcfilt
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/bin/bash -u
|
| 2 |
+
# Copyright 2020 Google Inc.
|
| 3 |
+
#
|
| 4 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 5 |
+
# you may not use this file except in compliance with the License.
|
| 6 |
+
# You may obtain a copy of the License at
|
| 7 |
+
#
|
| 8 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 9 |
+
#
|
| 10 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 11 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 12 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 13 |
+
# See the License for the specific language governing permissions and
|
| 14 |
+
# limitations under the License.
|
| 15 |
+
#
|
| 16 |
+
# Symbol demangling for both C++ and Rust
|
| 17 |
+
#
|
| 18 |
+
################################################################################
|
| 19 |
+
|
| 20 |
+
# Demangle symbols from stdin: rustfilt handles Rust mangling first, then
# c++filt (-n: do not strip a leading underscore) handles C++ mangling.
rustfilt | c++filt -n
|
local-test-commons-compress-delta-03/fuzz-tooling/infra/base-images/base-runner/reproduce
ADDED
|
@@ -0,0 +1,34 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/bin/bash -eux
|
| 2 |
+
# Copyright 2016 Google Inc.
|
| 3 |
+
#
|
| 4 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 5 |
+
# you may not use this file except in compliance with the License.
|
| 6 |
+
# You may obtain a copy of the License at
|
| 7 |
+
#
|
| 8 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 9 |
+
#
|
| 10 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 11 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 12 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 13 |
+
# See the License for the specific language governing permissions and
|
| 14 |
+
# limitations under the License.
|
| 15 |
+
#
|
| 16 |
+
################################################################################
|
| 17 |
+
|
| 18 |
+
# Reproduce a crash: run a single fuzz target on one testcase.
# Usage: reproduce <fuzzer_name> [extra_fuzzer_args...]
FUZZER=$1
shift

# Default testcase location unless the caller mounted one elsewhere.
if [ ! -v TESTCASE ]; then
  TESTCASE="/testcase"
fi

# BUGFIX: quote expansions so paths containing spaces or glob
# characters do not word-split or expand.
if [ ! -f "$TESTCASE" ]; then
  echo "Error: $TESTCASE not found, use: docker run -v <path>:$TESTCASE ..."
  exit 1
fi

# Run interactively, with libFuzzer, and without unpacking a seed corpus:
# a single-input reproduction needs none of that.
export RUN_FUZZER_MODE="interactive"
export FUZZING_ENGINE="libfuzzer"
export SKIP_SEED_CORPUS="1"

run_fuzzer "$FUZZER" "$@" "$TESTCASE"
|
local-test-commons-compress-delta-03/fuzz-tooling/infra/base-images/base-runner/run_fuzzer
ADDED
|
@@ -0,0 +1,228 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/bin/bash -eu
|
| 2 |
+
# Copyright 2024 Google LLC
|
| 3 |
+
#
|
| 4 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 5 |
+
# you may not use this file except in compliance with the License.
|
| 6 |
+
# You may obtain a copy of the License at
|
| 7 |
+
#
|
| 8 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 9 |
+
#
|
| 10 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 11 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 12 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 13 |
+
# See the License for the specific language governing permissions and
|
| 14 |
+
# limitations under the License.
|
| 15 |
+
#
|
| 16 |
+
################################################################################
|
| 17 |
+
|
| 18 |
+
# Fuzzer runner. Appends .options arguments and seed corpus to users args.
|
| 19 |
+
# Usage: $0 <fuzzer_name> <fuzzer_args>
|
| 20 |
+
|
| 21 |
+
# Lower ASLR entropy; high vm.mmap_rnd_bits values are known to break
# sanitizer shadow-memory mappings. (NOTE(review): requires a privileged
# container — confirm.)
sysctl -w vm.mmap_rnd_bits=28

export PATH=$OUT:$PATH
cd $OUT

# Optional debugger prefix for the fuzzer invocation; empty by default.
DEBUGGER=${DEBUGGER:-}

FUZZER=$1
shift

# This env var is set by CIFuzz. CIFuzz fills this directory with the corpus
# from ClusterFuzz.
CORPUS_DIR=${CORPUS_DIR:-}
if [ -z "$CORPUS_DIR" ]
then
  CORPUS_DIR="/tmp/${FUZZER}_corpus"
  rm -rf $CORPUS_DIR && mkdir -p $CORPUS_DIR
fi

SANITIZER=${SANITIZER:-}
if [ -z $SANITIZER ]; then
  # If $SANITIZER is not specified (e.g. calling from `reproduce` command), it
  # is not important and can be set to any value.
  SANITIZER="default"
fi

# In interactive mode artifacts go under $OUT (visible to the host mount);
# otherwise they are kept in /tmp.
if [[ "$RUN_FUZZER_MODE" = interactive ]]; then
  FUZZER_OUT="$OUT/${FUZZER}_${FUZZING_ENGINE}_${SANITIZER}_out"
else
  FUZZER_OUT="/tmp/${FUZZER}_${FUZZING_ENGINE}_${SANITIZER}_out"
fi
|
| 52 |
+
|
| 53 |
+
# Prints the engine-specific dictionary flag for $FUZZER, or nothing.
# The dictionary path comes from a `dict = ...` line in $FUZZER.options,
# falling back to $FUZZER.dict when that file exists.
function get_dictionary() {
  local options_file="$FUZZER.options"
  local dict_file="$FUZZER.dict"
  local dict=""
  if [[ -f "$options_file" ]]; then
    # Take the value of the last `dict = ...` line in the options file.
    dict=$(sed -n 's/^\s*dict\s*=\s*\(.*\)/\1/p' "$options_file" | tail -1)
  fi
  if [[ -z "$dict" && -f "$dict_file" ]]; then
    dict="$dict_file"
  fi
  # No dictionary found: print nothing.
  [[ -z "$dict" ]] && return
  # Each engine spells its dictionary flag differently.
  if [[ "$FUZZING_ENGINE" = "libfuzzer" ]]; then
    printf -- "-dict=%s" "$dict"
  elif [[ "$FUZZING_ENGINE" = "afl" ]]; then
    printf -- "-x %s" "$dict"
  elif [[ "$FUZZING_ENGINE" = "honggfuzz" ]]; then
    printf -- "--dict %s" "$dict"
  elif [[ "$FUZZING_ENGINE" = "centipede" ]]; then
    printf -- "--dictionary %s" "$dict"
  else
    printf "Unexpected FUZZING_ENGINE: $FUZZING_ENGINE, ignoring\n" >&2
  fi
}
|
| 76 |
+
|
| 77 |
+
# For centipede only: prints an --extra_binaries flag pointing at the
# sanitized build of the target, when one exists. Prints nothing for all
# other engines.
function get_extra_binaries() {
  [[ "$FUZZING_ENGINE" != "centipede" ]] && return

  extra_binaries="$OUT/__centipede_${SANITIZER}/${FUZZER}"
  if compgen -G "$extra_binaries" >> /dev/null; then
    # NOTE(review): the escaped quotes become literal characters in the
    # output; presumably intended for the later expansion of CMD_LINE —
    # confirm before changing.
    printf -- "--extra_binaries %s" \""$extra_binaries\""
  fi

}
|
| 86 |
+
|
| 87 |
+
# Start each run with a clean artifact directory.
rm -rf $FUZZER_OUT && mkdir -p $FUZZER_OUT

SEED_CORPUS="${FUZZER}_seed_corpus.zip"

# TODO: Investigate why this code block is skipped
# by all default fuzzers in bad_build_check.
# They all set SKIP_SEED_CORPUS=1.
if [ -f $SEED_CORPUS ] && [ -z ${SKIP_SEED_CORPUS:-} ]; then
  echo "Using seed corpus: $SEED_CORPUS"
  unzip -o -d ${CORPUS_DIR}/ $SEED_CORPUS > /dev/null
fi

OPTIONS_FILE="${FUZZER}.options"
CUSTOM_LIBFUZZER_OPTIONS=""

# Merge per-target sanitizer options from the .options file into the
# corresponding *_OPTIONS environment variables.
if [ -f $OPTIONS_FILE ]; then
  custom_asan_options=$(parse_options.py $OPTIONS_FILE asan)
  if [ ! -z $custom_asan_options ]; then
    export ASAN_OPTIONS="$ASAN_OPTIONS:$custom_asan_options"
  fi

  custom_msan_options=$(parse_options.py $OPTIONS_FILE msan)
  if [ ! -z $custom_msan_options ]; then
    export MSAN_OPTIONS="$MSAN_OPTIONS:$custom_msan_options"
  fi

  custom_ubsan_options=$(parse_options.py $OPTIONS_FILE ubsan)
  if [ ! -z $custom_ubsan_options ]; then
    export UBSAN_OPTIONS="$UBSAN_OPTIONS:$custom_ubsan_options"
  fi

  # Extra libFuzzer flags from the [libfuzzer] section, appended to the
  # command line later.
  CUSTOM_LIBFUZZER_OPTIONS=$(parse_options.py $OPTIONS_FILE libfuzzer)
fi
|
| 120 |
+
|
| 121 |
+
if [[ "$FUZZING_ENGINE" = afl ]]; then
|
| 122 |
+
|
| 123 |
+
# Set afl++ environment options.
|
| 124 |
+
export ASAN_OPTIONS="$ASAN_OPTIONS:abort_on_error=1:symbolize=0:detect_odr_violation=0:"
|
| 125 |
+
export MSAN_OPTIONS="$MSAN_OPTIONS:exit_code=86:symbolize=0"
|
| 126 |
+
export UBSAN_OPTIONS="$UBSAN_OPTIONS:symbolize=0"
|
| 127 |
+
export AFL_I_DONT_CARE_ABOUT_MISSING_CRASHES=1
|
| 128 |
+
export AFL_SKIP_CPUFREQ=1
|
| 129 |
+
export AFL_TRY_AFFINITY=1
|
| 130 |
+
export AFL_FAST_CAL=1
|
| 131 |
+
export AFL_CMPLOG_ONLY_NEW=1
|
| 132 |
+
export AFL_FORKSRV_INIT_TMOUT=30000
|
| 133 |
+
export AFL_IGNORE_PROBLEMS=1
|
| 134 |
+
export AFL_IGNORE_UNKNOWN_ENVS=1
|
| 135 |
+
|
| 136 |
+
# If $OUT/afl_cmplog.txt is present this means the target was compiled for
|
| 137 |
+
# CMPLOG. So we have to add the proper parameters to afl-fuzz.
|
| 138 |
+
test -e "$OUT/afl_cmplog.txt" && AFL_FUZZER_ARGS="$AFL_FUZZER_ARGS -c $OUT/$FUZZER"
|
| 139 |
+
|
| 140 |
+
# If $OUT/afl++.dict we load it as a dictionary for afl-fuzz.
|
| 141 |
+
test -e "$OUT/afl++.dict" && AFL_FUZZER_ARGS="$AFL_FUZZER_ARGS -x $OUT/afl++.dict"
|
| 142 |
+
|
| 143 |
+
# Ensure timeout is a bit larger than 1sec as some of the OSS-Fuzz fuzzers
|
| 144 |
+
# are slower than this.
|
| 145 |
+
AFL_FUZZER_ARGS="$AFL_FUZZER_ARGS -t 5000+"
|
| 146 |
+
|
| 147 |
+
# AFL expects at least 1 file in the input dir.
|
| 148 |
+
echo input > ${CORPUS_DIR}/input
|
| 149 |
+
|
| 150 |
+
CMD_LINE="$OUT/afl-fuzz $AFL_FUZZER_ARGS -i $CORPUS_DIR -o $FUZZER_OUT $(get_dictionary) $* -- $OUT/$FUZZER"
|
| 151 |
+
|
| 152 |
+
echo afl++ setup:
|
| 153 |
+
env|grep AFL_
|
| 154 |
+
cat "$OUT/afl_options.txt"
|
| 155 |
+
|
| 156 |
+
elif [[ "$FUZZING_ENGINE" = honggfuzz ]]; then
|
| 157 |
+
|
| 158 |
+
# Honggfuzz expects at least 1 file in the input dir.
|
| 159 |
+
echo input > $CORPUS_DIR/input
|
| 160 |
+
# --exit_upon_crash: exit whith a first crash seen
|
| 161 |
+
# -V: verify crashes
|
| 162 |
+
# -R (report): save report file to this location
|
| 163 |
+
# -W (working dir): where the crashes go
|
| 164 |
+
# -v (verbose): don't use VTE UI, just stderr
|
| 165 |
+
# -z: use software-instrumentation of clang (trace-pc-guard....)
|
| 166 |
+
# -P: use persistent mode of fuzzing (i.e. LLVMFuzzerTestOneInput)
|
| 167 |
+
# -f: location of the initial (and destination) file corpus
|
| 168 |
+
# -n: number of fuzzing threads (and processes)
|
| 169 |
+
CMD_LINE="$OUT/honggfuzz -n 1 --exit_upon_crash -V -R /tmp/${FUZZER}_honggfuzz.report -W $FUZZER_OUT -v -z -P -f \"$CORPUS_DIR\" $(get_dictionary) $* -- \"$OUT/$FUZZER\""
|
| 170 |
+
|
| 171 |
+
if [[ $(LC_ALL=C grep -P "\x01_LIBHFUZZ_NETDRIVER_BINARY_SIGNATURE_\x02\xFF" "$FUZZER" ) ]]; then
|
| 172 |
+
# Honggfuzz Netdriver port. This must match the port in Clusterfuzz.
|
| 173 |
+
export HFND_TCP_PORT=8666
|
| 174 |
+
fi
|
| 175 |
+
elif [[ "$FUZZING_ENGINE" = centipede ]]; then
|
| 176 |
+
|
| 177 |
+
# Create the work and corpus directory for Centipede.
|
| 178 |
+
CENTIPEDE_WORKDIR="${CENTIPEDE_WORKDIR:-$OUT}"
|
| 179 |
+
|
| 180 |
+
# Centipede only saves crashes to crashes/ in workdir.
|
| 181 |
+
rm -rf $FUZZER_OUT
|
| 182 |
+
|
| 183 |
+
# --workdir: Dir that stores corpus&features in Centipede's own format.
|
| 184 |
+
# --corpus_dir: Location of the initial (and destination) file corpus.
|
| 185 |
+
# --fork_server: Execute the target(s) via the fork server.
|
| 186 |
+
# Run in fork mode to continue fuzzing indefinitely in case of
|
| 187 |
+
# OOMs, timeouts, and crashes.
|
| 188 |
+
# --exit_on_crash=1: Exit when the first crash is found.
|
| 189 |
+
# --timeout=1200: The process that executes target binary will abort
|
| 190 |
+
# if an input runs >= 1200s.
|
| 191 |
+
# --rss_limit_mb=4096: Limit target RSS to 4 GiB.
|
| 192 |
+
# --address_space_limit_mb=5120: Limit the address space to 5 GiB.
|
| 193 |
+
# --binary: The target binary under test without sanitizer.
|
| 194 |
+
# --extra_binary: The target binaries under test with sanitizers.
|
| 195 |
+
CMD_LINE="$OUT/centipede --workdir=$CENTIPEDE_WORKDIR --corpus_dir=\"$CORPUS_DIR\" --fork_server=1 --exit_on_crash=1 --timeout=1200 --rss_limit_mb=4096 --address_space_limit_mb=5120 $(get_dictionary) --binary=\"$OUT/${FUZZER}\" $(get_extra_binaries) $*"
|
| 196 |
+
else
|
| 197 |
+
|
| 198 |
+
CMD_LINE="$OUT/$FUZZER $FUZZER_ARGS $*"
|
| 199 |
+
|
| 200 |
+
if [ -z ${SKIP_SEED_CORPUS:-} ]; then
|
| 201 |
+
CMD_LINE="$CMD_LINE $CORPUS_DIR"
|
| 202 |
+
fi
|
| 203 |
+
|
| 204 |
+
if [[ ! -z ${CUSTOM_LIBFUZZER_OPTIONS} ]]; then
|
| 205 |
+
CMD_LINE="$CMD_LINE $CUSTOM_LIBFUZZER_OPTIONS"
|
| 206 |
+
fi
|
| 207 |
+
|
| 208 |
+
if [[ ! "$CMD_LINE" =~ "-dict=" ]]; then
|
| 209 |
+
if [ -f "$FUZZER.dict" ]; then
|
| 210 |
+
CMD_LINE="$CMD_LINE -dict=$FUZZER.dict"
|
| 211 |
+
fi
|
| 212 |
+
fi
|
| 213 |
+
|
| 214 |
+
CMD_LINE="$CMD_LINE < /dev/null"
|
| 215 |
+
|
| 216 |
+
fi
|
| 217 |
+
|
| 218 |
+
echo $CMD_LINE
|
| 219 |
+
|
| 220 |
+
# Unset OUT so the fuzz target can't rely on it.
|
| 221 |
+
unset OUT
|
| 222 |
+
|
| 223 |
+
if [ ! -z "$DEBUGGER" ]; then
|
| 224 |
+
CMD_LINE="$DEBUGGER $CMD_LINE"
|
| 225 |
+
fi
|
| 226 |
+
|
| 227 |
+
bash -c "$CMD_LINE"
|
| 228 |
+
|
local-test-commons-compress-delta-03/fuzz-tooling/infra/base-images/base-runner/ruzzy
ADDED
|
@@ -0,0 +1,19 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/usr/bin/env bash
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################

# Launches ruby with the Ruzzy ASAN runtime preloaded so Ruby fuzz targets
# run under AddressSanitizer.
# Bug fix: arguments are forwarded with quoted "$@"; the original unquoted
# $@ word-split any argument containing whitespace.
ASAN_OPTIONS="allocator_may_return_null=1:detect_leaks=0:use_sigaltstack=0" LD_PRELOAD=$(ruby -e 'require "ruzzy"; print Ruzzy::ASAN_PATH') \
ruby "$@"
|
local-test-commons-compress-delta-03/fuzz-tooling/infra/base-images/base-runner/targets_list
ADDED
|
@@ -0,0 +1,10 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/bin/bash

# Emits (one per line) the basename of every fuzz-target binary under $OUT.
# A file is considered a fuzz target when it:
#   * is an executable regular file,
#   * is not a shared object (*.so) and not the jazzer_driver helper,
#   * is an ELF binary or a shell script (according to `file`),
#   * contains the string "LLVMFuzzerTestOneInput".
# NOTE(review): the unquoted $(find ...) word-splits on whitespace; this
# assumes paths under $OUT contain no spaces -- confirm before reusing.
for binary in $(find $OUT/ -executable -type f); do
  # Shared libraries are not standalone targets.
  [[ "$binary" != *.so ]] || continue
  # Skip the Jazzer driver shipped alongside Java fuzz targets.
  [[ $(basename "$binary") != jazzer_driver* ]] || continue
  # Keep only ELF executables or shell-script wrappers.
  file "$binary" | grep -e ELF -e "shell script" > /dev/null 2>&1 || continue
  # Fuzz targets reference the libFuzzer entry point symbol.
  grep "LLVMFuzzerTestOneInput" "$binary" > /dev/null 2>&1 || continue

  basename "$binary"
done
|
local-test-commons-compress-delta-03/fuzz-tooling/infra/base-images/base-runner/test_one.py
ADDED
|
@@ -0,0 +1,47 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/usr/bin/env python3
|
| 2 |
+
# Copyright 2021 Google LLC
|
| 3 |
+
#
|
| 4 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 5 |
+
# you may not use this file except in compliance with the License.
|
| 6 |
+
# You may obtain a copy of the License at
|
| 7 |
+
#
|
| 8 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 9 |
+
#
|
| 10 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 11 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 12 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 13 |
+
# See the License for the specific language governing permissions and
|
| 14 |
+
# limitations under the License.
|
| 15 |
+
#
|
| 16 |
+
################################################################################
|
| 17 |
+
"""Does bad_build_check on a fuzz target in $OUT."""
|
| 18 |
+
import os
|
| 19 |
+
import sys
|
| 20 |
+
|
| 21 |
+
import test_all
|
| 22 |
+
|
| 23 |
+
|
| 24 |
+
def test_one(fuzz_target):
  """Runs bad_build_check on a single fuzz target.

  Args:
    fuzz_target: Name of the fuzz target binary, relative to $OUT.

  Returns:
    True when the check passes, False otherwise.
  """
  with test_all.use_different_out_dir():
    # Resolve the path only after entering the context manager, since it
    # presumably adjusts $OUT -- behavior matches the original ordering.
    target_path = os.path.join(os.environ['OUT'], fuzz_target)
    check_result = test_all.do_bad_build_check(target_path)
    if check_result.returncode == 0:
      return True
    # Surface the check's output so failures are actionable in the logs.
    sys.stdout.buffer.write(check_result.stdout + check_result.stderr + b'\n')
    return False
|
| 33 |
+
|
| 34 |
+
|
| 35 |
+
def main():
  """Does bad_build_check on one fuzz target.

  Returns:
    0 on success, 1 on failure (including incorrect usage).
  """
  if len(sys.argv) != 2:
    # Bug fix: the original passed '%d' and the program name as two separate
    # print() arguments, so the placeholder was never substituted (and %d is
    # the wrong specifier for a string in any case).
    print('Usage: %s <fuzz_target>' % sys.argv[0])
    return 1

  fuzz_target_binary = sys.argv[1]
  return 0 if test_one(fuzz_target_binary) else 1
|
| 44 |
+
|
| 45 |
+
|
| 46 |
+
if __name__ == '__main__':
|
| 47 |
+
sys.exit(main())
|
local-test-commons-compress-delta-03/fuzz-tooling/infra/cifuzz/actions/build_fuzzers/action.yml
ADDED
|
@@ -0,0 +1,55 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# action.yml
|
| 2 |
+
name: 'build-fuzzers'
|
| 3 |
+
description: "Builds an OSS-Fuzz project's fuzzers."
|
| 4 |
+
inputs:
|
| 5 |
+
oss-fuzz-project-name:
|
| 6 |
+
description: 'Name of the corresponding OSS-Fuzz project.'
|
| 7 |
+
required: true
|
| 8 |
+
language:
|
| 9 |
+
description: 'Programming language project is written in.'
|
| 10 |
+
required: false
|
| 11 |
+
default: 'c++'
|
| 12 |
+
dry-run:
|
| 13 |
+
description: 'If set, run the action without actually reporting a failure.'
|
| 14 |
+
default: false
|
| 15 |
+
allowed-broken-targets-percentage:
|
| 16 |
+
description: 'The percentage of broken targets allowed in bad_build_check.'
|
| 17 |
+
required: false
|
| 18 |
+
sanitizer:
|
| 19 |
+
description: 'The sanitizer to build the fuzzers with.'
|
| 20 |
+
default: 'address'
|
| 21 |
+
architecture:
|
| 22 |
+
description: 'The architecture used to build the fuzzers.'
|
| 23 |
+
default: 'x86_64'
|
| 24 |
+
project-src-path:
|
| 25 |
+
description: "The path to the project's source code checkout."
|
| 26 |
+
required: false
|
| 27 |
+
bad-build-check:
|
| 28 |
+
description: "Whether or not OSS-Fuzz's check for bad builds should be done."
|
| 29 |
+
required: false
|
| 30 |
+
default: true
|
| 31 |
+
keep-unaffected-fuzz-targets:
|
| 32 |
+
description: "Whether to keep unaffected fuzzers or delete them."
|
| 33 |
+
required: false
|
| 34 |
+
default: false
|
| 35 |
+
output-sarif:
|
| 36 |
+
description: "Whether to output fuzzing results to SARIF."
|
| 37 |
+
required: false
|
| 38 |
+
default: false
|
| 39 |
+
runs:
|
| 40 |
+
using: 'docker'
|
| 41 |
+
image: '../../../build_fuzzers.Dockerfile'
|
| 42 |
+
env:
|
| 43 |
+
OSS_FUZZ_PROJECT_NAME: ${{ inputs.oss-fuzz-project-name }}
|
| 44 |
+
LANGUAGE: ${{ inputs.language }}
|
| 45 |
+
DRY_RUN: ${{ inputs.dry-run}}
|
| 46 |
+
ALLOWED_BROKEN_TARGETS_PERCENTAGE: ${{ inputs.allowed-broken-targets-percentage}}
|
| 47 |
+
SANITIZER: ${{ inputs.sanitizer }}
|
| 48 |
+
ARCHITECTURE: ${{ inputs.architecture }}
|
| 49 |
+
PROJECT_SRC_PATH: ${{ inputs.project-src-path }}
|
| 50 |
+
LOW_DISK_SPACE: 'True'
|
| 51 |
+
BAD_BUILD_CHECK: ${{ inputs.bad-build-check }}
|
| 52 |
+
CIFUZZ_DEBUG: 'True'
|
| 53 |
+
CFL_PLATFORM: 'github'
|
| 54 |
+
KEEP_UNAFFECTED_FUZZ_TARGETS: ${{ inputs.keep-unaffected-fuzz-targets }}
|
| 55 |
+
OUTPUT_SARIF: ${{ inputs.output-sarif }}
|
local-test-commons-compress-delta-03/fuzz-tooling/infra/cifuzz/actions/run_fuzzers/action.yml
ADDED
|
@@ -0,0 +1,82 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# action.yml
|
| 2 |
+
name: 'run-fuzzers'
|
| 3 |
+
description: 'Runs fuzz target binaries for a specified length of time.'
|
| 4 |
+
inputs:
|
| 5 |
+
oss-fuzz-project-name:
|
| 6 |
+
description: 'The OSS-Fuzz project name.'
|
| 7 |
+
required: true
|
| 8 |
+
language:
|
| 9 |
+
description: 'Programming language project is written in.'
|
| 10 |
+
required: false
|
| 11 |
+
default: 'c++'
|
| 12 |
+
fuzz-seconds:
|
| 13 |
+
description: 'The total time allotted for fuzzing in seconds.'
|
| 14 |
+
required: true
|
| 15 |
+
default: 600
|
| 16 |
+
dry-run:
|
| 17 |
+
description: 'If set, run the action without actually reporting a failure.'
|
| 18 |
+
default: false
|
| 19 |
+
sanitizer:
|
| 20 |
+
description: 'The sanitizer to run the fuzzers with.'
|
| 21 |
+
default: 'address'
|
| 22 |
+
mode:
|
| 23 |
+
description: |
|
| 24 |
+
The mode to run the fuzzers with ("code-change", "batch", "coverage", or "prune").
|
| 25 |
+
"code-change" is for fuzzing a pull request or commit.
|
| 26 |
+
"batch" is for non-interactive fuzzing of an entire project.
|
| 27 |
+
"coverage" is for coverage generation.
|
| 28 |
+
"prune" is for corpus pruning.
|
| 29 |
+
required: false
|
| 30 |
+
default: 'code-change'
|
| 31 |
+
github-token:
|
| 32 |
+
description: |
|
| 33 |
+
Token for GitHub API. WARNING: THIS SHOULD NOT BE USED IN PRODUCTION YET
|
| 34 |
+
You should use "secrets.GITHUB_TOKEN" in your workflow file, do not
|
| 35 |
+
hardcode the token.
|
| 36 |
+
TODO(https://github.com/google/oss-fuzz/pull/5841#discussion_r639393361):
|
| 37 |
+
Document locking this down.
|
| 38 |
+
required: false
|
| 39 |
+
report-unreproducible-crashes:
|
| 40 |
+
description: 'If True, then unreproducible crashes will be reported.'
|
| 41 |
+
required: false
|
| 42 |
+
default: False
|
| 43 |
+
minimize-crashes:
|
| 44 |
+
description: 'If True, reportable crashes will be minimized.'
|
| 45 |
+
required: false
|
| 46 |
+
default: False
|
| 47 |
+
parallel-fuzzing:
|
| 48 |
+
description: "Whether to use all available cores for fuzzing."
|
| 49 |
+
required: false
|
| 50 |
+
default: false
|
| 51 |
+
output-sarif:
|
| 52 |
+
description: "Whether to output fuzzing results to SARIF."
|
| 53 |
+
required: false
|
| 54 |
+
default: false
|
| 55 |
+
report-timeouts:
|
| 56 |
+
description: "Whether to report fails due to timeout."
|
| 57 |
+
required: false
|
| 58 |
+
default: true
|
| 59 |
+
report-ooms:
|
| 60 |
+
description: "Whether to report fails due to OOM."
|
| 61 |
+
required: false
|
| 62 |
+
default: true
|
| 63 |
+
runs:
|
| 64 |
+
using: 'docker'
|
| 65 |
+
image: '../../../run_fuzzers.Dockerfile'
|
| 66 |
+
env:
|
| 67 |
+
OSS_FUZZ_PROJECT_NAME: ${{ inputs.oss-fuzz-project-name }}
|
| 68 |
+
LANGUAGE: ${{ inputs.language }}
|
| 69 |
+
FUZZ_SECONDS: ${{ inputs.fuzz-seconds }}
|
| 70 |
+
DRY_RUN: ${{ inputs.dry-run}}
|
| 71 |
+
SANITIZER: ${{ inputs.sanitizer }}
|
| 72 |
+
MODE: ${{ inputs.mode }}
|
| 73 |
+
GITHUB_TOKEN: ${{ inputs.github-token }}
|
| 74 |
+
LOW_DISK_SPACE: 'True'
|
| 75 |
+
REPORT_UNREPRODUCIBLE_CRASHES: ${{ inputs.report-unreproducible-crashes }}
|
| 76 |
+
MINIMIZE_CRASHES: ${{ inputs.minimize-crashes }}
|
| 77 |
+
CIFUZZ_DEBUG: 'True'
|
| 78 |
+
CFL_PLATFORM: 'github'
|
| 79 |
+
PARALLEL_FUZZING: ${{ inputs.parallel-fuzzing }}
|
| 80 |
+
OUTPUT_SARIF: ${{ inputs.output-sarif }}
|
| 81 |
+
REPORT_TIMEOUTS: ${{ inputs.report-timeouts }}
|
| 82 |
+
REPORT_OOMS: ${{ inputs.report-ooms}}
|
local-test-commons-compress-delta-03/fuzz-tooling/infra/cifuzz/cifuzz-base/Dockerfile
ADDED
|
@@ -0,0 +1,44 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2020 Google LLC
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
#
|
| 15 |
+
################################################################################
|
| 16 |
+
|
| 17 |
+
FROM ghcr.io/aixcc-finals/base-runner
|
| 18 |
+
|
| 19 |
+
RUN apt-get update && \
|
| 20 |
+
apt-get install -y systemd && \
|
| 21 |
+
wget https://download.docker.com/linux/ubuntu/dists/focal/pool/stable/amd64/docker-ce-cli_20.10.8~3-0~ubuntu-focal_amd64.deb -O /tmp/docker-ce.deb && \
|
| 22 |
+
dpkg -i /tmp/docker-ce.deb && \
|
| 23 |
+
rm /tmp/docker-ce.deb
|
| 24 |
+
|
| 25 |
+
ENV PATH=/opt/gcloud/google-cloud-sdk/bin/:$PATH
|
| 26 |
+
ENV OSS_FUZZ_ROOT=/opt/oss-fuzz
|
| 27 |
+
|
| 28 |
+
# Do this step before copying to make rebuilding faster when developing.
|
| 29 |
+
COPY ./infra/cifuzz/requirements.txt /tmp/requirements.txt
|
| 30 |
+
RUN python3 -m pip install -r /tmp/requirements.txt && rm /tmp/requirements.txt
|
| 31 |
+
|
| 32 |
+
ADD . ${OSS_FUZZ_ROOT}
|
| 33 |
+
# Don't use the default npm location since jazzer.js can break us.
|
| 34 |
+
# This means javascript needed by cifuzz/clusterfuzzlite must be executed in
|
| 35 |
+
# OSS_FUZZ_ROOT.
|
| 36 |
+
RUN cd ${OSS_FUZZ_ROOT} && npm install ${OSS_FUZZ_ROOT}/infra/cifuzz
|
| 37 |
+
|
| 38 |
+
|
| 39 |
+
ENV PYTHONUNBUFFERED=1
|
| 40 |
+
|
| 41 |
+
# Python file to execute when the docker container starts up.
|
| 42 |
+
# We can't use the env var $OSS_FUZZ_ROOT here. Since it's a constant env var,
|
| 43 |
+
# just expand to '/opt/oss-fuzz'.
|
| 44 |
+
ENTRYPOINT ["python3", "/opt/oss-fuzz/infra/cifuzz/cifuzz_combined_entrypoint.py"]
|
local-test-commons-compress-delta-03/fuzz-tooling/infra/cifuzz/external-actions/build_fuzzers/action.yml
ADDED
|
@@ -0,0 +1,75 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# action.yml
|
| 2 |
+
name: 'build-fuzzers'
|
| 3 |
+
description: "Builds an OSS-Fuzz project's fuzzers."
|
| 4 |
+
inputs:
|
| 5 |
+
language:
|
| 6 |
+
description: 'Programming language project is written in.'
|
| 7 |
+
required: false
|
| 8 |
+
default: 'c++'
|
| 9 |
+
dry-run:
|
| 10 |
+
description: 'If set, run the action without actually reporting a failure.'
|
| 11 |
+
default: false
|
| 12 |
+
allowed-broken-targets-percentage:
|
| 13 |
+
description: 'The percentage of broken targets allowed in bad_build_check.'
|
| 14 |
+
required: false
|
| 15 |
+
sanitizer:
|
| 16 |
+
description: 'The sanitizer to build the fuzzers with.'
|
| 17 |
+
default: 'address'
|
| 18 |
+
project-src-path:
|
| 19 |
+
description: "The path to the project's source code checkout."
|
| 20 |
+
required: false
|
| 21 |
+
bad-build-check:
|
| 22 |
+
description: "Whether or not OSS-Fuzz's check for bad builds should be done."
|
| 23 |
+
required: false
|
| 24 |
+
default: true
|
| 25 |
+
keep-unaffected-fuzz-targets:
|
| 26 |
+
description: "Whether to keep unaffected fuzzers or delete them."
|
| 27 |
+
required: false
|
| 28 |
+
default: false
|
| 29 |
+
storage-repo:
|
| 30 |
+
description: |
|
| 31 |
+
The git repo to use for storing certain artifacts from fuzzing.
|
| 32 |
+
required: false
|
| 33 |
+
storage-repo-branch:
|
| 34 |
+
description: |
|
| 35 |
+
The branch of the git repo to use for storing certain artifacts from
|
| 36 |
+
fuzzing.
|
| 37 |
+
required: false
|
| 38 |
+
storage-repo-branch-coverage:
|
| 39 |
+
description: |
|
| 40 |
+
The branch of the git repo to use for storing coverage reports.
|
| 41 |
+
required: false
|
| 42 |
+
upload-build:
|
| 43 |
+
description: |
|
| 44 |
+
If set, will upload the build.
|
| 45 |
+
default: false
|
| 46 |
+
github-token:
|
| 47 |
+
description: |
|
| 48 |
+
Token for GitHub API. WARNING: THIS SHOULD NOT BE USED IN PRODUCTION YET
|
| 49 |
+
You should use "secrets.GITHUB_TOKEN" in your workflow file, do not
|
| 50 |
+
hardcode the token.
|
| 51 |
+
TODO(https://github.com/google/oss-fuzz/pull/5841#discussion_r639393361):
|
| 52 |
+
Document locking this down.
|
| 53 |
+
required: false
|
| 54 |
+
output-sarif:
|
| 55 |
+
description: "Whether to output fuzzing results to SARIF."
|
| 56 |
+
required: false
|
| 57 |
+
default: false
|
| 58 |
+
runs:
|
| 59 |
+
using: 'docker'
|
| 60 |
+
image: '../../../build_fuzzers.Dockerfile'
|
| 61 |
+
env:
|
| 62 |
+
OSS_FUZZ_PROJECT_NAME: ${{ inputs.oss-fuzz-project-name }}
|
| 63 |
+
LANGUAGE: ${{ inputs.language }}
|
| 64 |
+
DRY_RUN: ${{ inputs.dry-run}}
|
| 65 |
+
ALLOWED_BROKEN_TARGETS_PERCENTAGE: ${{ inputs.allowed-broken-targets-percentage}}
|
| 66 |
+
SANITIZER: ${{ inputs.sanitizer }}
|
| 67 |
+
PROJECT_SRC_PATH: ${{ inputs.project-src-path }}
|
| 68 |
+
GITHUB_TOKEN: ${{ inputs.github-token }}
|
| 69 |
+
LOW_DISK_SPACE: 'True'
|
| 70 |
+
BAD_BUILD_CHECK: ${{ inputs.bad-build-check }}
|
| 71 |
+
UPLOAD_BUILD: ${{ inputs.upload-build }}
|
| 72 |
+
CIFUZZ_DEBUG: 'True'
|
| 73 |
+
CFL_PLATFORM: 'github'
|
| 74 |
+
KEEP_UNAFFECTED_FUZZ_TARGETS: ${{ inputs.keep-unaffected-fuzz-targets }}
|
| 75 |
+
OUTPUT_SARIF: ${{ inputs.output-sarif }}
|
local-test-commons-compress-delta-03/fuzz-tooling/infra/cifuzz/external-actions/run_fuzzers/action.yml
ADDED
|
@@ -0,0 +1,97 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# action.yml
|
| 2 |
+
name: 'run-fuzzers'
|
| 3 |
+
description: 'Runs fuzz target binaries for a specified length of time.'
|
| 4 |
+
inputs:
|
| 5 |
+
language:
|
| 6 |
+
description: 'Programming language project is written in.'
|
| 7 |
+
required: false
|
| 8 |
+
default: 'c++'
|
| 9 |
+
fuzz-seconds:
|
| 10 |
+
description: 'The total time allotted for fuzzing in seconds.'
|
| 11 |
+
required: true
|
| 12 |
+
default: 600
|
| 13 |
+
dry-run:
|
| 14 |
+
description: 'If set, run the action without actually reporting a failure.'
|
| 15 |
+
default: false
|
| 16 |
+
sanitizer:
|
| 17 |
+
description: 'The sanitizer to run the fuzzers with.'
|
| 18 |
+
default: 'address'
|
| 19 |
+
mode:
|
| 20 |
+
description: |
|
| 21 |
+
The mode to run the fuzzers with ("code-change", "batch", "coverage", or "prune").
|
| 22 |
+
"code-change" is for fuzzing a pull request or commit.
|
| 23 |
+
"batch" is for non-interactive fuzzing of an entire project.
|
| 24 |
+
"coverage" is for coverage generation.
|
| 25 |
+
"prune" is for corpus pruning.
|
| 26 |
+
required: false
|
| 27 |
+
default: 'code-change'
|
| 28 |
+
github-token:
|
| 29 |
+
description: |
|
| 30 |
+
Token for GitHub API. WARNING: THIS SHOULD NOT BE USED IN PRODUCTION YET
|
| 31 |
+
You should use "secrets.GITHUB_TOKEN" in your workflow file, do not
|
| 32 |
+
hardcode the token.
|
| 33 |
+
TODO(https://github.com/google/oss-fuzz/pull/5841#discussion_r639393361):
|
| 34 |
+
Document locking this down.
|
| 35 |
+
required: true
|
| 36 |
+
storage-repo:
|
| 37 |
+
description: |
|
| 38 |
+
The git repo to use for storing certain artifacts from fuzzing.
|
| 39 |
+
required: false
|
| 40 |
+
storage-repo-branch:
|
| 41 |
+
description: |
|
| 42 |
+
The branch of the git repo to use for storing certain artifacts from
|
| 43 |
+
fuzzing.
|
| 44 |
+
default: main
|
| 45 |
+
required: false
|
| 46 |
+
storage-repo-branch-coverage:
|
| 47 |
+
description: |
|
| 48 |
+
The branch of the git repo to use for storing coverage reports.
|
| 49 |
+
default: gh-pages
|
| 50 |
+
required: false
|
| 51 |
+
report-unreproducible-crashes:
|
| 52 |
+
description: 'If True, then unreproducible crashes will be reported.'
|
| 53 |
+
required: false
|
| 54 |
+
default: false
|
| 55 |
+
minimize-crashes:
|
| 56 |
+
description: 'If True, reportable crashes will be minimized.'
|
| 57 |
+
required: false
|
| 58 |
+
default: False
|
| 59 |
+
parallel-fuzzing:
|
| 60 |
+
description: "Whether to use all available cores for fuzzing."
|
| 61 |
+
required: false
|
| 62 |
+
default: false
|
| 63 |
+
output-sarif:
|
| 64 |
+
description: "Whether to output fuzzing results to SARIF."
|
| 65 |
+
required: false
|
| 66 |
+
default: false
|
| 67 |
+
report-timeouts:
|
| 68 |
+
description: "Whether to report fails due to timeout."
|
| 69 |
+
required: false
|
| 70 |
+
default: true
|
| 71 |
+
report-ooms:
|
| 72 |
+
description: "Whether to report fails due to OOM."
|
| 73 |
+
required: false
|
| 74 |
+
default: true
|
| 75 |
+
runs:
|
| 76 |
+
using: 'docker'
|
| 77 |
+
image: '../../../run_fuzzers.Dockerfile'
|
| 78 |
+
env:
|
| 79 |
+
OSS_FUZZ_PROJECT_NAME: ${{ inputs.oss-fuzz-project-name }}
|
| 80 |
+
LANGUAGE: ${{ inputs.language }}
|
| 81 |
+
FUZZ_SECONDS: ${{ inputs.fuzz-seconds }}
|
| 82 |
+
DRY_RUN: ${{ inputs.dry-run}}
|
| 83 |
+
SANITIZER: ${{ inputs.sanitizer }}
|
| 84 |
+
MODE: ${{ inputs.mode }}
|
| 85 |
+
GITHUB_TOKEN: ${{ inputs.github-token }}
|
| 86 |
+
LOW_DISK_SPACE: 'True'
|
| 87 |
+
GIT_STORE_REPO: ${{ inputs.storage-repo }}
|
| 88 |
+
GIT_STORE_BRANCH: ${{ inputs.storage-repo-branch }}
|
| 89 |
+
GIT_STORE_BRANCH_COVERAGE: ${{ inputs.storage-repo-branch-coverage }}
|
| 90 |
+
REPORT_UNREPRODUCIBLE_CRASHES: ${{ inputs.report-unreproducible-crashes }}
|
| 91 |
+
MINIMIZE_CRASHES: ${{ inputs.minimize-crashes }}
|
| 92 |
+
CIFUZZ_DEBUG: 'True'
|
| 93 |
+
CFL_PLATFORM: 'github'
|
| 94 |
+
PARALLEL_FUZZING: ${{ inputs.parallel-fuzzing }}
|
| 95 |
+
OUTPUT_SARIF: ${{ inputs.output-sarif }}
|
| 96 |
+
REPORT_TIMEOUTS: ${{ inputs.report-timeouts }}
|
| 97 |
+
REPORT_OOMS: ${{ inputs.report-ooms}}
|
local-test-commons-compress-delta-03/fuzz-tooling/infra/cifuzz/filestore/__init__.py
ADDED
|
@@ -0,0 +1,54 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2021 Google LLC
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
"""Module for a generic filestore."""
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
class FilestoreError(Exception):
|
| 18 |
+
"""Error using the filestore."""
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
# pylint: disable=unused-argument,no-self-use
|
| 22 |
+
class BaseFilestore:
  """Base class for a filestore.

  A filestore persists fuzzing artifacts (corpora, crashes, builds and
  coverage reports) between runs. Subclasses implement the upload_* and
  download_* methods for a concrete storage backend.
  """

  def __init__(self, config):
    # Backend-specific configuration object, used by subclasses.
    self.config = config

  def upload_crashes(self, name, directory):
    """Uploads the crashes at |directory| to |name|."""
    raise NotImplementedError('Child class must implement method.')

  def upload_corpus(self, name, directory, replace=False):
    """Uploads the corpus at |directory| to |name|."""
    raise NotImplementedError('Child class must implement method.')

  def upload_build(self, name, directory):
    """Uploads the build at |directory| to |name|."""
    raise NotImplementedError('Child class must implement method.')

  def upload_coverage(self, name, directory):
    """Uploads the coverage report at |directory| to |name|."""
    raise NotImplementedError('Child class must implement method.')

  def download_corpus(self, name, dst_directory):
    """Downloads the corpus located at |name| to |dst_directory|."""
    raise NotImplementedError('Child class must implement method.')

  def download_build(self, name, dst_directory):
    """Downloads the build with |name| to |dst_directory|."""
    raise NotImplementedError('Child class must implement method.')

  def download_coverage(self, name, dst_directory):
    """Downloads the latest project coverage report."""
    raise NotImplementedError('Child class must implement method.')
|
local-test-commons-compress-delta-03/fuzz-tooling/infra/cifuzz/filestore/filesystem/__init__.py
ADDED
|
@@ -0,0 +1,107 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2022 Google LLC
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
"""Filestore implementation using a filesystem directory."""
|
| 15 |
+
import logging
|
| 16 |
+
import os
|
| 17 |
+
import shutil
|
| 18 |
+
import subprocess
|
| 19 |
+
import sys
|
| 20 |
+
|
| 21 |
+
from distutils import dir_util
|
| 22 |
+
|
| 23 |
+
# pylint: disable=wrong-import-position,import-error
|
| 24 |
+
sys.path.append(
|
| 25 |
+
os.path.join(os.path.dirname(os.path.abspath(__file__)), os.pardir,
|
| 26 |
+
os.pardir, os.pardir))
|
| 27 |
+
import filestore
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
def recursive_list_dir(directory):
  """Returns a list of every file under |directory|, recursing into all
  subdirectories."""
  return [
      os.path.join(parent, filename)
      for parent, _, filenames in os.walk(directory)
      for filename in filenames
  ]
|
| 38 |
+
|
| 39 |
+
|
| 40 |
+
class FilesystemFilestore(filestore.BaseFilestore):
  """Filesystem implementation using a filesystem directory."""
  # Top-level subdirectories of the filestore root, one per artifact kind.
  BUILD_DIR = 'build'
  CRASHES_DIR = 'crashes'
  CORPUS_DIR = 'corpus'
  COVERAGE_DIR = 'coverage'

  def __init__(self, config):
    super().__init__(config)
    self._filestore_root_dir = self.config.platform_conf.filestore_root_dir

  def _get_filestore_path(self, name, prefix_dir):
    """Returns the filesystem path in the filestore for |name| and
    |prefix_dir|."""
    return os.path.join(self._filestore_root_dir, prefix_dir, name)

  def _upload_directory(self, name, directory, prefix, delete=False):
    """Copies |directory| into the filestore under |prefix|/|name|.

    When |delete| is True, files that existed in the destination before
    the copy but were not part of |directory| are removed afterwards.
    """
    filestore_path = self._get_filestore_path(name, prefix)
    if os.path.exists(filestore_path):
      initial_files = set(recursive_list_dir(filestore_path))
    else:
      initial_files = set()

    # Make directory and any parents.
    os.makedirs(filestore_path, exist_ok=True)
    # NOTE(review): distutils is deprecated and removed in Python 3.12;
    # migrating to shutil.copytree(..., dirs_exist_ok=True) would need a
    # replacement for copy_tree's returned list of copied files.
    copied_files = set(dir_util.copy_tree(directory, filestore_path))
    if not delete:
      return True

    # Anything present before the copy but not (re)copied is stale.
    files_to_delete = initial_files - copied_files
    for file_path in files_to_delete:
      os.remove(file_path)
    return True

  def _download_directory(self, name, dst_directory, prefix):
    """Copies the stored |prefix|/|name| directory into |dst_directory|."""
    filestore_path = self._get_filestore_path(name, prefix)
    return dir_util.copy_tree(filestore_path, dst_directory)

  def upload_crashes(self, name, directory):
    """Uploads the crashes at |directory| to |name|."""
    return self._upload_directory(name, directory, self.CRASHES_DIR)

  def upload_corpus(self, name, directory, replace=False):
    """Uploads the corpus at |directory| to |name|."""
    return self._upload_directory(name,
                                  directory,
                                  self.CORPUS_DIR,
                                  delete=replace)

  def upload_build(self, name, directory):
    """Uploads the build located at |directory| to |name|."""
    return self._upload_directory(name, directory, self.BUILD_DIR)

  def upload_coverage(self, name, directory):
    """Uploads the coverage report at |directory| to |name|."""
    return self._upload_directory(name, directory, self.COVERAGE_DIR)

  def download_corpus(self, name, dst_directory):
    """Downloads the corpus located at |name| to |dst_directory|."""
    return self._download_directory(name, dst_directory, self.CORPUS_DIR)

  def download_build(self, name, dst_directory):
    """Downloads the build with |name| to |dst_directory|."""
    return self._download_directory(name, dst_directory, self.BUILD_DIR)

  def download_coverage(self, name, dst_directory):
    """Downloads the latest project coverage report."""
    return self._download_directory(name, dst_directory, self.COVERAGE_DIR)
|
local-test-commons-compress-delta-03/fuzz-tooling/infra/cifuzz/filestore/git/__init__.py
ADDED
|
@@ -0,0 +1,158 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2021 Google LLC
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
"""Module for a git based filestore."""
|
| 15 |
+
|
| 16 |
+
import logging
|
| 17 |
+
import os
|
| 18 |
+
import shutil
|
| 19 |
+
import subprocess
|
| 20 |
+
import sys
|
| 21 |
+
import tempfile
|
| 22 |
+
|
| 23 |
+
import filestore
|
| 24 |
+
|
| 25 |
+
# pylint: disable=wrong-import-position
|
| 26 |
+
INFRA_DIR = os.path.dirname(
|
| 27 |
+
os.path.dirname(os.path.dirname(os.path.dirname(
|
| 28 |
+
os.path.abspath(__file__)))))
|
| 29 |
+
sys.path.append(INFRA_DIR)
|
| 30 |
+
|
| 31 |
+
import retry
|
| 32 |
+
|
| 33 |
+
_PUSH_RETRIES = 3
|
| 34 |
+
_PUSH_BACKOFF = 1
|
| 35 |
+
_GIT_EMAIL = 'cifuzz@clusterfuzz.com'
|
| 36 |
+
_GIT_NAME = 'CIFuzz'
|
| 37 |
+
_CORPUS_DIR = 'corpus'
|
| 38 |
+
_COVERAGE_DIR = 'coverage'
|
| 39 |
+
|
| 40 |
+
|
| 41 |
+
def git_runner(repo_path):
  """Returns a git runner for |repo_path|: a callable that runs
  `git -C <repo_path> <args...>` and raises CalledProcessError on failure."""
  base_command = ('git', '-C', repo_path)

  def run_git(*args):
    return subprocess.check_call(base_command + args)

  return run_git
|
| 48 |
+
|
| 49 |
+
|
| 50 |
+
# pylint: disable=unused-argument,no-self-use
class GitFilestore(filestore.BaseFilestore):
  """Generic git filestore. This still relies on another filestore provided by
  the CI for larger artifacts or artifacts which make sense to be included as
  the result of a workflow run."""

  def __init__(self, config, ci_filestore):
    super().__init__(config)
    # Work in a throwaway clone; removed in __del__.
    self.repo_path = tempfile.mkdtemp()
    self._git = git_runner(self.repo_path)
    self._clone(self.config.git_store_repo)

    # Fallback store for artifacts (crashes, builds) that aren't kept in git.
    self._ci_filestore = ci_filestore

  def __del__(self):
    # Best-effort cleanup of the temporary clone.
    shutil.rmtree(self.repo_path)

  def _clone(self, repo_url):
    """Clones repo URL into the temporary working directory and configures
    the committer identity used for filestore commits."""
    self._git('clone', repo_url, '.')
    self._git('config', '--local', 'user.email', _GIT_EMAIL)
    self._git('config', '--local', 'user.name', _GIT_NAME)

  def _reset_git(self, branch):
    """Resets the working tree to the remote state of |branch|, creating the
    branch as an orphan if it does not exist on the remote yet."""
    self._git('fetch', 'origin')
    try:
      self._git('checkout', '-B', branch, 'origin/' + branch)
      self._git('reset', '--hard', 'HEAD')
    except subprocess.CalledProcessError:
      # Branch doesn't exist on the remote yet; start it from scratch.
      self._git('checkout', '--orphan', branch)

    # Drop untracked/ignored files left over from a previous operation.
    self._git('clean', '-fxd')

  # pylint: disable=too-many-arguments
  @retry.wrap(_PUSH_RETRIES, _PUSH_BACKOFF)
  def _upload_to_git(self,
                     message,
                     branch,
                     upload_path,
                     local_path,
                     replace=False):
    """Uploads a directory to git. If `replace` is True, then existing contents
    in the upload_path is deleted before copying, so removed files do not
    linger in the repo."""
    self._reset_git(branch)

    full_repo_path = os.path.join(self.repo_path, upload_path)
    if replace and os.path.exists(full_repo_path):
      shutil.rmtree(full_repo_path)

    shutil.copytree(local_path, full_repo_path, dirs_exist_ok=True)
    self._git('add', '.')
    try:
      self._git('commit', '-m', message)
    except subprocess.CalledProcessError:
      # `git commit` fails when there is nothing staged; skip the push.
      logging.debug('No changes, skipping git push.')
      return

    self._git('push', 'origin', branch)

  def upload_crashes(self, name, directory):
    """Uploads the crashes at |directory| to |name| via the CI filestore
    (crashes are not stored in git)."""
    return self._ci_filestore.upload_crashes(name, directory)

  def upload_corpus(self, name, directory, replace=False):
    """Uploads the corpus at |directory| to |name| on the corpus branch."""
    self._upload_to_git('Corpus upload',
                        self.config.git_store_branch,
                        os.path.join(_CORPUS_DIR, name),
                        directory,
                        replace=replace)

  def upload_build(self, name, directory):
    """Uploads the build at |directory| to |name| via the CI filestore
    (builds are not stored in git)."""
    return self._ci_filestore.upload_build(name, directory)

  def upload_coverage(self, name, directory):
    """Uploads the coverage report at |directory| to |name|, always
    replacing the previous report on the coverage branch."""
    self._upload_to_git('Coverage upload',
                        self.config.git_store_branch_coverage,
                        os.path.join(_COVERAGE_DIR, name),
                        directory,
                        replace=True)

  def download_corpus(self, name, dst_directory):
    """Downloads the corpus located at |name| to |dst_directory|. Returns
    False if no corpus exists for |name| yet."""
    self._reset_git(self.config.git_store_branch)
    path = os.path.join(self.repo_path, _CORPUS_DIR, name)
    if not os.path.exists(path):
      logging.debug('Corpus does not exist at %s.', path)
      return False

    shutil.copytree(path, dst_directory, dirs_exist_ok=True)
    return True

  def download_build(self, name, dst_directory):
    """Downloads the build with |name| to |dst_directory| via the CI
    filestore."""
    return self._ci_filestore.download_build(name, dst_directory)

  def download_coverage(self, name, dst_directory):
    """Downloads the latest project coverage report. Returns False if no
    report exists for |name| yet."""
    self._reset_git(self.config.git_store_branch_coverage)
    path = os.path.join(self.repo_path, _COVERAGE_DIR, name)
    if not os.path.exists(path):
      logging.debug('Coverage does not exist at %s.', path)
      return False

    shutil.copytree(path, dst_directory, dirs_exist_ok=True)
    return True
|
local-test-commons-compress-delta-03/fuzz-tooling/infra/cifuzz/filestore/git/git_test.py
ADDED
|
@@ -0,0 +1,122 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2021 Google LLC
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
"""Tests for git."""
|
| 15 |
+
import filecmp
|
| 16 |
+
import os
|
| 17 |
+
import tempfile
|
| 18 |
+
import subprocess
|
| 19 |
+
import sys
|
| 20 |
+
import unittest
|
| 21 |
+
from unittest import mock
|
| 22 |
+
|
| 23 |
+
# pylint: disable=wrong-import-position
|
| 24 |
+
INFRA_DIR = os.path.dirname(
|
| 25 |
+
os.path.dirname(os.path.dirname(os.path.dirname(
|
| 26 |
+
os.path.abspath(__file__)))))
|
| 27 |
+
sys.path.append(INFRA_DIR)
|
| 28 |
+
|
| 29 |
+
from filestore import git
|
| 30 |
+
import test_helpers
|
| 31 |
+
|
| 32 |
+
# pylint: disable=protected-access,no-self-use
|
| 33 |
+
|
| 34 |
+
|
| 35 |
+
class GitFilestoreTest(unittest.TestCase):
  """Tests for GitFilestore."""

  def setUp(self):
    self.git_dir = tempfile.TemporaryDirectory()
    self.addCleanup(self.git_dir.cleanup)

    self.local_dir = tempfile.TemporaryDirectory()
    self.addCleanup(self.local_dir.cleanup)

    self.download_dir = tempfile.TemporaryDirectory()
    self.addCleanup(self.download_dir.cleanup)

    # Fixture layout: file 'a' and file 'b/c' inside a subdirectory.
    with open(os.path.join(self.local_dir.name, 'a'), 'w') as handle:
      handle.write('')

    os.makedirs(os.path.join(self.local_dir.name, 'b'))

    with open(os.path.join(self.local_dir.name, 'b', 'c'), 'w') as handle:
      handle.write('')

    self.git_repo = git.git_runner(self.git_dir.name)
    self.git_repo('init', '--bare')

    self.config = test_helpers.create_run_config(
        git_store_repo='file://' + self.git_dir.name,
        git_store_branch='main',
        git_store_branch_coverage='cov-branch')

    self.mock_ci_filestore = mock.MagicMock()
    self.git_store = git.GitFilestore(self.config, self.mock_ci_filestore)

  def assert_dirs_same(self, first, second):
    """Asserts two dirs are the same."""
    # The previous implementation returned a bool that every caller ignored,
    # so a directory mismatch could never fail a test. Fail explicitly.
    self.assertTrue(self._dirs_same(first, second),
                    f'{first} and {second} differ.')

  def _dirs_same(self, first, second):
    """Returns True if |first| and |second| have identical contents,
    recursing into common subdirectories."""
    dcmp = filecmp.dircmp(first, second)
    if dcmp.diff_files or dcmp.left_only or dcmp.right_only:
      return False

    return all(
        self._dirs_same(os.path.join(first, subdir),
                        os.path.join(second, subdir))
        for subdir in dcmp.common_dirs)

  def get_repo_filelist(self, branch):
    """Get files in repo."""
    return subprocess.check_output([
        'git', '-C', self.git_dir.name, 'ls-tree', '-r', '--name-only', branch
    ]).decode().splitlines()

  def test_upload_download_corpus(self):
    """Tests uploading and downloading corpus."""
    self.git_store.upload_corpus('target', self.local_dir.name)
    self.git_store.download_corpus('target', self.download_dir.name)
    self.assert_dirs_same(self.local_dir.name, self.download_dir.name)

    self.assertCountEqual([
        'corpus/target/a',
        'corpus/target/b/c',
    ], self.get_repo_filelist('main'))

  def test_upload_download_coverage(self):
    """Tests uploading and downloading coverage."""
    self.git_store.upload_coverage('latest', self.local_dir.name)
    self.git_store.download_coverage('latest', self.download_dir.name)
    self.assert_dirs_same(self.local_dir.name, self.download_dir.name)

    self.assertCountEqual([
        'coverage/latest/a',
        'coverage/latest/b/c',
    ], self.get_repo_filelist('cov-branch'))

  def test_upload_crashes(self):
    """Tests uploading crashes."""
    self.git_store.upload_crashes('current', self.local_dir.name)
    self.mock_ci_filestore.upload_crashes.assert_called_with(
        'current', self.local_dir.name)

  def test_upload_build(self):
    """Tests uploading build."""
    self.git_store.upload_build('sanitizer', self.local_dir.name)
    self.mock_ci_filestore.upload_build.assert_called_with(
        'sanitizer', self.local_dir.name)

  def test_download_build(self):
    """Tests downloading build."""
    self.git_store.download_build('sanitizer', self.download_dir.name)
    self.mock_ci_filestore.download_build.assert_called_with(
        'sanitizer', self.download_dir.name)
|
local-test-commons-compress-delta-03/fuzz-tooling/infra/cifuzz/filestore/github_actions/__init__.py
ADDED
|
@@ -0,0 +1,179 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2021 Google LLC
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
"""Implementation of a filestore using Github actions artifacts."""
|
| 15 |
+
import logging
|
| 16 |
+
import os
|
| 17 |
+
import shutil
|
| 18 |
+
import sys
|
| 19 |
+
import tarfile
|
| 20 |
+
import tempfile
|
| 21 |
+
|
| 22 |
+
# pylint: disable=wrong-import-position,import-error
|
| 23 |
+
INFRA_DIR = os.path.abspath(
|
| 24 |
+
os.path.join(os.path.dirname(__file__), os.path.pardir, os.path.pardir,
|
| 25 |
+
os.path.pardir))
|
| 26 |
+
sys.path.append(INFRA_DIR)
|
| 27 |
+
OSS_FUZZ_ROOT_DIR = os.path.dirname(INFRA_DIR)
|
| 28 |
+
|
| 29 |
+
import utils
|
| 30 |
+
import http_utils
|
| 31 |
+
import filestore
|
| 32 |
+
from filestore.github_actions import github_api
|
| 33 |
+
|
| 34 |
+
UPLOAD_JS = os.path.join(os.path.dirname(__file__), 'upload.js')
|
| 35 |
+
|
| 36 |
+
|
| 37 |
+
def tar_directory(directory, archive_path):
  """Creates a tar archive of |directory| at |archive_path|. |archive_path|
  must end in .tar."""
  assert archive_path.endswith('.tar')
  # shutil.make_archive appends the format extension itself, so strip the
  # '.tar' suffix before handing the path over.
  base_name = os.path.splitext(archive_path)[0]
  shutil.make_archive(base_name,
                      'tar',
                      root_dir=os.path.abspath(directory),
                      base_dir='./')
|
| 49 |
+
|
| 50 |
+
|
| 51 |
+
class GithubActionsFilestore(filestore.BaseFilestore):
  """Implementation of BaseFilestore using Github actions artifacts. Relies on
  github_actions_toolkit for using the GitHub actions API and the github_api
  module for using GitHub's standard API. We need to use both because the GitHub
  actions API is the only way to upload an artifact but it does not support
  downloading artifacts from other runs. The standard GitHub API does support
  this however."""

  # Prefixes used to namespace CIFuzz artifacts within a repo's artifact list.
  ARTIFACT_PREFIX = 'cifuzz-'
  BUILD_PREFIX = 'build-'
  CRASHES_PREFIX = 'crashes-'
  CORPUS_PREFIX = 'corpus-'
  COVERAGE_PREFIX = 'coverage-'

  def __init__(self, config):
    super().__init__(config)
    # Auth headers for the standard GitHub API (artifact listing/downloading).
    self.github_api_http_headers = github_api.get_http_auth_headers(config)

  def _get_artifact_name(self, name):
    """Returns |name| prefixed with |self.ARTIFACT_PREFIX| if it isn't already
    prefixed. Otherwise returns |name|."""
    if name.startswith(self.ARTIFACT_PREFIX):
      return name
    return f'{self.ARTIFACT_PREFIX}{name}'

  def _upload_directory(self, name, directory):  # pylint: disable=no-self-use
    """Uploads |directory| as artifact with |name|. The directory is packed
    into a single tarball so its structure survives the artifact
    round-trip."""
    name = self._get_artifact_name(name)
    with tempfile.TemporaryDirectory() as temp_dir:
      archive_path = os.path.join(temp_dir, name + '.tar')
      tar_directory(directory, archive_path)
      _raw_upload_directory(name, temp_dir)

  def upload_crashes(self, name, directory):
    """Uploads the crashes at |directory| to |name|. Crashes are uploaded as
    individual files (no tarring) so they are directly browsable."""
    return _raw_upload_directory(self.CRASHES_PREFIX + name, directory)

  def upload_corpus(self, name, directory, replace=False):
    """Uploads the corpus at |directory| to |name|."""
    # Not applicable as the the entire corpus is uploaded under a single
    # artifact name.
    del replace
    return self._upload_directory(self.CORPUS_PREFIX + name, directory)

  def upload_build(self, name, directory):
    """Uploads the build at |directory| to |name|."""
    return self._upload_directory(self.BUILD_PREFIX + name, directory)

  def upload_coverage(self, name, directory):
    """Uploads the coverage report at |directory| to |name|."""
    return self._upload_directory(self.COVERAGE_PREFIX + name, directory)

  def download_corpus(self, name, dst_directory):  # pylint: disable=unused-argument,no-self-use
    """Downloads the corpus located at |name| to |dst_directory|."""
    return self._download_artifact(self.CORPUS_PREFIX + name, dst_directory)

  def _find_artifact(self, name):
    """Finds an artifact using the GitHub API and returns it."""
    logging.debug('Listing artifacts.')
    artifacts = self._list_artifacts()
    artifact = github_api.find_artifact(name, artifacts)
    logging.debug('Artifact: %s.', artifact)
    return artifact

  def _download_artifact(self, name, dst_directory):
    """Downloads artifact with |name| to |dst_directory|. Returns True on
    success. Expects the artifact to contain a single tarball, which is
    extracted into |dst_directory|."""
    name = self._get_artifact_name(name)

    with tempfile.TemporaryDirectory() as temp_dir:
      if not self._raw_download_artifact(name, temp_dir):
        logging.warning('Could not download artifact: %s.', name)
        return False

      artifact_tarfile_path = os.path.join(temp_dir, name + '.tar')
      if not os.path.exists(artifact_tarfile_path):
        logging.error('Artifact zip did not contain a tarfile.')
        return False

      # TODO(jonathanmetzman): Replace this with archive.unpack from
      # libClusterFuzz so we can avoid path traversal issues.
      with tarfile.TarFile(artifact_tarfile_path) as artifact_tarfile:
        artifact_tarfile.extractall(dst_directory)
    return True

  def _raw_download_artifact(self, name, dst_directory):
    """Downloads the artifact with |name| to |dst_directory|. Returns True on
    success. Does not do any untarring or adding prefix to |name|."""
    artifact = self._find_artifact(name)
    if not artifact:
      logging.warning('Could not find artifact: %s.', name)
      return False
    download_url = artifact['archive_download_url']
    return http_utils.download_and_unpack_zip(
        download_url, dst_directory, headers=self.github_api_http_headers)

  def _list_artifacts(self):
    """Returns a list of artifacts."""
    return github_api.list_artifacts(self.config.project_repo_owner,
                                     self.config.project_repo_name,
                                     self.github_api_http_headers)

  def download_build(self, name, dst_directory):
    """Downloads the build with name |name| to |dst_directory|."""
    return self._download_artifact(self.BUILD_PREFIX + name, dst_directory)

  def download_coverage(self, name, dst_directory):
    """Downloads the latest project coverage report."""
    return self._download_artifact(self.COVERAGE_PREFIX + name, dst_directory)
|
| 160 |
+
|
| 161 |
+
|
| 162 |
+
def _upload_artifact_with_upload_js(name, artifact_paths, directory):
  """Uploads the artifacts in |artifact_paths| that are located in |directory|
  to |name|, using the upload.js script. Returns True when the script exits
  successfully."""
  command = [UPLOAD_JS, name, directory]
  command.extend(artifact_paths)
  _, _, retcode = utils.execute(command, location=OSS_FUZZ_ROOT_DIR)
  return retcode == 0
|
| 168 |
+
|
| 169 |
+
|
| 170 |
+
def _raw_upload_directory(name, directory):
  """Uploads the artifacts located in |directory| to |name|. Does not do any
  tarring or adding prefixes to |name|."""
  # Collect every file under |directory|, recursively.
  artifact_paths = [
      os.path.join(root, file_path)
      for root, _, curr_file_paths in os.walk(directory)
      for file_path in curr_file_paths
  ]
  logging.debug('Artifact paths: %s.', artifact_paths)
  return _upload_artifact_with_upload_js(name, artifact_paths, directory)
|
local-test-commons-compress-delta-03/fuzz-tooling/infra/cifuzz/filestore/github_actions/github_actions_test.py
ADDED
|
@@ -0,0 +1,283 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2021 Google LLC
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
"""Tests for github_actions."""
|
| 15 |
+
import os
|
| 16 |
+
import shutil
|
| 17 |
+
import sys
|
| 18 |
+
import tarfile
|
| 19 |
+
import tempfile
|
| 20 |
+
import unittest
|
| 21 |
+
from unittest import mock
|
| 22 |
+
|
| 23 |
+
from pyfakefs import fake_filesystem_unittest
|
| 24 |
+
|
| 25 |
+
# pylint: disable=wrong-import-position
|
| 26 |
+
INFRA_DIR = os.path.dirname(
|
| 27 |
+
os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
| 28 |
+
sys.path.append(INFRA_DIR)
|
| 29 |
+
|
| 30 |
+
from filestore import github_actions
|
| 31 |
+
import test_helpers
|
| 32 |
+
|
| 33 |
+
# pylint: disable=protected-access,no-self-use
|
| 34 |
+
|
| 35 |
+
|
| 36 |
+
class GithubActionsFilestoreTest(fake_filesystem_unittest.TestCase):
|
| 37 |
+
"""Tests for GithubActionsFilestore."""
|
| 38 |
+
|
| 39 |
+
@mock.patch('platform_config.github._get_event_data', return_value={})
|
| 40 |
+
def setUp(self, _): # pylint: disable=arguments-differ
|
| 41 |
+
test_helpers.patch_environ(self)
|
| 42 |
+
self.token = 'example githubtoken'
|
| 43 |
+
self.owner = 'exampleowner'
|
| 44 |
+
self.repo = 'examplerepo'
|
| 45 |
+
os.environ['GITHUB_REPOSITORY'] = f'{self.owner}/{self.repo}'
|
| 46 |
+
os.environ['GITHUB_EVENT_PATH'] = '/fake'
|
| 47 |
+
os.environ['CFL_PLATFORM'] = 'github'
|
| 48 |
+
os.environ['GITHUB_WORKSPACE'] = '/workspace'
|
| 49 |
+
self.config = test_helpers.create_run_config(token=self.token)
|
| 50 |
+
self.local_dir = '/local-dir'
|
| 51 |
+
self.testcase = os.path.join(self.local_dir, 'testcase')
|
| 52 |
+
|
| 53 |
+
def _get_expected_http_headers(self):
|
| 54 |
+
return {
|
| 55 |
+
'Authorization': f'token {self.token}',
|
| 56 |
+
'Accept': 'application/vnd.github.v3+json',
|
| 57 |
+
}
|
| 58 |
+
|
| 59 |
+
@mock.patch('filestore.github_actions.github_api.list_artifacts')
|
| 60 |
+
def test_list_artifacts(self, mock_list_artifacts):
|
| 61 |
+
"""Tests that _list_artifacts works as intended."""
|
| 62 |
+
filestore = github_actions.GithubActionsFilestore(self.config)
|
| 63 |
+
filestore._list_artifacts()
|
| 64 |
+
mock_list_artifacts.assert_called_with(self.owner, self.repo,
|
| 65 |
+
self._get_expected_http_headers())
|
| 66 |
+
|
| 67 |
+
@mock.patch('logging.warning')
|
| 68 |
+
@mock.patch('filestore.github_actions.GithubActionsFilestore._list_artifacts',
|
| 69 |
+
return_value=None)
|
| 70 |
+
@mock.patch('filestore.github_actions.github_api.find_artifact',
|
| 71 |
+
return_value=None)
|
| 72 |
+
def test_download_build_no_artifact(self, _, __, mock_warning):
|
| 73 |
+
"""Tests that download_build returns None and doesn't exception when
|
| 74 |
+
find_artifact can't find an artifact."""
|
| 75 |
+
filestore = github_actions.GithubActionsFilestore(self.config)
|
| 76 |
+
name = 'name'
|
| 77 |
+
build_dir = 'build-dir'
|
| 78 |
+
self.assertFalse(filestore.download_build(name, build_dir))
|
| 79 |
+
mock_warning.assert_called_with('Could not download artifact: %s.',
|
| 80 |
+
'cifuzz-build-' + name)
|
| 81 |
+
|
| 82 |
+
@mock.patch('logging.warning')
|
| 83 |
+
@mock.patch('filestore.github_actions.GithubActionsFilestore._list_artifacts',
|
| 84 |
+
return_value=None)
|
| 85 |
+
@mock.patch('filestore.github_actions.github_api.find_artifact',
|
| 86 |
+
return_value=None)
|
| 87 |
+
def test_download_corpus_no_artifact(self, _, __, mock_warning):
|
| 88 |
+
"""Tests that download_corpus_build returns None and doesn't exception when
|
| 89 |
+
find_artifact can't find an artifact."""
|
| 90 |
+
filestore = github_actions.GithubActionsFilestore(self.config)
|
| 91 |
+
name = 'name'
|
| 92 |
+
dst_dir = 'local-dir'
|
| 93 |
+
self.assertFalse(filestore.download_corpus(name, dst_dir))
|
| 94 |
+
mock_warning.assert_called_with('Could not download artifact: %s.',
|
| 95 |
+
'cifuzz-corpus-' + name)
|
| 96 |
+
|
| 97 |
+
@mock.patch('filestore.github_actions.tar_directory')
|
| 98 |
+
@mock.patch('filestore.github_actions._upload_artifact_with_upload_js')
|
| 99 |
+
def test_upload_corpus(self, mock_upload_artifact, mock_tar_directory):
|
| 100 |
+
"""Test uploading corpus."""
|
| 101 |
+
self._create_local_dir()
|
| 102 |
+
|
| 103 |
+
def mock_tar_directory_impl(_, archive_path):
|
| 104 |
+
self.fs.create_file(archive_path)
|
| 105 |
+
|
| 106 |
+
mock_tar_directory.side_effect = mock_tar_directory_impl
|
| 107 |
+
|
| 108 |
+
filestore = github_actions.GithubActionsFilestore(self.config)
|
| 109 |
+
filestore.upload_corpus('target', self.local_dir)
|
| 110 |
+
self.assert_upload(mock_upload_artifact, mock_tar_directory,
|
| 111 |
+
'corpus-target')
|
| 112 |
+
|
| 113 |
+
@mock.patch('filestore.github_actions._upload_artifact_with_upload_js')
|
| 114 |
+
def test_upload_crashes(self, mock_upload_artifact):
|
| 115 |
+
"""Test uploading crashes."""
|
| 116 |
+
self._create_local_dir()
|
| 117 |
+
|
| 118 |
+
filestore = github_actions.GithubActionsFilestore(self.config)
|
| 119 |
+
filestore.upload_crashes('current', self.local_dir)
|
| 120 |
+
mock_upload_artifact.assert_has_calls(
|
| 121 |
+
[mock.call('crashes-current', ['/local-dir/testcase'], '/local-dir')])
|
| 122 |
+
|
| 123 |
+
@mock.patch('filestore.github_actions.tar_directory')
|
| 124 |
+
@mock.patch('filestore.github_actions._upload_artifact_with_upload_js')
|
| 125 |
+
def test_upload_build(self, mock_upload_artifact, mock_tar_directory):
|
| 126 |
+
"""Test uploading build."""
|
| 127 |
+
self._create_local_dir()
|
| 128 |
+
|
| 129 |
+
def mock_tar_directory_impl(_, archive_path):
|
| 130 |
+
self.fs.create_file(archive_path)
|
| 131 |
+
|
| 132 |
+
mock_tar_directory.side_effect = mock_tar_directory_impl
|
| 133 |
+
|
| 134 |
+
filestore = github_actions.GithubActionsFilestore(self.config)
|
| 135 |
+
filestore.upload_build('sanitizer', self.local_dir)
|
| 136 |
+
self.assert_upload(mock_upload_artifact, mock_tar_directory,
|
| 137 |
+
'build-sanitizer')
|
| 138 |
+
|
| 139 |
+
@mock.patch('filestore.github_actions.tar_directory')
|
| 140 |
+
@mock.patch('filestore.github_actions._upload_artifact_with_upload_js')
|
| 141 |
+
def test_upload_coverage(self, mock_upload_artifact, mock_tar_directory):
|
| 142 |
+
"""Test uploading coverage."""
|
| 143 |
+
self._create_local_dir()
|
| 144 |
+
|
| 145 |
+
def mock_tar_directory_impl(_, archive_path):
|
| 146 |
+
self.fs.create_file(archive_path)
|
| 147 |
+
|
| 148 |
+
mock_tar_directory.side_effect = mock_tar_directory_impl
|
| 149 |
+
|
| 150 |
+
filestore = github_actions.GithubActionsFilestore(self.config)
|
| 151 |
+
filestore.upload_coverage('latest', self.local_dir)
|
| 152 |
+
self.assert_upload(mock_upload_artifact, mock_tar_directory,
|
| 153 |
+
'coverage-latest')
|
| 154 |
+
|
| 155 |
+
def assert_upload(self, mock_upload_artifact, mock_tar_directory,
|
| 156 |
+
expected_artifact_name):
|
| 157 |
+
"""Tests that upload_directory invokes tar_directory and
|
| 158 |
+
artifact_client.upload_artifact properly."""
|
| 159 |
+
# Don't assert what second argument will be since it's a temporary
|
| 160 |
+
# directory.
|
| 161 |
+
self.assertEqual(mock_tar_directory.call_args_list[0][0][0], self.local_dir)
|
| 162 |
+
|
| 163 |
+
# Don't assert what second and third arguments will be since they are
|
| 164 |
+
# temporary directories.
|
| 165 |
+
expected_artifact_name = 'cifuzz-' + expected_artifact_name
|
| 166 |
+
self.assertEqual(mock_upload_artifact.call_args_list[0][0][0],
|
| 167 |
+
expected_artifact_name)
|
| 168 |
+
|
| 169 |
+
# Assert artifacts list contains one tarfile.
|
| 170 |
+
artifacts_list = mock_upload_artifact.call_args_list[0][0][1]
|
| 171 |
+
self.assertEqual(len(artifacts_list), 1)
|
| 172 |
+
self.assertEqual(os.path.basename(artifacts_list[0]),
|
| 173 |
+
expected_artifact_name + '.tar')
|
| 174 |
+
|
| 175 |
+
def _create_local_dir(self):
|
| 176 |
+
"""Sets up pyfakefs and creates a corpus directory containing
|
| 177 |
+
self.testcase."""
|
| 178 |
+
self.setUpPyfakefs()
|
| 179 |
+
self.fs.create_file(self.testcase, contents='hi')
|
| 180 |
+
|
| 181 |
+
@mock.patch('filestore.github_actions.GithubActionsFilestore._find_artifact')
|
| 182 |
+
@mock.patch('http_utils.download_and_unpack_zip')
|
| 183 |
+
def test_download_artifact(self, mock_download_and_unpack_zip,
|
| 184 |
+
mock_find_artifact):
|
| 185 |
+
"""Tests that _download_artifact works as intended."""
|
| 186 |
+
artifact_download_url = 'http://example.com/download'
|
| 187 |
+
artifact_listing = {
|
| 188 |
+
'expired': False,
|
| 189 |
+
'name': 'corpus',
|
| 190 |
+
'archive_download_url': artifact_download_url
|
| 191 |
+
}
|
| 192 |
+
mock_find_artifact.return_value = artifact_listing
|
| 193 |
+
|
| 194 |
+
self._create_local_dir()
|
| 195 |
+
with tempfile.TemporaryDirectory() as temp_dir:
|
| 196 |
+
# Create a tarball.
|
| 197 |
+
archive_path = os.path.join(temp_dir, 'cifuzz-corpus.tar')
|
| 198 |
+
github_actions.tar_directory(self.local_dir, archive_path)
|
| 199 |
+
|
| 200 |
+
artifact_download_dst_dir = os.path.join(temp_dir, 'dst')
|
| 201 |
+
os.mkdir(artifact_download_dst_dir)
|
| 202 |
+
|
| 203 |
+
def mock_download_and_unpack_zip_impl(url, download_artifact_temp_dir,
|
| 204 |
+
headers):
|
| 205 |
+
self.assertEqual(url, artifact_download_url)
|
| 206 |
+
self.assertEqual(headers, self._get_expected_http_headers())
|
| 207 |
+
shutil.copy(
|
| 208 |
+
archive_path,
|
| 209 |
+
os.path.join(download_artifact_temp_dir,
|
| 210 |
+
os.path.basename(archive_path)))
|
| 211 |
+
return True
|
| 212 |
+
|
| 213 |
+
mock_download_and_unpack_zip.side_effect = (
|
| 214 |
+
mock_download_and_unpack_zip_impl)
|
| 215 |
+
filestore = github_actions.GithubActionsFilestore(self.config)
|
| 216 |
+
self.assertTrue(
|
| 217 |
+
filestore._download_artifact('corpus', artifact_download_dst_dir))
|
| 218 |
+
mock_find_artifact.assert_called_with('cifuzz-corpus')
|
| 219 |
+
self.assertTrue(
|
| 220 |
+
os.path.exists(
|
| 221 |
+
os.path.join(artifact_download_dst_dir,
|
| 222 |
+
os.path.basename(self.testcase))))
|
| 223 |
+
|
| 224 |
+
@mock.patch('filestore.github_actions.github_api.list_artifacts')
|
| 225 |
+
def test_find_artifact(self, mock_list_artifacts):
|
| 226 |
+
"""Tests that _find_artifact works as intended."""
|
| 227 |
+
artifact_listing_1 = {
|
| 228 |
+
'expired': False,
|
| 229 |
+
'name': 'other',
|
| 230 |
+
'archive_download_url': 'http://download1'
|
| 231 |
+
}
|
| 232 |
+
artifact_listing_2 = {
|
| 233 |
+
'expired': False,
|
| 234 |
+
'name': 'artifact',
|
| 235 |
+
'archive_download_url': 'http://download2'
|
| 236 |
+
}
|
| 237 |
+
artifact_listing_3 = {
|
| 238 |
+
'expired': True,
|
| 239 |
+
'name': 'artifact',
|
| 240 |
+
'archive_download_url': 'http://download3'
|
| 241 |
+
}
|
| 242 |
+
artifact_listing_4 = {
|
| 243 |
+
'expired': False,
|
| 244 |
+
'name': 'artifact',
|
| 245 |
+
'archive_download_url': 'http://download4'
|
| 246 |
+
}
|
| 247 |
+
artifacts = [
|
| 248 |
+
artifact_listing_1, artifact_listing_2, artifact_listing_3,
|
| 249 |
+
artifact_listing_4
|
| 250 |
+
]
|
| 251 |
+
mock_list_artifacts.return_value = artifacts
|
| 252 |
+
filestore = github_actions.GithubActionsFilestore(self.config)
|
| 253 |
+
# Test that find_artifact will return the most recent unexpired artifact
|
| 254 |
+
# with the correct name.
|
| 255 |
+
self.assertEqual(filestore._find_artifact('artifact'), artifact_listing_2)
|
| 256 |
+
mock_list_artifacts.assert_called_with(self.owner, self.repo,
|
| 257 |
+
self._get_expected_http_headers())
|
| 258 |
+
|
| 259 |
+
|
| 260 |
+
class TarDirectoryTest(unittest.TestCase):
|
| 261 |
+
"""Tests for tar_directory."""
|
| 262 |
+
|
| 263 |
+
def test_tar_directory(self):
|
| 264 |
+
"""Tests that tar_directory writes the archive to the correct location and
|
| 265 |
+
archives properly."""
|
| 266 |
+
with tempfile.TemporaryDirectory() as temp_dir:
|
| 267 |
+
archive_path = os.path.join(temp_dir, 'myarchive.tar')
|
| 268 |
+
archived_dir = os.path.join(temp_dir, 'toarchive')
|
| 269 |
+
os.mkdir(archived_dir)
|
| 270 |
+
archived_filename = 'file1'
|
| 271 |
+
archived_file_path = os.path.join(archived_dir, archived_filename)
|
| 272 |
+
with open(archived_file_path, 'w') as file_handle:
|
| 273 |
+
file_handle.write('hi')
|
| 274 |
+
github_actions.tar_directory(archived_dir, archive_path)
|
| 275 |
+
self.assertTrue(os.path.exists(archive_path))
|
| 276 |
+
|
| 277 |
+
# Now check it archives correctly.
|
| 278 |
+
unpacked_directory = os.path.join(temp_dir, 'unpacked')
|
| 279 |
+
with tarfile.TarFile(archive_path) as artifact_tarfile:
|
| 280 |
+
artifact_tarfile.extractall(unpacked_directory)
|
| 281 |
+
unpacked_archived_file_path = os.path.join(unpacked_directory,
|
| 282 |
+
archived_filename)
|
| 283 |
+
self.assertTrue(os.path.exists(unpacked_archived_file_path))
|
local-test-commons-compress-delta-03/fuzz-tooling/infra/cifuzz/filestore/github_actions/github_api.py
ADDED
|
@@ -0,0 +1,109 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2021 Google LLC
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
"""Module for dealing with the GitHub API. This is different from
|
| 15 |
+
github_actions_toolkit which only deals with the actions API. We need to use
|
| 16 |
+
both."""
|
| 17 |
+
import logging
|
| 18 |
+
import os
|
| 19 |
+
import sys
|
| 20 |
+
|
| 21 |
+
import requests
|
| 22 |
+
|
| 23 |
+
import filestore
|
| 24 |
+
|
| 25 |
+
# pylint: disable=wrong-import-position,import-error
|
| 26 |
+
|
| 27 |
+
sys.path.append(
|
| 28 |
+
os.path.join(__file__, os.path.pardir, os.path.pardir, os.path.pardir,
|
| 29 |
+
os.path.pardir))
|
| 30 |
+
import retry
|
| 31 |
+
|
| 32 |
+
_MAX_ITEMS_PER_PAGE = 100
|
| 33 |
+
|
| 34 |
+
_GET_ATTEMPTS = 3
|
| 35 |
+
_GET_BACKOFF = 1
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
def get_http_auth_headers(config):
|
| 39 |
+
"""Returns HTTP headers for authentication to the API."""
|
| 40 |
+
authorization = f'token {config.token}'
|
| 41 |
+
return {
|
| 42 |
+
'Authorization': authorization,
|
| 43 |
+
'Accept': 'application/vnd.github.v3+json'
|
| 44 |
+
}
|
| 45 |
+
|
| 46 |
+
|
| 47 |
+
def _get_artifacts_list_api_url(repo_owner, repo_name):
|
| 48 |
+
"""Returns the artifacts_api_url for |repo_name| owned by |repo_owner|."""
|
| 49 |
+
github_api_url = os.getenv('GITHUB_API_URL', 'https://api.github.com')
|
| 50 |
+
return (f'{github_api_url}/repos/{repo_owner}/'
|
| 51 |
+
f'{repo_name}/actions/artifacts')
|
| 52 |
+
|
| 53 |
+
|
| 54 |
+
@retry.wrap(_GET_ATTEMPTS, _GET_BACKOFF)
|
| 55 |
+
def _do_get_request(*args, **kwargs):
|
| 56 |
+
"""Wrapped version of requests.get that does retries."""
|
| 57 |
+
return requests.get(*args, **kwargs)
|
| 58 |
+
|
| 59 |
+
|
| 60 |
+
def _get_items(url, headers):
|
| 61 |
+
"""Generator that gets and yields items from a GitHub API endpoint (specified
|
| 62 |
+
by |URL|) sending |headers| with the get request."""
|
| 63 |
+
# Github API response pages are 1-indexed.
|
| 64 |
+
page_counter = 1
|
| 65 |
+
|
| 66 |
+
# Set to infinity so we run loop at least once.
|
| 67 |
+
total_num_items = float('inf')
|
| 68 |
+
|
| 69 |
+
item_num = 0
|
| 70 |
+
while item_num < total_num_items:
|
| 71 |
+
params = {'per_page': _MAX_ITEMS_PER_PAGE, 'page': str(page_counter)}
|
| 72 |
+
response = _do_get_request(url, params=params, headers=headers)
|
| 73 |
+
response_json = response.json()
|
| 74 |
+
if not response.status_code == 200:
|
| 75 |
+
# Check that request was successful.
|
| 76 |
+
logging.error('Request to %s failed. Code: %d. Response: %s',
|
| 77 |
+
response.request.url, response.status_code, response_json)
|
| 78 |
+
raise filestore.FilestoreError('Github API request failed.')
|
| 79 |
+
|
| 80 |
+
if total_num_items == float('inf'):
|
| 81 |
+
# Set proper total_num_items
|
| 82 |
+
total_num_items = response_json['total_count']
|
| 83 |
+
|
| 84 |
+
# Get the key for the items we are after.
|
| 85 |
+
keys = [key for key in response_json.keys() if key != 'total_count']
|
| 86 |
+
assert len(keys) == 1, keys
|
| 87 |
+
items_key = keys[0]
|
| 88 |
+
|
| 89 |
+
for item in response_json[items_key]:
|
| 90 |
+
yield item
|
| 91 |
+
item_num += 1
|
| 92 |
+
|
| 93 |
+
page_counter += 1
|
| 94 |
+
|
| 95 |
+
|
| 96 |
+
def find_artifact(artifact_name, artifacts):
|
| 97 |
+
"""Find the artifact with the name |artifact_name| in |artifacts|."""
|
| 98 |
+
for artifact in artifacts:
|
| 99 |
+
# TODO(metzman): Handle multiple by making sure we download the latest.
|
| 100 |
+
if artifact['name'] == artifact_name and not artifact['expired']:
|
| 101 |
+
return artifact
|
| 102 |
+
return None
|
| 103 |
+
|
| 104 |
+
|
| 105 |
+
def list_artifacts(owner, repo, headers):
|
| 106 |
+
"""Returns a generator of all the artifacts for |owner|/|repo|."""
|
| 107 |
+
url = _get_artifacts_list_api_url(owner, repo)
|
| 108 |
+
logging.debug('Getting artifacts from: %s', url)
|
| 109 |
+
return _get_items(url, headers)
|
local-test-commons-compress-delta-03/fuzz-tooling/infra/cifuzz/filestore/github_actions/github_api_test.py
ADDED
|
@@ -0,0 +1,41 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2021 Google LLC
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
"""Tests for github_api."""
|
| 15 |
+
import os
|
| 16 |
+
import sys
|
| 17 |
+
import unittest
|
| 18 |
+
|
| 19 |
+
# pylint: disable=wrong-import-position,import-error
|
| 20 |
+
sys.path.append(
|
| 21 |
+
os.path.abspath(
|
| 22 |
+
os.path.join(os.path.dirname(__file__), os.path.pardir, os.path.pardir,
|
| 23 |
+
os.path.pardir)))
|
| 24 |
+
|
| 25 |
+
from filestore.github_actions import github_api
|
| 26 |
+
import test_helpers
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
class GetHttpAuthHeaders(unittest.TestCase):
|
| 30 |
+
"""Tests for get_http_auth_headers."""
|
| 31 |
+
|
| 32 |
+
def test_get_http_auth_headers(self):
|
| 33 |
+
"""Tests that get_http_auth_headers returns the correct result."""
|
| 34 |
+
token = 'example githubtoken'
|
| 35 |
+
run_config = test_helpers.create_run_config(token=token)
|
| 36 |
+
expected_headers = {
|
| 37 |
+
'Authorization': f'token {token}',
|
| 38 |
+
'Accept': 'application/vnd.github.v3+json',
|
| 39 |
+
}
|
| 40 |
+
self.assertEqual(expected_headers,
|
| 41 |
+
github_api.get_http_auth_headers(run_config))
|
local-test-commons-compress-delta-03/fuzz-tooling/infra/cifuzz/filestore/github_actions/upload.js
ADDED
|
@@ -0,0 +1,45 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/usr/bin/env node
|
| 2 |
+
// Copyright 2021 Google LLC
|
| 3 |
+
//
|
| 4 |
+
// Licensed under the Apache License, Version 2.0 (the "License");
|
| 5 |
+
// you may not use this file except in compliance with the License.
|
| 6 |
+
// You may obtain a copy of the License at
|
| 7 |
+
//
|
| 8 |
+
// http://www.apache.org/licenses/LICENSE-2.0
|
| 9 |
+
//
|
| 10 |
+
// Unless required by applicable law or agreed to in writing, software
|
| 11 |
+
// distributed under the License is distributed on an "AS IS" BASIS,
|
| 12 |
+
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 13 |
+
// See the License for the specific language governing permissions and
|
| 14 |
+
// limitations under the License.
|
| 15 |
+
// Script for uploading an artifact. Returns 0 on success.
|
| 16 |
+
// Usage: upload.js <artifactName> <rootDirectory> <file 1>...<file N>
|
| 17 |
+
|
| 18 |
+
const fs = require('fs');
|
| 19 |
+
const { DefaultArtifactClient } = require('@actions/artifact');
|
| 20 |
+
|
| 21 |
+
const artifactClient = new DefaultArtifactClient();
|
| 22 |
+
const artifactName = process.argv[2];
|
| 23 |
+
const rootDirectory = process.argv[3]
|
| 24 |
+
const files = process.argv.slice(4);
|
| 25 |
+
const options = {
|
| 26 |
+
continueOnError: true
|
| 27 |
+
};
|
| 28 |
+
|
| 29 |
+
async function uploadArtifact() {
|
| 30 |
+
try {
|
| 31 |
+
const uploadResult = await artifactClient.uploadArtifact(artifactName, files, rootDirectory, options);
|
| 32 |
+
console.log(uploadResult);
|
| 33 |
+
if (uploadResult.failedItems.length > 0) {
|
| 34 |
+
return 1;
|
| 35 |
+
}
|
| 36 |
+
return 0;
|
| 37 |
+
} catch (error) {
|
| 38 |
+
console.error('Error uploading artifact:', error);
|
| 39 |
+
return 1;
|
| 40 |
+
}
|
| 41 |
+
}
|
| 42 |
+
|
| 43 |
+
uploadArtifact().then(exitCode => {
|
| 44 |
+
process.exit(exitCode);
|
| 45 |
+
});
|
local-test-commons-compress-delta-03/fuzz-tooling/infra/cifuzz/filestore/gitlab/__init__.py
ADDED
|
@@ -0,0 +1,133 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2022 Google LLC
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
"""GitLab filestore implementation."""
|
| 15 |
+
import logging
|
| 16 |
+
|
| 17 |
+
import json
|
| 18 |
+
import os
|
| 19 |
+
import shutil
|
| 20 |
+
import tempfile
|
| 21 |
+
|
| 22 |
+
import filestore
|
| 23 |
+
import http_utils
|
| 24 |
+
|
| 25 |
+
# pylint: disable=no-self-use,unused-argument
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
class GitlabFilestore(filestore.BaseFilestore):
|
| 29 |
+
"""Implementation of BaseFilestore using GitLab.
|
| 30 |
+
Needs a cache to upload and download builds.
|
| 31 |
+
Needs a git repository for corpus and coverage.
|
| 32 |
+
"""
|
| 33 |
+
|
| 34 |
+
BUILD_PREFIX = 'build-'
|
| 35 |
+
CORPUS_PREFIX = 'corpus-'
|
| 36 |
+
COVERAGE_PREFIX = 'coverage-'
|
| 37 |
+
CRASHES_PREFIX = 'crashes-'
|
| 38 |
+
|
| 39 |
+
def __init__(self, config):
|
| 40 |
+
super().__init__(config)
|
| 41 |
+
self.artifacts_dir = self.config.platform_conf.artifacts_dir
|
| 42 |
+
self.cache_dir = self.config.platform_conf.cache_dir
|
| 43 |
+
if self.config.git_store_repo:
|
| 44 |
+
self.git_filestore = filestore.git.GitFilestore(config, None)
|
| 45 |
+
else:
|
| 46 |
+
self.git_filestore = None
|
| 47 |
+
|
| 48 |
+
def upload_crashes(self, name, directory):
|
| 49 |
+
"""GitLab artifacts implementation of upload_crashes."""
|
| 50 |
+
# Upload crashes as job artifacts.
|
| 51 |
+
if os.listdir(directory):
|
| 52 |
+
dest_dir_artifacts = os.path.join(self.config.project_src_path,
|
| 53 |
+
self.artifacts_dir,
|
| 54 |
+
self.CRASHES_PREFIX + name)
|
| 55 |
+
logging.info('Uploading artifacts to %s.', dest_dir_artifacts)
|
| 56 |
+
shutil.copytree(directory, dest_dir_artifacts)
|
| 57 |
+
|
| 58 |
+
def upload_corpus(self, name, directory, replace=False):
|
| 59 |
+
"""GitLab artifacts implementation of upload_corpus."""
|
| 60 |
+
# Use the git filestore if any.
|
| 61 |
+
if self.git_filestore:
|
| 62 |
+
self.git_filestore.upload_corpus(name, directory, replace)
|
| 63 |
+
return
|
| 64 |
+
# Fall back to cache.
|
| 65 |
+
dest_dir_cache = os.path.join(self.config.project_src_path, self.cache_dir,
|
| 66 |
+
self.CORPUS_PREFIX + name)
|
| 67 |
+
logging.info('Copying from %s to cache %s.', directory, dest_dir_cache)
|
| 68 |
+
# Remove previous corpus from cache if any.
|
| 69 |
+
shutil.rmtree(dest_dir_cache, ignore_errors=True)
|
| 70 |
+
shutil.copytree(directory, dest_dir_cache, dirs_exist_ok=True)
|
| 71 |
+
|
| 72 |
+
def upload_build(self, name, directory):
|
| 73 |
+
"""GitLab artifacts implementation of upload_build."""
|
| 74 |
+
# Puts build into the cache.
|
| 75 |
+
dest_dir_cache = os.path.join(self.config.project_src_path, self.cache_dir,
|
| 76 |
+
self.BUILD_PREFIX + name)
|
| 77 |
+
logging.info('Copying from %s to cache %s.', directory, dest_dir_cache)
|
| 78 |
+
shutil.copytree(directory, dest_dir_cache, dirs_exist_ok=True)
|
| 79 |
+
|
| 80 |
+
def upload_coverage(self, name, directory):
|
| 81 |
+
"""GitLab artifacts implementation of upload_coverage."""
|
| 82 |
+
# Use the git filestore.
|
| 83 |
+
if self.git_filestore:
|
| 84 |
+
self.git_filestore.upload_coverage(name, directory)
|
| 85 |
+
return
|
| 86 |
+
# Fall back to cache.
|
| 87 |
+
dest_dir_cache = os.path.join(self.config.project_src_path, self.cache_dir,
|
| 88 |
+
self.COVERAGE_PREFIX + name)
|
| 89 |
+
logging.info('Copying from %s to cache %s.', directory, dest_dir_cache)
|
| 90 |
+
shutil.copytree(directory, dest_dir_cache, dirs_exist_ok=True)
|
| 91 |
+
# And also updates coverage reports as artifacts
|
| 92 |
+
# as it should not be too big.
|
| 93 |
+
dest_dir_artifacts = os.path.join(self.config.project_src_path,
|
| 94 |
+
self.artifacts_dir,
|
| 95 |
+
self.COVERAGE_PREFIX + name)
|
| 96 |
+
logging.info('Uploading artifacts to %s.', dest_dir_artifacts)
|
| 97 |
+
shutil.copytree(directory, dest_dir_artifacts)
|
| 98 |
+
|
| 99 |
+
def _copy_from_cache(self, src_dir_cache, dst_directory):
|
| 100 |
+
if not os.path.exists(src_dir_cache):
|
| 101 |
+
logging.info('Cache %s does not exist.', src_dir_cache)
|
| 102 |
+
return False
|
| 103 |
+
logging.info('Copying %s from cache to %s.', src_dir_cache, dst_directory)
|
| 104 |
+
shutil.copytree(src_dir_cache, dst_directory, dirs_exist_ok=True)
|
| 105 |
+
return True
|
| 106 |
+
|
| 107 |
+
def download_corpus(self, name, dst_directory):
|
| 108 |
+
"""GitLab artifacts implementation of download_corpus."""
|
| 109 |
+
# Use the git filestore if any.
|
| 110 |
+
if self.git_filestore:
|
| 111 |
+
self.git_filestore.download_corpus(name, dst_directory)
|
| 112 |
+
return
|
| 113 |
+
# Fall back to cache.
|
| 114 |
+
src_dir_cache = os.path.join(self.config.project_src_path, self.cache_dir,
|
| 115 |
+
self.CORPUS_PREFIX + name)
|
| 116 |
+
self._copy_from_cache(src_dir_cache, dst_directory)
|
| 117 |
+
|
| 118 |
+
def download_build(self, name, dst_directory):
|
| 119 |
+
"""GitLab artifacts implementation of download_build."""
|
| 120 |
+
# Gets build from the cache.
|
| 121 |
+
src_dir_cache = os.path.join(self.config.project_src_path, self.cache_dir,
|
| 122 |
+
self.BUILD_PREFIX + name)
|
| 123 |
+
return self._copy_from_cache(src_dir_cache, dst_directory)
|
| 124 |
+
|
| 125 |
+
def download_coverage(self, name, dst_directory):
|
| 126 |
+
"""GitLab artifacts implementation of download_coverage."""
|
| 127 |
+
# Use the git filestore if any.
|
| 128 |
+
if self.git_filestore:
|
| 129 |
+
return self.git_filestore.download_coverage(name, dst_directory)
|
| 130 |
+
# Fall back to cache.
|
| 131 |
+
src_dir_cache = os.path.join(self.config.project_src_path, self.cache_dir,
|
| 132 |
+
self.COVERAGE_PREFIX + name)
|
| 133 |
+
return self._copy_from_cache(src_dir_cache, dst_directory)
|