Add files using upload-large-folder tool
Browse files- local-test-commons-compress-full-01-vuln_2/fuzz-tooling/infra/base-images/base-builder-go/Dockerfile +33 -0
- local-test-commons-compress-full-01-vuln_2/fuzz-tooling/infra/base-images/base-builder-go/ossfuzz_coverage_runner.go +80 -0
- local-test-commons-compress-full-01-vuln_2/fuzz-tooling/infra/base-images/base-builder-python/Dockerfile +20 -0
- local-test-commons-compress-full-01-vuln_2/fuzz-tooling/infra/base-images/base-builder-swift/Dockerfile +22 -0
- local-test-commons-compress-full-01-vuln_2/fuzz-tooling/infra/base-images/base-builder/bash_parser.py +235 -0
- local-test-commons-compress-full-01-vuln_2/fuzz-tooling/infra/base-images/base-builder/compile_go_fuzzer +69 -0
- local-test-commons-compress-full-01-vuln_2/fuzz-tooling/infra/base-images/base-builder/install_go.sh +43 -0
- local-test-commons-compress-full-01-vuln_2/fuzz-tooling/infra/base-images/base-builder/install_ruby.sh +25 -0
- local-test-commons-compress-full-01-vuln_2/fuzz-tooling/infra/base-images/base-builder/install_rust.sh +22 -0
- local-test-commons-compress-full-01-vuln_2/fuzz-tooling/infra/base-images/base-runner/coverage +549 -0
- local-test-commons-compress-full-01-vuln_2/fuzz-tooling/infra/base-images/base-runner/coverage_helper +17 -0
- local-test-commons-compress-full-01-vuln_2/fuzz-tooling/infra/base-images/base-runner/generate_differential_cov_report.py +228 -0
- local-test-commons-compress-full-01-vuln_2/fuzz-tooling/infra/base-images/base-runner/install_go.sh +41 -0
- local-test-commons-compress-full-01-vuln_2/fuzz-tooling/infra/base-images/base-runner/jacoco_report_converter.py +174 -0
- local-test-commons-compress-full-01-vuln_2/fuzz-tooling/infra/base-images/base-runner/run_fuzzer +228 -0
- local-test-commons-compress-full-01-vuln_2/fuzz-tooling/infra/base-images/base-runner/targets_list +10 -0
- local-test-commons-compress-full-01-vuln_2/fuzz-tooling/infra/base-images/base-runner/test_all.py +295 -0
- local-test-commons-compress-full-01-vuln_2/fuzz-tooling/infra/cifuzz/filestore/no_filestore/__init__.py +51 -0
- local-test-commons-compress-full-01-vuln_2/fuzz-tooling/infra/tools/wycheproof/.gitignore +1 -0
- local-test-commons-compress-full-01-vuln_2/fuzz-tooling/infra/tools/wycheproof/generate_job.py +50 -0
local-test-commons-compress-full-01-vuln_2/fuzz-tooling/infra/base-images/base-builder-go/Dockerfile
ADDED
|
@@ -0,0 +1,33 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2021 Google LLC
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
#
|
| 15 |
+
################################################################################
|
| 16 |
+
|
| 17 |
+
ARG IMG_TAG=latest
|
| 18 |
+
FROM ghcr.io/aixcc-finals/base-builder:${IMG_TAG}
|
| 19 |
+
|
| 20 |
+
# Set up Golang environment variables (copied from /root/.bash_profile).
|
| 21 |
+
ENV GOPATH /root/go
|
| 22 |
+
|
| 23 |
+
# /root/.go/bin is for the standard Go binaries (i.e. go, gofmt, etc).
|
| 24 |
+
# $GOPATH/bin is for the binaries from the dependencies installed via "go get".
|
| 25 |
+
ENV PATH $PATH:/root/.go/bin:$GOPATH/bin
|
| 26 |
+
|
| 27 |
+
COPY gosigfuzz.c $GOPATH/gosigfuzz/
|
| 28 |
+
|
| 29 |
+
RUN install_go.sh
|
| 30 |
+
|
| 31 |
+
# TODO(jonathanmetzman): Install this file using install_go.sh.
|
| 32 |
+
COPY ossfuzz_coverage_runner.go \
|
| 33 |
+
$GOPATH/
|
local-test-commons-compress-full-01-vuln_2/fuzz-tooling/infra/base-images/base-builder-go/ossfuzz_coverage_runner.go
ADDED
|
@@ -0,0 +1,80 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
// Copyright 2020 Google LLC
|
| 2 |
+
//
|
| 3 |
+
// Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
// you may not use this file except in compliance with the License.
|
| 5 |
+
// You may obtain a copy of the License at
|
| 6 |
+
//
|
| 7 |
+
// http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
//
|
| 9 |
+
// Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
// distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
// See the License for the specific language governing permissions and
|
| 13 |
+
// limitations under the License.
|
| 14 |
+
|
| 15 |
+
package mypackagebeingfuzzed
|
| 16 |
+
|
| 17 |
+
import (
|
| 18 |
+
"io/fs"
|
| 19 |
+
"io/ioutil"
|
| 20 |
+
"os"
|
| 21 |
+
"path/filepath"
|
| 22 |
+
"runtime/pprof"
|
| 23 |
+
"testing"
|
| 24 |
+
)
|
| 25 |
+
|
| 26 |
+
func TestFuzzCorpus(t *testing.T) {
|
| 27 |
+
dir := os.Getenv("FUZZ_CORPUS_DIR")
|
| 28 |
+
if dir == "" {
|
| 29 |
+
t.Logf("No fuzzing corpus directory set")
|
| 30 |
+
return
|
| 31 |
+
}
|
| 32 |
+
filename := ""
|
| 33 |
+
defer func() {
|
| 34 |
+
if r := recover(); r != nil {
|
| 35 |
+
t.Error("Fuzz panicked in "+filename, r)
|
| 36 |
+
}
|
| 37 |
+
}()
|
| 38 |
+
profname := os.Getenv("FUZZ_PROFILE_NAME")
|
| 39 |
+
if profname != "" {
|
| 40 |
+
f, err := os.Create(profname + ".cpu.prof")
|
| 41 |
+
if err != nil {
|
| 42 |
+
t.Logf("error creating profile file %s\n", err)
|
| 43 |
+
} else {
|
| 44 |
+
_ = pprof.StartCPUProfile(f)
|
| 45 |
+
}
|
| 46 |
+
}
|
| 47 |
+
_, err := ioutil.ReadDir(dir)
|
| 48 |
+
if err != nil {
|
| 49 |
+
t.Logf("Not fuzzing corpus directory %s", err)
|
| 50 |
+
return
|
| 51 |
+
}
|
| 52 |
+
// recurse for regressions subdirectory
|
| 53 |
+
err = filepath.Walk(dir, func(fname string, info fs.FileInfo, err error) error {
|
| 54 |
+
if info.IsDir() {
|
| 55 |
+
return nil
|
| 56 |
+
}
|
| 57 |
+
data, err := ioutil.ReadFile(fname)
|
| 58 |
+
if err != nil {
|
| 59 |
+
t.Error("Failed to read corpus file", err)
|
| 60 |
+
return err
|
| 61 |
+
}
|
| 62 |
+
filename = fname
|
| 63 |
+
FuzzFunction(data)
|
| 64 |
+
return nil
|
| 65 |
+
})
|
| 66 |
+
if err != nil {
|
| 67 |
+
t.Error("Failed to run corpus", err)
|
| 68 |
+
}
|
| 69 |
+
if profname != "" {
|
| 70 |
+
pprof.StopCPUProfile()
|
| 71 |
+
f, err := os.Create(profname + ".heap.prof")
|
| 72 |
+
if err != nil {
|
| 73 |
+
t.Logf("error creating heap profile file %s\n", err)
|
| 74 |
+
}
|
| 75 |
+
if err = pprof.WriteHeapProfile(f); err != nil {
|
| 76 |
+
t.Logf("error writing heap profile file %s\n", err)
|
| 77 |
+
}
|
| 78 |
+
f.Close()
|
| 79 |
+
}
|
| 80 |
+
}
|
local-test-commons-compress-full-01-vuln_2/fuzz-tooling/infra/base-images/base-builder-python/Dockerfile
ADDED
|
@@ -0,0 +1,20 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2021 Google LLC
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
#
|
| 15 |
+
################################################################################
|
| 16 |
+
|
| 17 |
+
ARG IMG_TAG=latest
|
| 18 |
+
FROM ghcr.io/aixcc-finals/base-builder:${IMG_TAG}
|
| 19 |
+
|
| 20 |
+
RUN install_python.sh
|
local-test-commons-compress-full-01-vuln_2/fuzz-tooling/infra/base-images/base-builder-swift/Dockerfile
ADDED
|
@@ -0,0 +1,22 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2021 Google LLC
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
#
|
| 15 |
+
################################################################################
|
| 16 |
+
|
| 17 |
+
ARG IMG_TAG=latest
|
| 18 |
+
FROM ghcr.io/aixcc-finals/base-builder:${IMG_TAG}
|
| 19 |
+
|
| 20 |
+
RUN install_swift.sh
|
| 21 |
+
|
| 22 |
+
COPY precompile_swift /usr/local/bin/
|
local-test-commons-compress-full-01-vuln_2/fuzz-tooling/infra/base-images/base-builder/bash_parser.py
ADDED
|
@@ -0,0 +1,235 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/usr/bin/python3
|
| 2 |
+
# Copyright 2024 Google LLC
|
| 3 |
+
#
|
| 4 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 5 |
+
# you may not use this file except in compliance with the License.
|
| 6 |
+
# You may obtain a copy of the License at
|
| 7 |
+
#
|
| 8 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 9 |
+
#
|
| 10 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 11 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 12 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 13 |
+
# See the License for the specific language governing permissions and
|
| 14 |
+
# limitations under the License.
|
| 15 |
+
|
| 16 |
+
import os
|
| 17 |
+
import sys
|
| 18 |
+
|
| 19 |
+
from glob import glob
|
| 20 |
+
|
| 21 |
+
import bashlex
|
| 22 |
+
|
| 23 |
+
|
| 24 |
+
def find_all_bash_scripts_in_src():
|
| 25 |
+
"""Finds all bash scripts that exist in SRC/. This is used to idenfiy scripts
|
| 26 |
+
that may be needed for reading during the AST parsing. This is the case
|
| 27 |
+
when a given build script calls another build script, then we need to
|
| 28 |
+
read those."""
|
| 29 |
+
all_local_scripts = [
|
| 30 |
+
y for x in os.walk('/src/') for y in glob(os.path.join(x[0], '*.sh'))
|
| 31 |
+
]
|
| 32 |
+
scripts_we_care_about = []
|
| 33 |
+
to_ignore = {'aflplusplus', 'honggfuzz', '/fuzztest', '/centipede'}
|
| 34 |
+
for s in all_local_scripts:
|
| 35 |
+
if any([x for x in to_ignore if x in s]):
|
| 36 |
+
continue
|
| 37 |
+
scripts_we_care_about.append(s)
|
| 38 |
+
|
| 39 |
+
print(scripts_we_care_about)
|
| 40 |
+
return scripts_we_care_about
|
| 41 |
+
|
| 42 |
+
|
| 43 |
+
def should_discard_command(ast_tree) -> bool:
|
| 44 |
+
"""Returns True if the command shuold be avoided, otherwise False"""
|
| 45 |
+
try:
|
| 46 |
+
first_word = ast_tree.parts[0].word
|
| 47 |
+
except: # pylint: disable=bare-except
|
| 48 |
+
return False
|
| 49 |
+
|
| 50 |
+
if ('cmake' in first_word and
|
| 51 |
+
any('--build' in part.word for part in ast_tree.parts)):
|
| 52 |
+
return False
|
| 53 |
+
|
| 54 |
+
cmds_to_avoid_replaying = {
|
| 55 |
+
'configure', 'autoheader', 'autoconf', 'autoreconf', 'cmake', 'autogen.sh'
|
| 56 |
+
}
|
| 57 |
+
if any([cmd for cmd in cmds_to_avoid_replaying if cmd in first_word]):
|
| 58 |
+
return True
|
| 59 |
+
|
| 60 |
+
# Avoid all "make clean" calls. We dont want to erase previously build
|
| 61 |
+
# files.
|
| 62 |
+
try:
|
| 63 |
+
second_word = ast_tree.parts[1].word
|
| 64 |
+
except: # pylint: disable=bare-except
|
| 65 |
+
return False
|
| 66 |
+
if 'make' in first_word and 'clean' in second_word:
|
| 67 |
+
return True
|
| 68 |
+
|
| 69 |
+
# No match was found to commands we dont want to build. There is no
|
| 70 |
+
# indication we shuold avoid.
|
| 71 |
+
return False
|
| 72 |
+
|
| 73 |
+
|
| 74 |
+
def is_local_redirection(ast_node, all_local_scripts):
|
| 75 |
+
"""Return the list of scripts corresponding to the command, in case
|
| 76 |
+
the command is an execution of a local script."""
|
| 77 |
+
# print("Checking")
|
| 78 |
+
|
| 79 |
+
# Capture local script called with ./random/path/build.sh
|
| 80 |
+
|
| 81 |
+
if len(ast_node.parts) >= 2:
|
| 82 |
+
try:
|
| 83 |
+
ast_node.parts[0].word
|
| 84 |
+
except:
|
| 85 |
+
return []
|
| 86 |
+
if ast_node.parts[0].word == '.':
|
| 87 |
+
suffixes_matching = []
|
| 88 |
+
#print(ast_node.parts[1].word)
|
| 89 |
+
for bash_script in all_local_scripts:
|
| 90 |
+
#print("- %s"%(bash_script))
|
| 91 |
+
cmd_to_exec = ast_node.parts[1].word.replace('$SRC', 'src')
|
| 92 |
+
if bash_script.endswith(cmd_to_exec):
|
| 93 |
+
suffixes_matching.append(bash_script)
|
| 94 |
+
#print(suffixes_matching)
|
| 95 |
+
return suffixes_matching
|
| 96 |
+
# Capture a local script called with $SRC/random/path/build.sh
|
| 97 |
+
if len(ast_node.parts) >= 1:
|
| 98 |
+
if '$SRC' in ast_node.parts[0].word:
|
| 99 |
+
suffixes_matching = []
|
| 100 |
+
print(ast_node.parts[0].word)
|
| 101 |
+
for bash_script in all_local_scripts:
|
| 102 |
+
print("- %s" % (bash_script))
|
| 103 |
+
cmd_to_exec = ast_node.parts[0].word.replace('$SRC', 'src')
|
| 104 |
+
if bash_script.endswith(cmd_to_exec):
|
| 105 |
+
suffixes_matching.append(bash_script)
|
| 106 |
+
print(suffixes_matching)
|
| 107 |
+
return suffixes_matching
|
| 108 |
+
|
| 109 |
+
return []
|
| 110 |
+
|
| 111 |
+
|
| 112 |
+
def handle_ast_command(ast_node, all_scripts_in_fs, raw_script):
|
| 113 |
+
"""Generate bash script string for command node"""
|
| 114 |
+
new_script = ''
|
| 115 |
+
if should_discard_command(ast_node):
|
| 116 |
+
return ''
|
| 117 |
+
|
| 118 |
+
matches = is_local_redirection(ast_node, all_scripts_in_fs)
|
| 119 |
+
if len(matches) == 1:
|
| 120 |
+
new_script += parse_script(matches[0], all_scripts_in_fs) + '\n'
|
| 121 |
+
return ''
|
| 122 |
+
|
| 123 |
+
# Extract the command from the script string
|
| 124 |
+
idx_start = ast_node.pos[0]
|
| 125 |
+
idx_end = ast_node.pos[1]
|
| 126 |
+
new_script += raw_script[idx_start:idx_end]
|
| 127 |
+
#new_script += '\n'
|
| 128 |
+
|
| 129 |
+
# If mkdir is used, then ensure that '-p' is provided, as
|
| 130 |
+
# otherwise we will run into failures. We don't have to worry
|
| 131 |
+
# about multiple uses of -p as `mkdir -p -p -p`` is valid.
|
| 132 |
+
new_script = new_script.replace('mkdir', 'mkdir -p')
|
| 133 |
+
return new_script
|
| 134 |
+
|
| 135 |
+
|
| 136 |
+
def handle_ast_list(ast_node, all_scripts_in_fs, raw_script):
|
| 137 |
+
"""Handles bashlex AST list."""
|
| 138 |
+
new_script = ''
|
| 139 |
+
try_hard = 1
|
| 140 |
+
|
| 141 |
+
if not try_hard:
|
| 142 |
+
list_start = ast_node.pos[0]
|
| 143 |
+
list_end = ast_node.pos[1]
|
| 144 |
+
new_script += raw_script[list_start:list_end] # + '\n'
|
| 145 |
+
else:
|
| 146 |
+
# This is more refined logic. Ideally, this should work, but it's a bit
|
| 147 |
+
# more intricate to get right due to e.g. white-space between positions
|
| 148 |
+
# and more extensive parsing needed. We don't neccesarily need this
|
| 149 |
+
# level of success rate for what we're trying to achieve, so am disabling
|
| 150 |
+
# this for now.
|
| 151 |
+
for part in ast_node.parts:
|
| 152 |
+
if part.kind == 'list':
|
| 153 |
+
new_script += handle_ast_list(part, all_scripts_in_fs, raw_script)
|
| 154 |
+
elif part.kind == 'command':
|
| 155 |
+
new_script += handle_ast_command(part, all_scripts_in_fs, raw_script)
|
| 156 |
+
else:
|
| 157 |
+
idx_start = part.pos[0]
|
| 158 |
+
idx_end = part.pos[1]
|
| 159 |
+
new_script += raw_script[idx_start:idx_end]
|
| 160 |
+
new_script += ' '
|
| 161 |
+
|
| 162 |
+
# Make sure what was created is valid syntax, and otherwise return empty
|
| 163 |
+
try:
|
| 164 |
+
bashlex.parse(new_script)
|
| 165 |
+
except: # pylint: disable=bare-except
|
| 166 |
+
# Maybe return the original here instead of skipping?
|
| 167 |
+
return ''
|
| 168 |
+
return new_script
|
| 169 |
+
|
| 170 |
+
|
| 171 |
+
def handle_ast_compound(ast_node, all_scripts_in_fs, raw_script):
|
| 172 |
+
"""Handles bashlex compound AST node."""
|
| 173 |
+
new_script = ''
|
| 174 |
+
list_start = ast_node.pos[0]
|
| 175 |
+
list_end = ast_node.pos[1]
|
| 176 |
+
new_script += raw_script[list_start:list_end] + '\n'
|
| 177 |
+
return new_script
|
| 178 |
+
|
| 179 |
+
|
| 180 |
+
def handle_node(ast_node, all_scripts_in_fs, build_script):
|
| 181 |
+
"""Generates a bash script string for a given node"""
|
| 182 |
+
if ast_node.kind == 'command':
|
| 183 |
+
return handle_ast_command(ast_node, all_scripts_in_fs, build_script)
|
| 184 |
+
elif ast_node.kind == 'list':
|
| 185 |
+
return handle_ast_list(ast_node, all_scripts_in_fs, build_script)
|
| 186 |
+
elif ast_node.kind == 'compound':
|
| 187 |
+
print('todo: handle compound')
|
| 188 |
+
return handle_ast_compound(ast_node, all_scripts_in_fs, build_script)
|
| 189 |
+
elif ast_node.kind == 'pipeline':
|
| 190 |
+
# Not supported
|
| 191 |
+
return ''
|
| 192 |
+
else:
|
| 193 |
+
raise Exception(f'Missing node handling: {ast_node.kind}')
|
| 194 |
+
|
| 195 |
+
|
| 196 |
+
def parse_script(bash_script, all_scripts) -> str:
|
| 197 |
+
"""Top-level bash script parser"""
|
| 198 |
+
new_script = ''
|
| 199 |
+
with open(bash_script, 'r', encoding='utf-8') as f:
|
| 200 |
+
build_script = f.read()
|
| 201 |
+
try:
|
| 202 |
+
parts = bashlex.parse(build_script)
|
| 203 |
+
except bashlex.errors.ParsingError:
|
| 204 |
+
return ''
|
| 205 |
+
for part in parts:
|
| 206 |
+
new_script += handle_node(part, all_scripts, build_script)
|
| 207 |
+
new_script += '\n'
|
| 208 |
+
print("-" * 45)
|
| 209 |
+
print(part.kind)
|
| 210 |
+
print(part.dump())
|
| 211 |
+
|
| 212 |
+
return new_script
|
| 213 |
+
|
| 214 |
+
|
| 215 |
+
def main():
|
| 216 |
+
"""Main function"""
|
| 217 |
+
all_scripts = find_all_bash_scripts_in_src()
|
| 218 |
+
replay_bash_script = parse_script(sys.argv[1], all_scripts)
|
| 219 |
+
|
| 220 |
+
print("REPLAYABLE BASH SCRIPT")
|
| 221 |
+
print("#" * 60)
|
| 222 |
+
print(replay_bash_script)
|
| 223 |
+
print("#" * 60)
|
| 224 |
+
|
| 225 |
+
out_dir = os.getenv('OUT', '/out')
|
| 226 |
+
with open(f'{out_dir}/replay-build-script.sh', 'w', encoding='utf-8') as f:
|
| 227 |
+
f.write(replay_bash_script)
|
| 228 |
+
|
| 229 |
+
src_dir = os.getenv('SRC', '/src')
|
| 230 |
+
with open(f'{src_dir}/replay_build.sh', 'w', encoding='utf-8') as f:
|
| 231 |
+
f.write(replay_bash_script)
|
| 232 |
+
|
| 233 |
+
|
| 234 |
+
if __name__ == "__main__":
|
| 235 |
+
main()
|
local-test-commons-compress-full-01-vuln_2/fuzz-tooling/infra/base-images/base-builder/compile_go_fuzzer
ADDED
|
@@ -0,0 +1,69 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/bin/bash -eu
|
| 2 |
+
# Copyright 2020 Google Inc.
|
| 3 |
+
#
|
| 4 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 5 |
+
# you may not use this file except in compliance with the License.
|
| 6 |
+
# You may obtain a copy of the License at
|
| 7 |
+
#
|
| 8 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 9 |
+
#
|
| 10 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 11 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 12 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 13 |
+
# See the License for the specific language governing permissions and
|
| 14 |
+
# limitations under the License.
|
| 15 |
+
#
|
| 16 |
+
################################################################################
|
| 17 |
+
|
| 18 |
+
path=$1
|
| 19 |
+
function=$2
|
| 20 |
+
fuzzer=$3
|
| 21 |
+
tags="-tags gofuzz"
|
| 22 |
+
if [[ $# -eq 4 ]]; then
|
| 23 |
+
tags="-tags $4"
|
| 24 |
+
fi
|
| 25 |
+
|
| 26 |
+
# makes directory change temporary
|
| 27 |
+
(
|
| 28 |
+
cd $GOPATH/src/$path || true
|
| 29 |
+
# in the case we are in the right directory, with go.mod but no go.sum
|
| 30 |
+
go mod tidy || true
|
| 31 |
+
# project was downloaded with go get if go list fails
|
| 32 |
+
go list $tags $path || { cd $GOPATH/pkg/mod/ && cd `echo $path | cut -d/ -f1-3 | awk '{print $1"@*"}'`; } || cd -
|
| 33 |
+
# project does not have go.mod if go list fails again
|
| 34 |
+
go list $tags $path || { go mod init $path && go mod tidy ;}
|
| 35 |
+
|
| 36 |
+
if [[ $SANITIZER = *coverage* ]]; then
|
| 37 |
+
fuzzed_package=`go list $tags -f '{{.Name}}' $path`
|
| 38 |
+
abspath=`go list $tags -f {{.Dir}} $path`
|
| 39 |
+
cd $abspath
|
| 40 |
+
cp $GOPATH/ossfuzz_coverage_runner.go ./"${function,,}"_test.go
|
| 41 |
+
sed -i -e 's/FuzzFunction/'$function'/' ./"${function,,}"_test.go
|
| 42 |
+
sed -i -e 's/mypackagebeingfuzzed/'$fuzzed_package'/' ./"${function,,}"_test.go
|
| 43 |
+
sed -i -e 's/TestFuzzCorpus/Test'$function'Corpus/' ./"${function,,}"_test.go
|
| 44 |
+
|
| 45 |
+
# The repo is the module path/name, which is already created above in case it doesn't exist,
|
| 46 |
+
# but not always the same as the module path. This is necessary to handle SIV properly.
|
| 47 |
+
fuzzed_repo=$(go list $tags -f {{.Module}} "$path")
|
| 48 |
+
abspath_repo=`go list -m $tags -f {{.Dir}} $fuzzed_repo || go list $tags -f {{.Dir}} $fuzzed_repo`
|
| 49 |
+
# give equivalence to absolute paths in another file, as go test -cover uses golangish pkg.Dir
|
| 50 |
+
echo "s=$fuzzed_repo"="$abspath_repo"= > $OUT/$fuzzer.gocovpath
|
| 51 |
+
# Additional packages for which to get coverage.
|
| 52 |
+
pkgaddcov=""
|
| 53 |
+
# to prevent bash from failing about unbound variable
|
| 54 |
+
GO_COV_ADD_PKG_SET=${GO_COV_ADD_PKG:-}
|
| 55 |
+
if [[ -n "${GO_COV_ADD_PKG_SET}" ]]; then
|
| 56 |
+
pkgaddcov=","$GO_COV_ADD_PKG
|
| 57 |
+
abspath_repo=`go list -m $tags -f {{.Dir}} $GO_COV_ADD_PKG || go list $tags -f {{.Dir}} $GO_COV_ADD_PKG`
|
| 58 |
+
echo "s=^$GO_COV_ADD_PKG"="$abspath_repo"= >> $OUT/$fuzzer.gocovpath
|
| 59 |
+
fi
|
| 60 |
+
go test -run Test${function}Corpus -v $tags -coverpkg $fuzzed_repo/...$pkgaddcov -c -o $OUT/$fuzzer $path
|
| 61 |
+
else
|
| 62 |
+
# Compile and instrument all Go files relevant to this fuzz target.
|
| 63 |
+
echo "Running go-fuzz $tags -func $function -o $fuzzer.a $path"
|
| 64 |
+
go-fuzz $tags -func $function -o $fuzzer.a $path
|
| 65 |
+
|
| 66 |
+
# Link Go code ($fuzzer.a) with fuzzing engine to produce fuzz target binary.
|
| 67 |
+
$CXX $CXXFLAGS $LIB_FUZZING_ENGINE $fuzzer.a -o $OUT/$fuzzer
|
| 68 |
+
fi
|
| 69 |
+
)
|
local-test-commons-compress-full-01-vuln_2/fuzz-tooling/infra/base-images/base-builder/install_go.sh
ADDED
|
@@ -0,0 +1,43 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/bin/bash -eux
|
| 2 |
+
# Copyright 2021 Google LLC
|
| 3 |
+
#
|
| 4 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 5 |
+
# you may not use this file except in compliance with the License.
|
| 6 |
+
# You may obtain a copy of the License at
|
| 7 |
+
#
|
| 8 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 9 |
+
#
|
| 10 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 11 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 12 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 13 |
+
# See the License for the specific language governing permissions and
|
| 14 |
+
# limitations under the License.
|
| 15 |
+
#
|
| 16 |
+
################################################################################
|
| 17 |
+
|
| 18 |
+
cd /tmp
|
| 19 |
+
|
| 20 |
+
wget https://go.dev/dl/go1.23.4.linux-amd64.tar.gz
|
| 21 |
+
mkdir temp-go
|
| 22 |
+
tar -C temp-go/ -xzf go1.23.4.linux-amd64.tar.gz
|
| 23 |
+
|
| 24 |
+
mkdir /root/.go/
|
| 25 |
+
mv temp-go/go/* /root/.go/
|
| 26 |
+
rm -rf temp-go
|
| 27 |
+
|
| 28 |
+
echo 'Set "GOPATH=/root/go"'
|
| 29 |
+
echo 'Set "PATH=$PATH:/root/.go/bin:$GOPATH/bin"'
|
| 30 |
+
|
| 31 |
+
go install github.com/mdempsky/go114-fuzz-build@latest
|
| 32 |
+
ln -s $GOPATH/bin/go114-fuzz-build $GOPATH/bin/go-fuzz
|
| 33 |
+
|
| 34 |
+
# Build signal handler
|
| 35 |
+
if [ -f "$GOPATH/gosigfuzz/gosigfuzz.c" ]; then
|
| 36 |
+
clang -c $GOPATH/gosigfuzz/gosigfuzz.c -o $GOPATH/gosigfuzz/gosigfuzz.o
|
| 37 |
+
fi
|
| 38 |
+
|
| 39 |
+
cd /tmp
|
| 40 |
+
git clone https://github.com/AdamKorcz/go-118-fuzz-build
|
| 41 |
+
cd go-118-fuzz-build
|
| 42 |
+
go build
|
| 43 |
+
mv go-118-fuzz-build $GOPATH/bin/
|
local-test-commons-compress-full-01-vuln_2/fuzz-tooling/infra/base-images/base-builder/install_ruby.sh
ADDED
|
@@ -0,0 +1,25 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/bin/bash
|
| 2 |
+
# Copyright 2024 Google LLC
|
| 3 |
+
#
|
| 4 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 5 |
+
# you may not use this file except in compliance with the License.
|
| 6 |
+
# You may obtain a copy of the License at
|
| 7 |
+
#
|
| 8 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 9 |
+
#
|
| 10 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 11 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 12 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 13 |
+
# See the License for the specific language governing permissions and
|
| 14 |
+
# limitations under the License.
|
| 15 |
+
#
|
| 16 |
+
################################################################################
|
| 17 |
+
|
| 18 |
+
apt update
|
| 19 |
+
apt install -y lsb-release software-properties-common gnupg2 binutils xz-utils libyaml-dev
|
| 20 |
+
gpg2 --keyserver keyserver.ubuntu.com --recv-keys 409B6B1796C275462A1703113804BB82D39DC0E3 7D2BAF1CF37B13E2069D6956105BD0E739499BDB
|
| 21 |
+
curl -sSL https://get.rvm.io | bash
|
| 22 |
+
|
| 23 |
+
. /etc/profile.d/rvm.sh
|
| 24 |
+
|
| 25 |
+
rvm install ruby-3.3.1
|
local-test-commons-compress-full-01-vuln_2/fuzz-tooling/infra/base-images/base-builder/install_rust.sh
ADDED
|
@@ -0,0 +1,22 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/bin/bash -eux
|
| 2 |
+
# Copyright 2021 Google LLC
|
| 3 |
+
#
|
| 4 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 5 |
+
# you may not use this file except in compliance with the License.
|
| 6 |
+
# You may obtain a copy of the License at
|
| 7 |
+
#
|
| 8 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 9 |
+
#
|
| 10 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 11 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 12 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 13 |
+
# See the License for the specific language governing permissions and
|
| 14 |
+
# limitations under the License.
|
| 15 |
+
#
|
| 16 |
+
################################################################################
|
| 17 |
+
|
| 18 |
+
curl https://sh.rustup.rs | sh -s -- -y --default-toolchain=$RUSTUP_TOOLCHAIN --profile=minimal
|
| 19 |
+
cargo install cargo-fuzz --locked && rm -rf /rust/registry
|
| 20 |
+
# Needed to recompile rust std library for MSAN
|
| 21 |
+
rustup component add rust-src
|
| 22 |
+
cp -r /usr/local/lib/x86_64-unknown-linux-gnu/* /usr/local/lib/
|
local-test-commons-compress-full-01-vuln_2/fuzz-tooling/infra/base-images/base-runner/coverage
ADDED
|
@@ -0,0 +1,549 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
#!/bin/bash -u
# Copyright 2018 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################
cd $OUT

# Targets either come from the command line, or are discovered as the
# executables in $OUT minus the well-known helper binaries that are not
# fuzz targets.
if (( $# > 0 )); then
  FUZZ_TARGETS="$@"
else
  FUZZ_TARGETS="$(find . -maxdepth 1 -type f -executable -printf '%P\n' | \
    grep -v -x -F \
        -e 'llvm-symbolizer' \
        -e 'jazzer_agent_deploy.jar' \
        -e 'jazzer_driver' \
        -e 'jazzer_driver_with_sanitizer' \
        -e 'sanitizer_with_fuzzer.so')"
fi

COVERAGE_OUTPUT_DIR=${COVERAGE_OUTPUT_DIR:-$OUT}

# Layout of the coverage output tree.
DUMPS_DIR="$COVERAGE_OUTPUT_DIR/dumps"
FUZZERS_COVERAGE_DUMPS_DIR="$DUMPS_DIR/fuzzers_coverage"
MERGED_COVERAGE_DIR="$COVERAGE_OUTPUT_DIR/merged_coverage"
FUZZER_STATS_DIR="$COVERAGE_OUTPUT_DIR/fuzzer_stats"
TEXTCOV_REPORT_DIR="$COVERAGE_OUTPUT_DIR/textcov_reports"
LOGS_DIR="$COVERAGE_OUTPUT_DIR/logs"
REPORT_ROOT_DIR="$COVERAGE_OUTPUT_DIR/report"
REPORT_BY_TARGET_ROOT_DIR="$COVERAGE_OUTPUT_DIR/report_target"
PLATFORM=linux
REPORT_PLATFORM_DIR="$COVERAGE_OUTPUT_DIR/report/$PLATFORM"

# Start from a clean slate: recreate every output directory.
for directory in $DUMPS_DIR $FUZZER_STATS_DIR $LOGS_DIR $REPORT_ROOT_DIR $TEXTCOV_REPORT_DIR\
    $REPORT_PLATFORM_DIR $REPORT_BY_TARGET_ROOT_DIR $FUZZERS_COVERAGE_DUMPS_DIR $MERGED_COVERAGE_DIR; do
  rm -rf $directory
  mkdir -p $directory
done

PROFILE_FILE="$DUMPS_DIR/merged.profdata"
SUMMARY_FILE="$REPORT_PLATFORM_DIR/summary.json"
COVERAGE_TARGET_FILE="$FUZZER_STATS_DIR/coverage_targets.txt"

# Use path mapping, as $SRC directory from the builder is copied into $OUT/$SRC.
PATH_EQUIVALENCE_ARGS="-path-equivalence=/,$OUT"

# $COVERAGE_EXTRA_ARGS must remain the last argument: it may contain paths
# to source files / directories, which llvm-cov treats as positional args.
LLVM_COV_COMMON_ARGS="$PATH_EQUIVALENCE_ARGS \
    -ignore-filename-regex=.*src/libfuzzer/.* $COVERAGE_EXTRA_ARGS"

# Options to extract branch coverage.
BRANCH_COV_ARGS="--show-branches=count --show-expansions"

# Timeout for running a single fuzz target.
TIMEOUT=1h

# Accumulates the object arguments for the final llvm-cov invocation.
objects=""

# Parallelism bound: never exceed the CPU count, hard-capped at 10 jobs.
NPROC=$(nproc)
MAX_PARALLEL_COUNT=10

CORPUS_DIR=${CORPUS_DIR:-"/corpus"}
# Collect clang source-based coverage for a single (C/C++/Rust) fuzz target.
#
# Arguments:
#   $1 - target name (file in $OUT; may be a fuzztest wrapper "binary@Test").
# Produces:
#   $DUMPS_DIR/<target>.profdata, $FUZZER_STATS_DIR/<target>.json and a
#   textcov report for fuzz-introspector. Errors are logged but non-fatal.
function run_fuzz_target {
  local target=$1

  # '%1m' will produce separate dump files for every object. For example, if a
  # fuzz target loads a shared library, we will have dumps for both of them.
  local profraw_file="$DUMPS_DIR/$target.%1m.profraw"
  local profraw_file_mask="$DUMPS_DIR/$target.*.profraw"
  local profdata_file="$DUMPS_DIR/$target.profdata"
  local corpus_real="$CORPUS_DIR/${target}"

  # -merge=1 requires an output directory, create a new, empty dir for that.
  local corpus_dummy="$OUT/dummy_corpus_dir_for_${target}"
  rm -rf $corpus_dummy && mkdir -p $corpus_dummy

  # Use -merge=1 instead of -runs=0 because merge is crash resistant and would
  # let to get coverage using all corpus files even if there are crash inputs.
  # Merge should not introduce any significant overhead compared to -runs=0,
  # because (A) corpuses are already minimized; (B) we do not use sancov, and so
  # libFuzzer always finishes merge with an empty output dir.
  # Use 100s timeout instead of 25s as code coverage builds can be very slow.
  local args="-merge=1 -timeout=100 $corpus_dummy $corpus_real"

  export LLVM_PROFILE_FILE=$profraw_file
  # $args is deliberately unquoted so libFuzzer receives separate words.
  timeout $TIMEOUT $OUT/$target $args &> $LOGS_DIR/$target.log
  if (( $? != 0 )); then
    echo "Error occured while running $target:"
    cat $LOGS_DIR/$target.log
  fi

  rm -rf $corpus_dummy
  if (( $(du -c $profraw_file_mask | tail -n 1 | cut -f 1) == 0 )); then
    # Skip fuzz targets that failed to produce profile dumps.
    return 0
  fi

  # FUZZTEST targets are wrapper scripts named "binary@Suite.Test"; the
  # profile data belongs to the underlying binary, i.e. the part before '@'.
  # (Previously written as 'target=(${target//@/ }[0])', which appended a
  # literal "[0]" to an array element and only worked because scalar
  # expansion of an array yields element 0.)
  if [[ $target == *"@"* ]]; then
    target="${target%%@*}"
  fi
  # If necessary translate to latest profraw version, then merge the dumps.
  profraw_update.py $OUT/$target -i $profraw_file_mask
  llvm-profdata merge -j=1 -sparse $profraw_file_mask -o $profdata_file

  # Delete unnecessary and (potentially) large .profraw files.
  rm $profraw_file_mask

  local shared_libraries=$(coverage_helper shared_libs -build-dir=$OUT -object=$target)

  # Per-target summary used by the dashboard.
  llvm-cov export -summary-only -instr-profile=$profdata_file -object=$target \
      $shared_libraries $LLVM_COV_COMMON_ARGS > $FUZZER_STATS_DIR/$target.json

  # For introspector.
  llvm-cov show -instr-profile=$profdata_file -object=$target -line-coverage-gt=0 $shared_libraries $BRANCH_COV_ARGS $LLVM_COV_COMMON_ARGS > ${TEXTCOV_REPORT_DIR}/$target.covreport
}
# Collect Go coverage for one target: replay its corpus under
# -test.coverprofile, then rewrite the profile paths for reporting.
function run_go_fuzz_target {
  local target=$1

  echo "Running go target $target"
  export FUZZ_CORPUS_DIR="$CORPUS_DIR/${target}/"
  export FUZZ_PROFILE_NAME="$DUMPS_DIR/$target.perf"

  # setup for native go fuzzers
  cd $OUT
  mkdir -p "testdata/fuzz/${target}"
  cp -r "${FUZZ_CORPUS_DIR}" "testdata/fuzz/"

  # rewrite libFuzzer corpus to Std Go corpus if native fuzzing
  grep "TestFuzzCorpus" $target > /dev/null 2>&1 && $SYSGOPATH/bin/convertcorpus $target "testdata/fuzz/${target}"
  cd -

  if ! timeout $TIMEOUT $OUT/$target -test.coverprofile $DUMPS_DIR/$target.profdata &> $LOGS_DIR/$target.log; then
    echo "Error occured while running $target:"
    cat $LOGS_DIR/$target.log
  fi

  # cleanup after native go fuzzers
  rm -r "${OUT}/testdata/fuzz/${target}"

  # The Go 1.18 fuzzers are renamed to "*_fuzz_.go" during "infra/helper.py build_fuzzers".
  # They are are therefore refered to as "*_fuzz_.go" in the profdata files.
  # Since the copies named "*_fuzz_.go" do not exist in the file tree during
  # the coverage build, we change the references in the .profdata files
  # to the original file names.
  #sed -i "s/_test.go_fuzz_.go/_test.go/g" $DUMPS_DIR/$target.profdata
  # translate from golangish paths to current absolute paths
  while read i; do
    sed -i $i $DUMPS_DIR/$target.profdata
  done < $OUT/$target.gocovpath
  # cf PATH_EQUIVALENCE_ARGS
  sed -i 's=/='$OUT'/=' $DUMPS_DIR/$target.profdata
  $SYSGOPATH/bin/gocovsum $DUMPS_DIR/$target.profdata > $FUZZER_STATS_DIR/$target.json
}
# Replay the corpus through an Atheris (python) target and stash the
# resulting coverage.py data file for later merging.
function run_python_fuzz_target {
  local target=$1
  local zipped_sources="$DUMPS_DIR/$target.deps.zip"
  local corpus_real="$CORPUS_DIR/${target}"

  # Per-target stats are not computed for python; write an empty JSON
  # object so downstream tooling always finds a stats file.
  echo "{}" > "$FUZZER_STATS_DIR/$target.json"

  # NOTE(review): 'ls -la | wc -l' counts the 'total' header plus '.' and
  # '..', so this slightly over-approximates the corpus size — presumably
  # deliberate slack so Atheris replays every input; confirm before changing.
  local atheris_runs=$(ls -la $corpus_real | wc -l)
  if ! $OUT/$target $corpus_real -atheris_runs=$atheris_runs > $LOGS_DIR/$target.log 2>&1; then
    echo "Error happened getting coverage of $target"
    echo "This is likely because Atheris did not exit gracefully"
    cat $LOGS_DIR/$target.log
    return 0
  fi
  # coverage.py writes '.coverage' into the CWD; keep one copy per target.
  mv .coverage $OUT/.coverage_$target
}
# Replay the corpus through a Jazzer (JVM) target under the JaCoCo agent
# and convert the resulting .exec file into an llvm-cov style summary.
function run_java_fuzz_target {
  local target=$1

  local jacoco_exec="$DUMPS_DIR/$target.exec"
  local dumped_classes_dir="$DUMPS_DIR/${target}_classes/"
  mkdir "$dumped_classes_dir"
  local corpus_real="$CORPUS_DIR/${target}"

  # -merge=1 requires an output directory, create a new, empty dir for that.
  local corpus_dummy="$OUT/dummy_corpus_dir_for_${target}"
  rm -rf $corpus_dummy && mkdir -p $corpus_dummy

  # Attach the JaCoCo agent, excluding Jazzer's own classes from the dump.
  # Use 100s timeout instead of 25s as code coverage builds can be very slow.
  local jacoco_args="destfile=$jacoco_exec,classdumpdir=$dumped_classes_dir,excludes=com.code_intelligence.jazzer.*\\:com.sun.tools.attach.VirtualMachine"
  local args="-merge=1 -timeout=100 --nohooks \
      --additional_jvm_args=-javaagent\\:/opt/jacoco-agent.jar=$jacoco_args \
      $corpus_dummy $corpus_real"

  if ! timeout $TIMEOUT $OUT/$target $args &> $LOGS_DIR/$target.log; then
    echo "Error occured while running $target:"
    cat $LOGS_DIR/$target.log
  fi

  if (( $(du -c $jacoco_exec | tail -n 1 | cut -f 1) == 0 )); then
    # Skip fuzz targets that failed to produce .exec files.
    echo "$target failed to produce .exec file."
    return 0
  fi

  # Generate XML report only as input to jacoco_report_converter.
  # Source files are not needed for the summary.
  local xml_report="$DUMPS_DIR/${target}.xml"
  local summary_file="$FUZZER_STATS_DIR/$target.json"
  java -jar /opt/jacoco-cli.jar report $jacoco_exec \
      --xml $xml_report \
      --classfiles $dumped_classes_dir

  # Write llvm-cov summary file.
  jacoco_report_converter.py $xml_report $summary_file
}
# Collect IstanbulJS/nyc coverage for a Jazzer.js (javascript) target.
function run_javascript_fuzz_target {
  local target=$1
  local corpus_real="$CORPUS_DIR/${target}"

  # -merge=1 requires an output directory, create a new, empty dir for that.
  local corpus_dummy="$OUT/dummy_corpus_dir_for_${target}"
  rm -rf $corpus_dummy && mkdir -p $corpus_dummy

  # IstanbulJS currently does not work when the tested program creates
  # subprocesses. For this reason, we first minimize the corpus removing
  # any crashing inputs so that we can report source-based code coverage
  # with a single sweep over the minimized corpus
  local merge_args="-merge=1 -timeout=100 $corpus_dummy $corpus_real"
  timeout $TIMEOUT $OUT/$target $merge_args &> $LOGS_DIR/$target.log

  # nyc saves the coverage reports in a directory with the default name "coverage"
  local coverage_dir="$DUMPS_DIR/coverage_dir_for_${target}"
  rm -rf $coverage_dir && mkdir -p $coverage_dir

  local nyc_json_coverage_file="$coverage_dir/coverage-final.json"
  local nyc_json_summary_file="$coverage_dir/coverage-summary.json"

  # Second sweep over the (now crash-free) corpus with coverage enabled.
  local args="-runs=0 $corpus_dummy"
  local jazzerjs_args="--coverage --coverageDirectory $coverage_dir --coverageReporters json --coverageReporters json-summary"

  if ! JAZZERJS_EXTRA_ARGS=$jazzerjs_args $OUT/$target $args &> $LOGS_DIR/$target.log; then
    echo "Error occured while running $target:"
    cat $LOGS_DIR/$target.log
  fi

  if [ ! -s $nyc_json_coverage_file ]; then
    # Skip fuzz targets that failed to produce coverage-final.json file.
    echo "$target failed to produce coverage-final.json file."
    return 0
  fi

  cp $nyc_json_coverage_file $FUZZERS_COVERAGE_DUMPS_DIR/$target.json

  local summary_file="$FUZZER_STATS_DIR/$target.json"

  nyc_report_converter.py $nyc_json_summary_file $summary_file
}
# Render an HTML llvm-cov report plus a post-processed JSON summary.
#
# Arguments:
#   $1 - .profdata file
#   $2 - shared libraries (NOTE(review): accepted but never referenced in
#        this body; callers are expected to fold them into $3 — confirm)
#   $3 - object arguments for llvm-cov (first one without -object=)
#   $4 - output directory (recreated from scratch)
function generate_html {
  local profdata=$1
  local shared_libraries=$2
  local objects=$3
  local output_dir=$4

  rm -rf "$output_dir"
  mkdir -p "$output_dir/$PLATFORM"

  local llvm_cov_args="-instr-profile=$profdata $objects $LLVM_COV_COMMON_ARGS"
  llvm-cov show -format=html -output-dir=$output_dir -Xdemangler rcfilt $llvm_cov_args

  # Export coverage summary in JSON format.
  local summary_file=$output_dir/$PLATFORM/summary.json
  llvm-cov export -summary-only $llvm_cov_args > $summary_file

  coverage_helper -v post_process -src-root-dir=/ -summary-file=$summary_file \
      -output-dir=$output_dir $PATH_EQUIVALENCE_ARGS
}
+
|
| 297 |
+
export SYSGOPATH=$GOPATH
|
| 298 |
+
export GOPATH=$OUT/$GOPATH
|
| 299 |
+
# Run each fuzz target, generate raw coverage dumps.
|
| 300 |
+
for fuzz_target in $FUZZ_TARGETS; do
|
| 301 |
+
# Test if fuzz target is a golang one.
|
| 302 |
+
if [[ $FUZZING_LANGUAGE == "go" ]]; then
|
| 303 |
+
# Continue if not a fuzz target.
|
| 304 |
+
if [[ $FUZZING_ENGINE != "none" ]]; then
|
| 305 |
+
grep "FUZZ_CORPUS_DIR" $fuzz_target > /dev/null 2>&1 || grep "testing\.T" $fuzz_target > /dev/null 2>&1 || continue
|
| 306 |
+
fi
|
| 307 |
+
# Log the target in the targets file.
|
| 308 |
+
echo ${fuzz_target} >> $COVERAGE_TARGET_FILE
|
| 309 |
+
|
| 310 |
+
# Run the coverage collection.
|
| 311 |
+
run_go_fuzz_target $fuzz_target &
|
| 312 |
+
elif [[ $FUZZING_LANGUAGE == "python" ]]; then
|
| 313 |
+
echo "Entering python fuzzing"
|
| 314 |
+
# Log the target in the targets file.
|
| 315 |
+
echo ${fuzz_target} >> $COVERAGE_TARGET_FILE
|
| 316 |
+
|
| 317 |
+
# Run the coverage collection.
|
| 318 |
+
run_python_fuzz_target $fuzz_target
|
| 319 |
+
elif [[ $FUZZING_LANGUAGE == "jvm" ]]; then
|
| 320 |
+
# Continue if not a fuzz target.
|
| 321 |
+
if [[ $FUZZING_ENGINE != "none" ]]; then
|
| 322 |
+
grep "LLVMFuzzerTestOneInput" $fuzz_target > /dev/null 2>&1 || continue
|
| 323 |
+
fi
|
| 324 |
+
|
| 325 |
+
echo "Running $fuzz_target"
|
| 326 |
+
# Log the target in the targets file.
|
| 327 |
+
echo ${fuzz_target} >> $COVERAGE_TARGET_FILE
|
| 328 |
+
|
| 329 |
+
# Run the coverage collection.
|
| 330 |
+
run_java_fuzz_target $fuzz_target &
|
| 331 |
+
elif [[ $FUZZING_LANGUAGE == "javascript" ]]; then
|
| 332 |
+
# Continue if not a fuzz target.
|
| 333 |
+
if [[ $FUZZING_ENGINE != "none" ]]; then
|
| 334 |
+
grep "LLVMFuzzerTestOneInput" $fuzz_target > /dev/null 2>&1 || continue
|
| 335 |
+
fi
|
| 336 |
+
|
| 337 |
+
echo "Running $fuzz_target"
|
| 338 |
+
# Log the target in the targets file.
|
| 339 |
+
echo ${fuzz_target} >> $COVERAGE_TARGET_FILE
|
| 340 |
+
|
| 341 |
+
# Run the coverage collection.
|
| 342 |
+
run_javascript_fuzz_target $fuzz_target &
|
| 343 |
+
else
|
| 344 |
+
# Continue if not a fuzz target.
|
| 345 |
+
if [[ $FUZZING_ENGINE != "none" ]]; then
|
| 346 |
+
grep "LLVMFuzzerTestOneInput" $fuzz_target > /dev/null 2>&1 || continue
|
| 347 |
+
fi
|
| 348 |
+
|
| 349 |
+
echo "Running $fuzz_target"
|
| 350 |
+
# Log the target in the targets file.
|
| 351 |
+
echo ${fuzz_target} >> $COVERAGE_TARGET_FILE
|
| 352 |
+
|
| 353 |
+
# Run the coverage collection.
|
| 354 |
+
run_fuzz_target $fuzz_target &
|
| 355 |
+
|
| 356 |
+
# Rewrite object if its a FUZZTEST target
|
| 357 |
+
if [[ $fuzz_target == *"@"* ]]; then
|
| 358 |
+
# Extract fuzztest binary name from fuzztest wrapper script.
|
| 359 |
+
fuzz_target=(${fuzz_target//@/ }[0])
|
| 360 |
+
fi
|
| 361 |
+
if [[ -z $objects ]]; then
|
| 362 |
+
# The first object needs to be passed without -object= flag.
|
| 363 |
+
objects="$fuzz_target"
|
| 364 |
+
else
|
| 365 |
+
objects="$objects -object=$fuzz_target"
|
| 366 |
+
fi
|
| 367 |
+
fi
|
| 368 |
+
|
| 369 |
+
|
| 370 |
+
# Limit the number of processes to be spawned.
|
| 371 |
+
n_child_proc=$(jobs -rp | wc -l)
|
| 372 |
+
while [[ "$n_child_proc" -eq "$NPROC" || "$n_child_proc" -gt "$MAX_PARALLEL_COUNT" ]]; do
|
| 373 |
+
sleep 4
|
| 374 |
+
n_child_proc=$(jobs -rp | wc -l)
|
| 375 |
+
done
|
| 376 |
+
done
|
| 377 |
+
|
| 378 |
+
# Wait for background processes to finish.
|
| 379 |
+
wait
|
| 380 |
+
|
| 381 |
# Per-language report generation from the dumps collected above.
if [[ $FUZZING_LANGUAGE == "go" ]]; then
  echo $DUMPS_DIR
  # Merge the per-target Go cover profiles and render the HTML report.
  $SYSGOPATH/bin/gocovmerge $DUMPS_DIR/*.profdata > fuzz.cov
  gotoolcover -html=fuzz.cov -o $REPORT_ROOT_DIR/index.html
  $SYSGOPATH/bin/gocovsum fuzz.cov > $SUMMARY_FILE
  cp $REPORT_ROOT_DIR/index.html $REPORT_PLATFORM_DIR/index.html
  # Merge the pprof CPU and heap profiles recorded while fuzzing.
  $SYSGOPATH/bin/pprof-merge $DUMPS_DIR/*.perf.cpu.prof
  mv merged.data $REPORT_ROOT_DIR/cpu.prof
  $SYSGOPATH/bin/pprof-merge $DUMPS_DIR/*.perf.heap.prof
  mv merged.data $REPORT_ROOT_DIR/heap.prof
  #TODO some proxy for go tool pprof -http=127.0.0.1:8001 $DUMPS_DIR/cpu.prof
  echo "Finished generating code coverage report for Go fuzz targets."
elif [[ $FUZZING_LANGUAGE == "python" ]]; then
  # Extract source files from all dependency zip folders
  mkdir -p /pythoncovmergedfiles/medio
  PYCOVDIR=/pycovdir/
  mkdir $PYCOVDIR
  for fuzzer in $FUZZ_TARGETS; do
    fuzzer_deps=${fuzzer}.pkg.deps.zip
    unzip $OUT/${fuzzer_deps}
    rsync -r ./medio /pythoncovmergedfiles/medio
    rm -rf ./medio

    # Translate paths in unzipped folders to paths that we can use
    mv $OUT/.coverage_$fuzzer .coverage
    python3 /usr/local/bin/python_coverage_runner_help.py translate /pythoncovmergedfiles/medio
    cp .new_coverage $PYCOVDIR/.coverage_$fuzzer
    cp .new_coverage $OUT/coverage_d_$fuzzer
  done

  # Combine coverage
  cd $PYCOVDIR
  python3 /usr/local/bin/python_coverage_runner_help.py combine .coverage_*
  python3 /usr/local/bin/python_coverage_runner_help.py html
  # Produce all_cov file used by fuzz introspector.
  python3 /usr/local/bin/python_coverage_runner_help.py json -o ${TEXTCOV_REPORT_DIR}/all_cov.json

  # Generate .json with similar format to llvm-cov output.
  python3 /usr/local/bin/python_coverage_runner_help.py \
    convert-to-summary-json ${TEXTCOV_REPORT_DIR}/all_cov.json $SUMMARY_FILE

  # Copy coverage data out
  cp htmlcov/status.json ${TEXTCOV_REPORT_DIR}/html_status.json

  mv htmlcov/* $REPORT_PLATFORM_DIR/
  mv .coverage_* $REPORT_PLATFORM_DIR/
elif [[ $FUZZING_LANGUAGE == "jvm" ]]; then

  # From this point on the script does not tolerate any errors.
  set -e

  # Merge .exec files from the individual targets.
  jacoco_merged_exec=$DUMPS_DIR/jacoco.merged.exec
  java -jar /opt/jacoco-cli.jar merge $DUMPS_DIR/*.exec \
      --destfile $jacoco_merged_exec

  # Prepare classes directory for jacoco process
  classes_dir=$DUMPS_DIR/classes
  mkdir $classes_dir

  # Only copy class files found in $OUT/$SRC to ensure they are
  # lively compiled from the project, avoiding inclusion of
  # dependency classes. This also includes the fuzzer classes.
  find "$OUT/$SRC" -type f -name "*.class" | while read -r class_file; do
    # Skip module-info.class
    if [[ "$(basename "$class_file")" == "module-info.class" ]]; then
      continue
    fi

    # Use javap to extract the fully qualified name of the class and copy it to $classes_dir
    fqn=$(javap -verbose "$class_file" 2>/dev/null | grep "this_class:" | grep -oP '(?<=// ).*')
    if [ -n "$fqn" ]; then
      mkdir -p $classes_dir/$(dirname $fqn)
      cp $class_file $classes_dir/$fqn.class
    fi
  done

  # Heuristically determine source directories based on Maven structure.
  # Always include the $SRC root as it likely contains the fuzzer sources.
  sourcefiles_args=(--sourcefiles $OUT/$SRC)
  source_dirs=$(find $OUT/$SRC -type d -name 'java')
  for source_dir in $source_dirs; do
    sourcefiles_args+=(--sourcefiles "$source_dir")
  done

  # Generate HTML and XML reports.
  xml_report=$REPORT_PLATFORM_DIR/index.xml
  java -jar /opt/jacoco-cli.jar report $jacoco_merged_exec \
      --html $REPORT_PLATFORM_DIR \
      --xml $xml_report \
      --classfiles $classes_dir \
      "${sourcefiles_args[@]}"

  # Also serve the raw exec file and XML report, which can be useful for
  # automated analysis.
  cp $jacoco_merged_exec $REPORT_PLATFORM_DIR/jacoco.exec
  cp $xml_report $REPORT_PLATFORM_DIR/jacoco.xml
  cp $xml_report $TEXTCOV_REPORT_DIR/jacoco.xml

  # Write llvm-cov summary file.
  jacoco_report_converter.py $xml_report $SUMMARY_FILE

  set +e
elif [[ $FUZZING_LANGUAGE == "javascript" ]]; then

  # From this point on the script does not tolerate any errors.
  set -e

  json_report=$MERGED_COVERAGE_DIR/coverage.json
  nyc merge $FUZZERS_COVERAGE_DUMPS_DIR $json_report

  nyc report -t $MERGED_COVERAGE_DIR --report-dir $REPORT_PLATFORM_DIR --reporter=html --reporter=json-summary

  nyc_json_summary_file=$REPORT_PLATFORM_DIR/coverage-summary.json

  # Write llvm-cov summary file.
  nyc_report_converter.py $nyc_json_summary_file $SUMMARY_FILE

  set +e
else

  # From this point on the script does not tolerate any errors.
  set -e

  # Merge all dumps from the individual targets.
  rm -f $PROFILE_FILE
  llvm-profdata merge -sparse $DUMPS_DIR/*.profdata -o $PROFILE_FILE

  # TODO(mmoroz): add script from Chromium for rendering directory view reports.
  # The first path in $objects does not have -object= prefix (llvm-cov format).
  shared_libraries=$(coverage_helper shared_libs -build-dir=$OUT -object=$objects)
  objects="$objects $shared_libraries"

  generate_html $PROFILE_FILE "$shared_libraries" "$objects" "$REPORT_ROOT_DIR"

  # Per target reports.
  for fuzz_target in $FUZZ_TARGETS; do
    # Both values depend only on the wrapper name, so compute them once
    # before stripping a possible FUZZTEST suffix (the two branches of the
    # previous if/else were identical).
    profdata_path=$DUMPS_DIR/$fuzz_target.profdata
    report_dir=$REPORT_BY_TARGET_ROOT_DIR/$fuzz_target
    if [[ $fuzz_target == *"@"* ]]; then
      # Extract the fuzztest binary name (the part before '@') from the
      # wrapper script name. (Previously an accidental array assignment
      # 'fuzz_target=(${fuzz_target//@/ }[0])'.)
      fuzz_target="${fuzz_target%%@*}"
    fi
    if [[ ! -f "$profdata_path" ]]; then
      echo "WARNING: $fuzz_target has no profdata generated."
      continue
    fi

    generate_html $profdata_path "$shared_libraries" "$fuzz_target" "$report_dir"
  done

  set +e
fi

# Make sure report is readable: files readable for everyone, directories
# additionally traversable.
chmod -R +r $REPORT_ROOT_DIR $REPORT_BY_TARGET_ROOT_DIR
find $REPORT_ROOT_DIR $REPORT_BY_TARGET_ROOT_DIR -type d -exec chmod +x {} +

# HTTP_PORT is optional.
set +u
if [[ -n $HTTP_PORT ]]; then
  # Serve the report locally.
  echo "Serving the report on http://127.0.0.1:$HTTP_PORT/linux/index.html"
  cd $REPORT_ROOT_DIR
  python3 -m http.server $HTTP_PORT
fi
local-test-commons-compress-full-01-vuln_2/fuzz-tooling/infra/base-images/base-runner/coverage_helper
ADDED
|
@@ -0,0 +1,17 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
#!/bin/bash -u
# Copyright 2018 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################
# Thin wrapper forwarding all arguments to the coverage_utils.py helper in
# $CODE_COVERAGE_SRC. Quote "$@" (and the script path) so arguments that
# contain spaces or glob characters are passed through verbatim instead of
# being re-split by the shell.
python3 "$CODE_COVERAGE_SRC/coverage_utils.py" "$@"
local-test-commons-compress-full-01-vuln_2/fuzz-tooling/infra/base-images/base-runner/generate_differential_cov_report.py
ADDED
|
@@ -0,0 +1,228 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/usr/bin/env python3
|
| 2 |
+
#
|
| 3 |
+
# Copyright 2023 Google LLC
|
| 4 |
+
#
|
| 5 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 6 |
+
# you may not use this file except in compliance with the License.
|
| 7 |
+
# You may obtain a copy of the License at
|
| 8 |
+
#
|
| 9 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 10 |
+
#
|
| 11 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 12 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 13 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 14 |
+
# See the License for the specific language governing permissions and
|
| 15 |
+
# limitations under the License.
|
| 16 |
+
#
|
| 17 |
+
################################################################################
|
| 18 |
+
"""Script for generating differential coverage reports.
|
| 19 |
+
generate_differential_cov_report.py <profdata-dump-directory> \
|
| 20 |
+
<profdata-directory-to-subtract-from-first> <output-directory>
|
| 21 |
+
"""
|
| 22 |
+
import os
|
| 23 |
+
import shutil
|
| 24 |
+
import subprocess
|
| 25 |
+
import sys
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
class ProfData:
|
| 29 |
+
"""Class representing a profdata file."""
|
| 30 |
+
|
| 31 |
+
def __init__(self, text):
|
| 32 |
+
self.function_profs = []
|
| 33 |
+
for function_prof in text.split('\n\n'):
|
| 34 |
+
if not function_prof:
|
| 35 |
+
continue
|
| 36 |
+
self.function_profs.append(FunctionProf(function_prof))
|
| 37 |
+
|
| 38 |
+
def to_string(self):
|
| 39 |
+
"""Convert back to a string."""
|
| 40 |
+
return '\n'.join(
|
| 41 |
+
[function_prof.to_string() for function_prof in self.function_profs])
|
| 42 |
+
|
| 43 |
+
def find_function(self, function, idx=None):
|
| 44 |
+
"""Find the same function in this profdata."""
|
| 45 |
+
if idx is not None:
|
| 46 |
+
try:
|
| 47 |
+
possibility = self.function_profs[idx]
|
| 48 |
+
if function.func_hash == possibility.func_hash:
|
| 49 |
+
return possibility
|
| 50 |
+
except IndexError:
|
| 51 |
+
pass
|
| 52 |
+
for function_prof in self.function_profs:
|
| 53 |
+
if function_prof.func_hash == function.func_hash:
|
| 54 |
+
return function_prof
|
| 55 |
+
return None
|
| 56 |
+
|
| 57 |
+
def subtract(self, subtrahend):
|
| 58 |
+
"""Subtract subtrahend from this profdata."""
|
| 59 |
+
for idx, function_prof in enumerate(self.function_profs):
|
| 60 |
+
subtrahend_function_prof = subtrahend.find_function(function_prof, idx)
|
| 61 |
+
function_prof.subtract(subtrahend_function_prof)
|
| 62 |
+
|
| 63 |
+
|
| 64 |
+
class FunctionProf:
|
| 65 |
+
"""Profile of a function."""
|
| 66 |
+
FUNC_HASH_COMMENT_LINE = '# Func Hash:'
|
| 67 |
+
NUM_COUNTERS_COMMENT_LINE = '# Num Counters:'
|
| 68 |
+
COUNTER_VALUES_COMMENT_LINE = '# Counter Values:'
|
| 69 |
+
|
| 70 |
+
def __init__(self, text):
|
| 71 |
+
print(text)
|
| 72 |
+
lines = text.splitlines()
|
| 73 |
+
self.function = lines[0]
|
| 74 |
+
assert self.FUNC_HASH_COMMENT_LINE == lines[1]
|
| 75 |
+
self.func_hash = lines[2]
|
| 76 |
+
assert self.NUM_COUNTERS_COMMENT_LINE == lines[3]
|
| 77 |
+
self.num_counters = int(lines[4])
|
| 78 |
+
assert self.COUNTER_VALUES_COMMENT_LINE == lines[5]
|
| 79 |
+
self.counter_values = [1 if int(line) else 0 for line in lines[6:]]
|
| 80 |
+
|
| 81 |
+
def to_string(self):
|
| 82 |
+
"""Convert back to text."""
|
| 83 |
+
lines = [
|
| 84 |
+
self.function,
|
| 85 |
+
self.FUNC_HASH_COMMENT_LINE,
|
| 86 |
+
self.func_hash,
|
| 87 |
+
self.NUM_COUNTERS_COMMENT_LINE,
|
| 88 |
+
str(self.num_counters),
|
| 89 |
+
self.COUNTER_VALUES_COMMENT_LINE,
|
| 90 |
+
] + [str(num) for num in self.counter_values]
|
| 91 |
+
return '\n'.join(lines)
|
| 92 |
+
|
| 93 |
+
def subtract(self, subtrahend_prof):
|
| 94 |
+
"""Subtract this other function from this function."""
|
| 95 |
+
if not subtrahend_prof:
|
| 96 |
+
print(self.function, 'has no subtrahend')
|
| 97 |
+
# Nothing to subtract.
|
| 98 |
+
return
|
| 99 |
+
self.counter_values = [
|
| 100 |
+
max(counter1 - counter2, 0) for counter1, counter2 in zip(
|
| 101 |
+
self.counter_values, subtrahend_prof.counter_values)
|
| 102 |
+
]
|
| 103 |
+
|
| 104 |
+
|
| 105 |
+
def get_profdata_files(directory):
|
| 106 |
+
"""Returns profdata files in |directory|."""
|
| 107 |
+
profdatas = []
|
| 108 |
+
for filename in os.listdir(directory):
|
| 109 |
+
filename = os.path.join(directory, filename)
|
| 110 |
+
if filename.endswith('.profdata'):
|
| 111 |
+
profdatas.append(filename)
|
| 112 |
+
return profdatas
|
| 113 |
+
|
| 114 |
+
|
| 115 |
+
def convert_profdata_to_text(profdata):
|
| 116 |
+
"""Convert a profdata binary file to a profdata text file."""
|
| 117 |
+
profdata_text = f'{profdata}.txt'
|
| 118 |
+
if os.path.exists(profdata_text):
|
| 119 |
+
os.remove(profdata_text)
|
| 120 |
+
command = [
|
| 121 |
+
'llvm-profdata', 'merge', '-j=1', '-sparse', profdata, '--text', '-o',
|
| 122 |
+
profdata_text
|
| 123 |
+
]
|
| 124 |
+
print(command)
|
| 125 |
+
subprocess.run(command, check=True)
|
| 126 |
+
return profdata_text
|
| 127 |
+
|
| 128 |
+
|
| 129 |
+
def convert_text_profdata_to_bin(profdata_text):
|
| 130 |
+
"""Convert a profdata text file to a profdata binary file."""
|
| 131 |
+
profdata = profdata_text.replace('.txt', '').replace('.profdata',
|
| 132 |
+
'') + '.profdata'
|
| 133 |
+
print('bin profdata', profdata)
|
| 134 |
+
if os.path.exists(profdata):
|
| 135 |
+
os.remove(profdata)
|
| 136 |
+
command = [
|
| 137 |
+
'llvm-profdata', 'merge', '-j=1', '-sparse', profdata_text, '-o', profdata
|
| 138 |
+
]
|
| 139 |
+
print(command)
|
| 140 |
+
subprocess.run(command, check=True)
|
| 141 |
+
return profdata
|
| 142 |
+
|
| 143 |
+
|
| 144 |
+
def get_difference(minuend_filename, subtrahend_filename):
|
| 145 |
+
"""Subtract subtrahend_filename from minuend_filename."""
|
| 146 |
+
with open(minuend_filename, 'r', encoding='utf-8') as minuend_file:
|
| 147 |
+
print('minuend', minuend_filename)
|
| 148 |
+
minuend = ProfData(minuend_file.read())
|
| 149 |
+
with open(subtrahend_filename, 'r', encoding='utf-8') as subtrahend_file:
|
| 150 |
+
print('subtrahend', subtrahend_filename)
|
| 151 |
+
subtrahend = ProfData(subtrahend_file.read())
|
| 152 |
+
|
| 153 |
+
minuend.subtract(subtrahend)
|
| 154 |
+
return minuend
|
| 155 |
+
|
| 156 |
+
|
| 157 |
+
def profdatas_to_objects(profdatas):
|
| 158 |
+
"""Get the corresponding objects for each profdata."""
|
| 159 |
+
return [
|
| 160 |
+
os.path.splitext(os.path.basename(profdata))[0] for profdata in profdatas
|
| 161 |
+
]
|
| 162 |
+
|
| 163 |
+
|
| 164 |
+
def generate_differential_cov_reports(minuend_profdatas, subtrahend_profdatas,
|
| 165 |
+
difference_dir):
|
| 166 |
+
"""Calculate the differences between all profdatas and generate differential
|
| 167 |
+
coverage reports."""
|
| 168 |
+
profdata_objects = profdatas_to_objects(minuend_profdatas)
|
| 169 |
+
real_profdata_objects = [
|
| 170 |
+
binobject for binobject in profdata_objects if binobject != 'merged'
|
| 171 |
+
]
|
| 172 |
+
for minuend, subtrahend, binobject in zip(minuend_profdatas,
|
| 173 |
+
subtrahend_profdatas,
|
| 174 |
+
profdata_objects):
|
| 175 |
+
minuend_text = convert_profdata_to_text(minuend)
|
| 176 |
+
subtrahend_text = convert_profdata_to_text(subtrahend)
|
| 177 |
+
difference = get_difference(minuend_text, subtrahend_text)
|
| 178 |
+
basename = os.path.basename(minuend_text)
|
| 179 |
+
difference_text = os.path.join(difference_dir, basename)
|
| 180 |
+
with open(difference_text, 'w', encoding='utf-8') as file_handle:
|
| 181 |
+
file_handle.write(difference.to_string())
|
| 182 |
+
difference_profdata = convert_text_profdata_to_bin(difference_text)
|
| 183 |
+
if not difference_profdata.endswith('merged.profdata'):
|
| 184 |
+
generate_html_report(difference_profdata, [binobject],
|
| 185 |
+
os.path.join(difference_dir, binobject))
|
| 186 |
+
else:
|
| 187 |
+
generate_html_report(difference_profdata, real_profdata_objects,
|
| 188 |
+
os.path.join(difference_dir, 'merged'))
|
| 189 |
+
|
| 190 |
+
|
| 191 |
+
def generate_html_report(profdata, objects, directory):
|
| 192 |
+
"""Generate an HTML coverage report."""
|
| 193 |
+
# TODO(metzman): Deal with shared libs.
|
| 194 |
+
html_dir = os.path.join(directory, 'reports')
|
| 195 |
+
if os.path.exists(html_dir):
|
| 196 |
+
os.remove(html_dir)
|
| 197 |
+
os.makedirs(html_dir)
|
| 198 |
+
out_dir = os.getenv('OUT', '/out')
|
| 199 |
+
command = [
|
| 200 |
+
'llvm-cov', 'show', f'-path-equivalence=/,{out_dir}', '-format=html',
|
| 201 |
+
'-Xdemangler', 'rcfilt', f'-instr-profile={profdata}'
|
| 202 |
+
]
|
| 203 |
+
|
| 204 |
+
objects = [os.path.join(out_dir, binobject) for binobject in objects]
|
| 205 |
+
command += objects + ['-o', html_dir]
|
| 206 |
+
print(' '.join(command))
|
| 207 |
+
subprocess.run(command, check=True)
|
| 208 |
+
|
| 209 |
+
|
| 210 |
+
def main():
|
| 211 |
+
"""Generate differential coverage reports."""
|
| 212 |
+
if len(sys.argv) != 4:
|
| 213 |
+
print(
|
| 214 |
+
f'Usage: {sys.argv[0]} <minuend_dir> <subtrahend_dir> <difference_dir>')
|
| 215 |
+
minuend_dir = sys.argv[1]
|
| 216 |
+
subtrahend_dir = sys.argv[2]
|
| 217 |
+
difference_dir = sys.argv[3]
|
| 218 |
+
if os.path.exists(difference_dir):
|
| 219 |
+
shutil.rmtree(difference_dir)
|
| 220 |
+
os.makedirs(difference_dir, exist_ok=True)
|
| 221 |
+
minuend_profdatas = get_profdata_files(minuend_dir)
|
| 222 |
+
subtrahend_profdatas = get_profdata_files(subtrahend_dir)
|
| 223 |
+
generate_differential_cov_reports(minuend_profdatas, subtrahend_profdatas,
|
| 224 |
+
difference_dir)
|
| 225 |
+
|
| 226 |
+
|
| 227 |
+
if __name__ == '__main__':
|
| 228 |
+
main()
|
local-test-commons-compress-full-01-vuln_2/fuzz-tooling/infra/base-images/base-runner/install_go.sh
ADDED
|
@@ -0,0 +1,41 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/bin/bash -eux
|
| 2 |
+
# Copyright 2022 Google LLC
|
| 3 |
+
#
|
| 4 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 5 |
+
# you may not use this file except in compliance with the License.
|
| 6 |
+
# You may obtain a copy of the License at
|
| 7 |
+
#
|
| 8 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 9 |
+
#
|
| 10 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 11 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 12 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 13 |
+
# See the License for the specific language governing permissions and
|
| 14 |
+
# limitations under the License.
|
| 15 |
+
#
|
| 16 |
+
################################################################################
|
| 17 |
+
|
| 18 |
+
# Install go on x86_64, don't do anything on ARM.
|
| 19 |
+
|
| 20 |
+
case $(uname -m) in
|
| 21 |
+
x86_64)
|
| 22 |
+
# Download and install Go 1.19.
|
| 23 |
+
wget -q https://storage.googleapis.com/golang/getgo/installer_linux -O $SRC/installer_linux
|
| 24 |
+
chmod +x $SRC/installer_linux
|
| 25 |
+
SHELL="bash" $SRC/installer_linux -version 1.19
|
| 26 |
+
rm $SRC/installer_linux
|
| 27 |
+
# Set up Golang coverage modules.
|
| 28 |
+
printf $(find . -name gocoverage)
|
| 29 |
+
cd $GOPATH/gocoverage && /root/.go/bin/go install ./...
|
| 30 |
+
cd convertcorpus && /root/.go/bin/go install .
|
| 31 |
+
cd /root/.go/src/cmd/cover && /root/.go/bin/go build && mv cover $GOPATH/bin/gotoolcover
|
| 32 |
+
;;
|
| 33 |
+
aarch64)
|
| 34 |
+
# Don't install go because installer is not provided.
|
| 35 |
+
echo "Not installing go: aarch64."
|
| 36 |
+
;;
|
| 37 |
+
*)
|
| 38 |
+
echo "Error: unsupported architecture: $(uname -m)"
|
| 39 |
+
exit 1
|
| 40 |
+
;;
|
| 41 |
+
esac
|
local-test-commons-compress-full-01-vuln_2/fuzz-tooling/infra/base-images/base-runner/jacoco_report_converter.py
ADDED
|
@@ -0,0 +1,174 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/usr/bin/env python3
|
| 2 |
+
# Copyright 2021 Google LLC
|
| 3 |
+
#
|
| 4 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 5 |
+
# you may not use this file except in compliance with the License.
|
| 6 |
+
# You may obtain a copy of the License at
|
| 7 |
+
#
|
| 8 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 9 |
+
#
|
| 10 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 11 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 12 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 13 |
+
# See the License for the specific language governing permissions and
|
| 14 |
+
# limitations under the License.
|
| 15 |
+
#
|
| 16 |
+
################################################################################
|
| 17 |
+
"""Helper script for creating an llvm-cov style JSON summary from a JaCoCo XML
|
| 18 |
+
report."""
|
| 19 |
+
import json
|
| 20 |
+
import os
|
| 21 |
+
import sys
|
| 22 |
+
import xml.etree.ElementTree as ET
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
def convert(xml):
|
| 26 |
+
"""Turns a JaCoCo XML report into an llvm-cov JSON summary."""
|
| 27 |
+
summary = {
|
| 28 |
+
'type': 'oss-fuzz.java.coverage.json.export',
|
| 29 |
+
'version': '1.0.0',
|
| 30 |
+
'data': [{
|
| 31 |
+
'totals': {},
|
| 32 |
+
'files': [],
|
| 33 |
+
}],
|
| 34 |
+
}
|
| 35 |
+
|
| 36 |
+
report = ET.fromstring(xml)
|
| 37 |
+
totals = make_element_summary(report)
|
| 38 |
+
summary['data'][0]['totals'] = totals
|
| 39 |
+
|
| 40 |
+
# Since Java compilation does not track source file location, we match
|
| 41 |
+
# coverage info to source files via the full class name, e.g. we search for
|
| 42 |
+
# a path in /out/src ending in foo/bar/Baz.java for the class foo.bar.Baz.
|
| 43 |
+
# Under the assumptions that a given project only ever contains a single
|
| 44 |
+
# version of a class and that no class name appears as a suffix of another
|
| 45 |
+
# class name, we can assign coverage info to every source file matched in that
|
| 46 |
+
# way.
|
| 47 |
+
src_files = list_src_files()
|
| 48 |
+
|
| 49 |
+
for class_element in report.findall('./package/class'):
|
| 50 |
+
# Skip fuzzer classes
|
| 51 |
+
if is_fuzzer_class(class_element):
|
| 52 |
+
continue
|
| 53 |
+
|
| 54 |
+
# Skip non class elements
|
| 55 |
+
if 'sourcefilename' not in class_element.attrib:
|
| 56 |
+
continue
|
| 57 |
+
|
| 58 |
+
class_name = class_element.attrib['name']
|
| 59 |
+
package_name = os.path.dirname(class_name)
|
| 60 |
+
basename = class_element.attrib['sourcefilename']
|
| 61 |
+
# This path is 'foo/Bar.java' for the class element
|
| 62 |
+
# <class name="foo/Bar" sourcefilename="Bar.java">.
|
| 63 |
+
canonical_path = os.path.join(package_name, basename)
|
| 64 |
+
|
| 65 |
+
class_summary = make_element_summary(class_element)
|
| 66 |
+
for src_file in relative_to_src_path(src_files, canonical_path):
|
| 67 |
+
summary['data'][0]['files'].append({
|
| 68 |
+
'filename': src_file,
|
| 69 |
+
'summary': class_summary,
|
| 70 |
+
})
|
| 71 |
+
|
| 72 |
+
return json.dumps(summary)
|
| 73 |
+
|
| 74 |
+
|
| 75 |
+
def list_src_files():
|
| 76 |
+
"""Returns a map from basename to full path for all files in $OUT/$SRC."""
|
| 77 |
+
filename_to_paths = {}
|
| 78 |
+
out_path = os.environ['OUT'] + '/'
|
| 79 |
+
src_path = os.environ['SRC']
|
| 80 |
+
src_in_out = out_path + src_path
|
| 81 |
+
for dirpath, _, filenames in os.walk(src_in_out):
|
| 82 |
+
for filename in filenames:
|
| 83 |
+
full_path = dirpath + '/' + filename
|
| 84 |
+
# Map /out//src/... to /src/...
|
| 85 |
+
file_path = full_path[len(out_path):]
|
| 86 |
+
filename_to_paths.setdefault(filename, []).append(file_path)
|
| 87 |
+
return filename_to_paths
|
| 88 |
+
|
| 89 |
+
|
| 90 |
+
def is_fuzzer_class(class_element):
|
| 91 |
+
"""Check if the class is fuzzer class."""
|
| 92 |
+
method_elements = class_element.find('./method[@name=\"fuzzerTestOneInput\"]')
|
| 93 |
+
if method_elements:
|
| 94 |
+
return True
|
| 95 |
+
|
| 96 |
+
return False
|
| 97 |
+
|
| 98 |
+
|
| 99 |
+
def relative_to_src_path(src_files, canonical_path):
|
| 100 |
+
"""Returns all paths in src_files ending in canonical_path."""
|
| 101 |
+
basename = os.path.basename(canonical_path)
|
| 102 |
+
if basename not in src_files:
|
| 103 |
+
return []
|
| 104 |
+
candidate_paths = src_files[basename]
|
| 105 |
+
return [
|
| 106 |
+
path for path in candidate_paths if path.endswith("/" + canonical_path)
|
| 107 |
+
]
|
| 108 |
+
|
| 109 |
+
|
| 110 |
+
def make_element_summary(element):
|
| 111 |
+
"""Returns a coverage summary for an element in the XML report."""
|
| 112 |
+
summary = {}
|
| 113 |
+
|
| 114 |
+
function_counter = element.find('./counter[@type=\'METHOD\']')
|
| 115 |
+
summary['functions'] = make_counter_summary(function_counter)
|
| 116 |
+
|
| 117 |
+
line_counter = element.find('./counter[@type=\'LINE\']')
|
| 118 |
+
summary['lines'] = make_counter_summary(line_counter)
|
| 119 |
+
|
| 120 |
+
# JaCoCo tracks branch coverage, which counts the covered control-flow edges
|
| 121 |
+
# between llvm-cov's regions instead of the covered regions themselves. For
|
| 122 |
+
# non-trivial code parts, the difference is usually negligible. However, if
|
| 123 |
+
# all methods of a class consist of a single region only (no branches),
|
| 124 |
+
# JaCoCo does not report any branch coverage even if there is instruction
|
| 125 |
+
# coverage. Since this would give incorrect results for CI Fuzz purposes, we
|
| 126 |
+
# increase the regions counter by 1 if there is any amount of instruction
|
| 127 |
+
# coverage.
|
| 128 |
+
instruction_counter = element.find('./counter[@type=\'INSTRUCTION\']')
|
| 129 |
+
has_some_coverage = instruction_counter is not None and int(
|
| 130 |
+
instruction_counter.attrib["covered"]) > 0
|
| 131 |
+
branch_covered_adjustment = 1 if has_some_coverage else 0
|
| 132 |
+
region_counter = element.find('./counter[@type=\'BRANCH\']')
|
| 133 |
+
summary['regions'] = make_counter_summary(
|
| 134 |
+
region_counter, covered_adjustment=branch_covered_adjustment)
|
| 135 |
+
|
| 136 |
+
return summary
|
| 137 |
+
|
| 138 |
+
|
| 139 |
+
def make_counter_summary(counter_element, covered_adjustment=0):
|
| 140 |
+
"""Turns a JaCoCo <counter> element into an llvm-cov totals entry."""
|
| 141 |
+
summary = {}
|
| 142 |
+
covered = covered_adjustment
|
| 143 |
+
missed = 0
|
| 144 |
+
if counter_element is not None:
|
| 145 |
+
covered += int(counter_element.attrib['covered'])
|
| 146 |
+
missed += int(counter_element.attrib['missed'])
|
| 147 |
+
summary['covered'] = covered
|
| 148 |
+
summary['notcovered'] = missed
|
| 149 |
+
summary['count'] = summary['covered'] + summary['notcovered']
|
| 150 |
+
if summary['count'] != 0:
|
| 151 |
+
summary['percent'] = (100.0 * summary['covered']) / summary['count']
|
| 152 |
+
else:
|
| 153 |
+
summary['percent'] = 0
|
| 154 |
+
return summary
|
| 155 |
+
|
| 156 |
+
|
| 157 |
+
def main():
|
| 158 |
+
"""Produces an llvm-cov style JSON summary from a JaCoCo XML report."""
|
| 159 |
+
if len(sys.argv) != 3:
|
| 160 |
+
sys.stderr.write('Usage: %s <path_to_jacoco_xml> <out_path_json>\n' %
|
| 161 |
+
sys.argv[0])
|
| 162 |
+
return 1
|
| 163 |
+
|
| 164 |
+
with open(sys.argv[1], 'r') as xml_file:
|
| 165 |
+
xml_report = xml_file.read()
|
| 166 |
+
json_summary = convert(xml_report)
|
| 167 |
+
with open(sys.argv[2], 'w') as json_file:
|
| 168 |
+
json_file.write(json_summary)
|
| 169 |
+
|
| 170 |
+
return 0
|
| 171 |
+
|
| 172 |
+
|
| 173 |
+
if __name__ == '__main__':
|
| 174 |
+
sys.exit(main())
|
local-test-commons-compress-full-01-vuln_2/fuzz-tooling/infra/base-images/base-runner/run_fuzzer
ADDED
|
@@ -0,0 +1,228 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/bin/bash -eu
|
| 2 |
+
# Copyright 2024 Google LLC
|
| 3 |
+
#
|
| 4 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 5 |
+
# you may not use this file except in compliance with the License.
|
| 6 |
+
# You may obtain a copy of the License at
|
| 7 |
+
#
|
| 8 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 9 |
+
#
|
| 10 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 11 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 12 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 13 |
+
# See the License for the specific language governing permissions and
|
| 14 |
+
# limitations under the License.
|
| 15 |
+
#
|
| 16 |
+
################################################################################
|
| 17 |
+
|
| 18 |
+
# Fuzzer runner. Appends .options arguments and seed corpus to users args.
|
| 19 |
+
# Usage: $0 <fuzzer_name> <fuzzer_args>
|
| 20 |
+
|
| 21 |
+
sysctl -w vm.mmap_rnd_bits=28
|
| 22 |
+
|
| 23 |
+
export PATH=$OUT:$PATH
|
| 24 |
+
cd $OUT
|
| 25 |
+
|
| 26 |
+
DEBUGGER=${DEBUGGER:-}
|
| 27 |
+
|
| 28 |
+
FUZZER=$1
|
| 29 |
+
shift
|
| 30 |
+
|
| 31 |
+
# This env var is set by CIFuzz. CIFuzz fills this directory with the corpus
|
| 32 |
+
# from ClusterFuzz.
|
| 33 |
+
CORPUS_DIR=${CORPUS_DIR:-}
|
| 34 |
+
if [ -z "$CORPUS_DIR" ]
|
| 35 |
+
then
|
| 36 |
+
CORPUS_DIR="/tmp/${FUZZER}_corpus"
|
| 37 |
+
rm -rf $CORPUS_DIR && mkdir -p $CORPUS_DIR
|
| 38 |
+
fi
|
| 39 |
+
|
| 40 |
+
SANITIZER=${SANITIZER:-}
|
| 41 |
+
if [ -z $SANITIZER ]; then
|
| 42 |
+
# If $SANITIZER is not specified (e.g. calling from `reproduce` command), it
|
| 43 |
+
# is not important and can be set to any value.
|
| 44 |
+
SANITIZER="default"
|
| 45 |
+
fi
|
| 46 |
+
|
| 47 |
+
if [[ "$RUN_FUZZER_MODE" = interactive ]]; then
|
| 48 |
+
FUZZER_OUT="$OUT/${FUZZER}_${FUZZING_ENGINE}_${SANITIZER}_out"
|
| 49 |
+
else
|
| 50 |
+
FUZZER_OUT="/tmp/${FUZZER}_${FUZZING_ENGINE}_${SANITIZER}_out"
|
| 51 |
+
fi
|
| 52 |
+
|
| 53 |
+
function get_dictionary() {
|
| 54 |
+
local options_file="$FUZZER.options"
|
| 55 |
+
local dict_file="$FUZZER.dict"
|
| 56 |
+
local dict=""
|
| 57 |
+
if [[ -f "$options_file" ]]; then
|
| 58 |
+
dict=$(sed -n 's/^\s*dict\s*=\s*\(.*\)/\1/p' "$options_file" | tail -1)
|
| 59 |
+
fi
|
| 60 |
+
if [[ -z "$dict" && -f "$dict_file" ]]; then
|
| 61 |
+
dict="$dict_file"
|
| 62 |
+
fi
|
| 63 |
+
[[ -z "$dict" ]] && return
|
| 64 |
+
if [[ "$FUZZING_ENGINE" = "libfuzzer" ]]; then
|
| 65 |
+
printf -- "-dict=%s" "$dict"
|
| 66 |
+
elif [[ "$FUZZING_ENGINE" = "afl" ]]; then
|
| 67 |
+
printf -- "-x %s" "$dict"
|
| 68 |
+
elif [[ "$FUZZING_ENGINE" = "honggfuzz" ]]; then
|
| 69 |
+
printf -- "--dict %s" "$dict"
|
| 70 |
+
elif [[ "$FUZZING_ENGINE" = "centipede" ]]; then
|
| 71 |
+
printf -- "--dictionary %s" "$dict"
|
| 72 |
+
else
|
| 73 |
+
printf "Unexpected FUZZING_ENGINE: $FUZZING_ENGINE, ignoring\n" >&2
|
| 74 |
+
fi
|
| 75 |
+
}
|
| 76 |
+
|
| 77 |
+
function get_extra_binaries() {
|
| 78 |
+
[[ "$FUZZING_ENGINE" != "centipede" ]] && return
|
| 79 |
+
|
| 80 |
+
extra_binaries="$OUT/__centipede_${SANITIZER}/${FUZZER}"
|
| 81 |
+
if compgen -G "$extra_binaries" >> /dev/null; then
|
| 82 |
+
printf -- "--extra_binaries %s" \""$extra_binaries\""
|
| 83 |
+
fi
|
| 84 |
+
|
| 85 |
+
}
|
| 86 |
+
|
| 87 |
+
rm -rf $FUZZER_OUT && mkdir -p $FUZZER_OUT
|
| 88 |
+
|
| 89 |
+
SEED_CORPUS="${FUZZER}_seed_corpus.zip"
|
| 90 |
+
|
| 91 |
+
# TODO: Investigate why this code block is skipped
|
| 92 |
+
# by all default fuzzers in bad_build_check.
|
| 93 |
+
# They all set SKIP_SEED_CORPUS=1.
|
| 94 |
+
if [ -f $SEED_CORPUS ] && [ -z ${SKIP_SEED_CORPUS:-} ]; then
|
| 95 |
+
echo "Using seed corpus: $SEED_CORPUS"
|
| 96 |
+
unzip -o -d ${CORPUS_DIR}/ $SEED_CORPUS > /dev/null
|
| 97 |
+
fi
|
| 98 |
+
|
| 99 |
+
OPTIONS_FILE="${FUZZER}.options"
|
| 100 |
+
CUSTOM_LIBFUZZER_OPTIONS=""
|
| 101 |
+
|
| 102 |
+
if [ -f $OPTIONS_FILE ]; then
|
| 103 |
+
custom_asan_options=$(parse_options.py $OPTIONS_FILE asan)
|
| 104 |
+
if [ ! -z $custom_asan_options ]; then
|
| 105 |
+
export ASAN_OPTIONS="$ASAN_OPTIONS:$custom_asan_options"
|
| 106 |
+
fi
|
| 107 |
+
|
| 108 |
+
custom_msan_options=$(parse_options.py $OPTIONS_FILE msan)
|
| 109 |
+
if [ ! -z $custom_msan_options ]; then
|
| 110 |
+
export MSAN_OPTIONS="$MSAN_OPTIONS:$custom_msan_options"
|
| 111 |
+
fi
|
| 112 |
+
|
| 113 |
+
custom_ubsan_options=$(parse_options.py $OPTIONS_FILE ubsan)
|
| 114 |
+
if [ ! -z $custom_ubsan_options ]; then
|
| 115 |
+
export UBSAN_OPTIONS="$UBSAN_OPTIONS:$custom_ubsan_options"
|
| 116 |
+
fi
|
| 117 |
+
|
| 118 |
+
CUSTOM_LIBFUZZER_OPTIONS=$(parse_options.py $OPTIONS_FILE libfuzzer)
|
| 119 |
+
fi
|
| 120 |
+
|
| 121 |
+
if [[ "$FUZZING_ENGINE" = afl ]]; then
|
| 122 |
+
|
| 123 |
+
# Set afl++ environment options.
|
| 124 |
+
export ASAN_OPTIONS="$ASAN_OPTIONS:abort_on_error=1:symbolize=0:detect_odr_violation=0:"
|
| 125 |
+
export MSAN_OPTIONS="$MSAN_OPTIONS:exit_code=86:symbolize=0"
|
| 126 |
+
export UBSAN_OPTIONS="$UBSAN_OPTIONS:symbolize=0"
|
| 127 |
+
export AFL_I_DONT_CARE_ABOUT_MISSING_CRASHES=1
|
| 128 |
+
export AFL_SKIP_CPUFREQ=1
|
| 129 |
+
export AFL_TRY_AFFINITY=1
|
| 130 |
+
export AFL_FAST_CAL=1
|
| 131 |
+
export AFL_CMPLOG_ONLY_NEW=1
|
| 132 |
+
export AFL_FORKSRV_INIT_TMOUT=30000
|
| 133 |
+
export AFL_IGNORE_PROBLEMS=1
|
| 134 |
+
export AFL_IGNORE_UNKNOWN_ENVS=1
|
| 135 |
+
|
| 136 |
+
# If $OUT/afl_cmplog.txt is present this means the target was compiled for
|
| 137 |
+
# CMPLOG. So we have to add the proper parameters to afl-fuzz.
|
| 138 |
+
test -e "$OUT/afl_cmplog.txt" && AFL_FUZZER_ARGS="$AFL_FUZZER_ARGS -c $OUT/$FUZZER"
|
| 139 |
+
|
| 140 |
+
# If $OUT/afl++.dict we load it as a dictionary for afl-fuzz.
|
| 141 |
+
test -e "$OUT/afl++.dict" && AFL_FUZZER_ARGS="$AFL_FUZZER_ARGS -x $OUT/afl++.dict"
|
| 142 |
+
|
| 143 |
+
# Ensure timeout is a bit larger than 1sec as some of the OSS-Fuzz fuzzers
|
| 144 |
+
# are slower than this.
|
| 145 |
+
AFL_FUZZER_ARGS="$AFL_FUZZER_ARGS -t 5000+"
|
| 146 |
+
|
| 147 |
+
# AFL expects at least 1 file in the input dir.
|
| 148 |
+
echo input > ${CORPUS_DIR}/input
|
| 149 |
+
|
| 150 |
+
CMD_LINE="$OUT/afl-fuzz $AFL_FUZZER_ARGS -i $CORPUS_DIR -o $FUZZER_OUT $(get_dictionary) $* -- $OUT/$FUZZER"
|
| 151 |
+
|
| 152 |
+
echo afl++ setup:
|
| 153 |
+
env|grep AFL_
|
| 154 |
+
cat "$OUT/afl_options.txt"
|
| 155 |
+
|
| 156 |
+
elif [[ "$FUZZING_ENGINE" = honggfuzz ]]; then
|
| 157 |
+
|
| 158 |
+
# Honggfuzz expects at least 1 file in the input dir.
|
| 159 |
+
echo input > $CORPUS_DIR/input
|
| 160 |
+
# --exit_upon_crash: exit whith a first crash seen
|
| 161 |
+
# -V: verify crashes
|
| 162 |
+
# -R (report): save report file to this location
|
| 163 |
+
# -W (working dir): where the crashes go
|
| 164 |
+
# -v (verbose): don't use VTE UI, just stderr
|
| 165 |
+
# -z: use software-instrumentation of clang (trace-pc-guard....)
|
| 166 |
+
# -P: use persistent mode of fuzzing (i.e. LLVMFuzzerTestOneInput)
|
| 167 |
+
# -f: location of the initial (and destination) file corpus
|
| 168 |
+
# -n: number of fuzzing threads (and processes)
|
| 169 |
+
CMD_LINE="$OUT/honggfuzz -n 1 --exit_upon_crash -V -R /tmp/${FUZZER}_honggfuzz.report -W $FUZZER_OUT -v -z -P -f \"$CORPUS_DIR\" $(get_dictionary) $* -- \"$OUT/$FUZZER\""
|
| 170 |
+
|
| 171 |
+
if [[ $(LC_ALL=C grep -P "\x01_LIBHFUZZ_NETDRIVER_BINARY_SIGNATURE_\x02\xFF" "$FUZZER" ) ]]; then
|
| 172 |
+
# Honggfuzz Netdriver port. This must match the port in Clusterfuzz.
|
| 173 |
+
export HFND_TCP_PORT=8666
|
| 174 |
+
fi
|
| 175 |
+
elif [[ "$FUZZING_ENGINE" = centipede ]]; then
|
| 176 |
+
|
| 177 |
+
# Create the work and corpus directory for Centipede.
|
| 178 |
+
CENTIPEDE_WORKDIR="${CENTIPEDE_WORKDIR:-$OUT}"
|
| 179 |
+
|
| 180 |
+
# Centipede only saves crashes to crashes/ in workdir.
|
| 181 |
+
rm -rf $FUZZER_OUT
|
| 182 |
+
|
| 183 |
+
# --workdir: Dir that stores corpus&features in Centipede's own format.
|
| 184 |
+
# --corpus_dir: Location of the initial (and destination) file corpus.
|
| 185 |
+
# --fork_server: Execute the target(s) via the fork server.
|
| 186 |
+
# Run in fork mode to continue fuzzing indefinitely in case of
|
| 187 |
+
# OOMs, timeouts, and crashes.
|
| 188 |
+
# --exit_on_crash=0: Do not exit on crash.
|
| 189 |
+
# --timeout=1200: The process that executes target binary will abort
|
| 190 |
+
# if an input runs >= 1200s.
|
| 191 |
+
# --rss_limit_mb=0: Disable RSS limit.
|
| 192 |
+
# --address_space_limit_mb=0: No address space limit.
|
| 193 |
+
# --binary: The target binary under test without sanitizer.
|
| 194 |
+
# --extra_binary: The target binaries under test with sanitizers.
|
| 195 |
+
CMD_LINE="$OUT/centipede --workdir=$CENTIPEDE_WORKDIR --corpus_dir=\"$CORPUS_DIR\" --fork_server=1 --exit_on_crash=1 --timeout=1200 --rss_limit_mb=4096 --address_space_limit_mb=5120 $(get_dictionary) --binary=\"$OUT/${FUZZER}\" $(get_extra_binaries) $*"
|
| 196 |
+
else
|
| 197 |
+
|
| 198 |
+
CMD_LINE="$OUT/$FUZZER $FUZZER_ARGS $*"
|
| 199 |
+
|
| 200 |
+
if [ -z ${SKIP_SEED_CORPUS:-} ]; then
|
| 201 |
+
CMD_LINE="$CMD_LINE $CORPUS_DIR"
|
| 202 |
+
fi
|
| 203 |
+
|
| 204 |
+
if [[ ! -z ${CUSTOM_LIBFUZZER_OPTIONS} ]]; then
|
| 205 |
+
CMD_LINE="$CMD_LINE $CUSTOM_LIBFUZZER_OPTIONS"
|
| 206 |
+
fi
|
| 207 |
+
|
| 208 |
+
if [[ ! "$CMD_LINE" =~ "-dict=" ]]; then
|
| 209 |
+
if [ -f "$FUZZER.dict" ]; then
|
| 210 |
+
CMD_LINE="$CMD_LINE -dict=$FUZZER.dict"
|
| 211 |
+
fi
|
| 212 |
+
fi
|
| 213 |
+
|
| 214 |
+
CMD_LINE="$CMD_LINE < /dev/null"
|
| 215 |
+
|
| 216 |
+
fi
|
| 217 |
+
|
| 218 |
+
echo $CMD_LINE
|
| 219 |
+
|
| 220 |
+
# Unset OUT so the fuzz target can't rely on it.
|
| 221 |
+
unset OUT
|
| 222 |
+
|
| 223 |
+
if [ ! -z "$DEBUGGER" ]; then
|
| 224 |
+
CMD_LINE="$DEBUGGER $CMD_LINE"
|
| 225 |
+
fi
|
| 226 |
+
|
| 227 |
+
bash -c "$CMD_LINE"
|
| 228 |
+
|
local-test-commons-compress-full-01-vuln_2/fuzz-tooling/infra/base-images/base-runner/targets_list
ADDED
|
@@ -0,0 +1,10 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/bin/bash
|
| 2 |
+
|
| 3 |
+
for binary in $(find $OUT/ -executable -type f); do
|
| 4 |
+
[[ "$binary" != *.so ]] || continue
|
| 5 |
+
[[ $(basename "$binary") != jazzer_driver* ]] || continue
|
| 6 |
+
file "$binary" | grep -e ELF -e "shell script" > /dev/null 2>&1 || continue
|
| 7 |
+
grep "LLVMFuzzerTestOneInput" "$binary" > /dev/null 2>&1 || continue
|
| 8 |
+
|
| 9 |
+
basename "$binary"
|
| 10 |
+
done
|
local-test-commons-compress-full-01-vuln_2/fuzz-tooling/infra/base-images/base-runner/test_all.py
ADDED
|
@@ -0,0 +1,295 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/usr/bin/env python3
|
| 2 |
+
# Copyright 2020 Google LLC
|
| 3 |
+
#
|
| 4 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 5 |
+
# you may not use this file except in compliance with the License.
|
| 6 |
+
# You may obtain a copy of the License at
|
| 7 |
+
#
|
| 8 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 9 |
+
#
|
| 10 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 11 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 12 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 13 |
+
# See the License for the specific language governing permissions and
|
| 14 |
+
# limitations under the License.
|
| 15 |
+
#
|
| 16 |
+
################################################################################
|
| 17 |
+
"""Does bad_build_check on all fuzz targets in $OUT."""
|
| 18 |
+
|
| 19 |
+
import contextlib
|
| 20 |
+
import multiprocessing
|
| 21 |
+
import os
|
| 22 |
+
import re
|
| 23 |
+
import subprocess
|
| 24 |
+
import stat
|
| 25 |
+
import sys
|
| 26 |
+
import tempfile
|
| 27 |
+
|
| 28 |
+
# Parent directory for the fake OUT dirs used to catch hard-coded /out paths.
BASE_TMP_FUZZER_DIR = '/tmp/not-out'

# Any-execute permission mask (owner, group, or other).
EXECUTABLE = stat.S_IEXEC | stat.S_IXGRP | stat.S_IXOTH

# Target names whose bad-build-check failures are tolerated; presence of any
# of these in OUT makes an over-threshold failure pass (see has_ignored_targets).
IGNORED_TARGETS = [
    r'do_stuff_fuzzer', r'checksum_fuzzer', r'fuzz_dump', r'fuzz_keyring',
    r'xmltest', r'fuzz_compression_sas_rle', r'ares_*_fuzzer'
]

# Alternation of fully-anchored patterns: ^name1$|^name2$|...
IGNORED_TARGETS_RE = re.compile('^' + r'$|^'.join(IGNORED_TARGETS) + '$')
|
| 38 |
+
|
| 39 |
+
|
| 40 |
+
def move_directory_contents(src_directory, dst_directory):
  """Moves every entry of |src_directory| into |dst_directory|.

  Shells out to `mv` instead of using shutil because mv preserves file
  permissions; losing them would break CheckFuzzerBuildTest in cifuzz_test.py
  and other callers that run test_all on files outside OSS-Fuzz's real out
  directory.
  """
  entries = [
      os.path.join(src_directory, entry)
      for entry in os.listdir(src_directory)
  ]
  subprocess.check_call(['mv'] + entries + [dst_directory])
|
| 52 |
+
|
| 53 |
+
|
| 54 |
+
def is_elf(filepath):
  """Returns True if the `file` utility identifies |filepath| as ELF."""
  file_output = subprocess.run(['file', filepath],
                               stdout=subprocess.PIPE,
                               check=False).stdout
  return b'ELF' in file_output
|
| 60 |
+
|
| 61 |
+
|
| 62 |
+
def is_shell_script(filepath):
  """Returns True if the `file` utility identifies |filepath| as a shell
  script."""
  file_output = subprocess.run(['file', filepath],
                               stdout=subprocess.PIPE,
                               check=False).stdout
  return b'shell script' in file_output
|
| 68 |
+
|
| 69 |
+
|
| 70 |
+
def find_fuzz_targets(directory):
  """Returns paths to fuzz targets in |directory|."""
  # TODO(https://github.com/google/oss-fuzz/issues/4585): Use libClusterFuzz for
  # this.
  targets = []
  # Engines without a libFuzzer-style entry point skip the symbol check.
  check_for_entry_point = os.getenv('FUZZING_ENGINE') not in {
      'none', 'wycheproof'
  }
  for entry in os.listdir(directory):
    # Helper binaries shipped into OUT are never fuzz targets.
    if entry == 'llvm-symbolizer' or entry.startswith(('afl-', 'jazzer_')):
      continue
    path = os.path.join(directory, entry)
    if not os.path.isfile(path):
      continue
    if not os.stat(path).st_mode & EXECUTABLE:
      continue
    # Fuzz targets can either be ELF binaries or shell scripts (e.g. wrapper
    # scripts for Python and JVM targets or rules_fuzzing builds with runfiles
    # trees).
    if not (is_elf(path) or is_shell_script(path)):
      continue
    if check_for_entry_point:
      with open(path, 'rb') as file_handle:
        if b'LLVMFuzzerTestOneInput' not in file_handle.read():
          continue
    targets.append(path)
  return targets
|
| 99 |
+
|
| 100 |
+
|
| 101 |
+
def do_bad_build_check(fuzz_target):
  """Runs bad_build_check on |fuzz_target|. Returns a
  Subprocess.ProcessResult."""
  print('INFO: performing bad build checks for', fuzz_target)
  auxiliary = []
  if centipede_needs_auxiliaries():
    # Centipede runs unsanitized targets with a separate sanitized auxiliary.
    print('INFO: Finding Centipede\'s auxiliary for target', fuzz_target)
    auxiliary_path = find_centipede_auxiliary(fuzz_target)
    print('INFO: Using auxiliary binary:', auxiliary_path)
    auxiliary.append(auxiliary_path)

  with tempfile.TemporaryDirectory() as temp_centipede_workdir:
    # Point Centipede's workdir at a throwaway directory so the bad build
    # check cannot fill up the disk.
    check_env = os.environ.copy()
    check_env['CENTIPEDE_WORKDIR'] = temp_centipede_workdir
    return subprocess.run(['bad_build_check', fuzz_target] + auxiliary,
                          stderr=subprocess.PIPE,
                          stdout=subprocess.PIPE,
                          env=check_env,
                          check=False)
|
| 123 |
+
|
| 124 |
+
|
| 125 |
+
def get_broken_fuzz_targets(bad_build_results, fuzz_targets):
  """Returns a list of (fuzz_target, result) pairs for the broken targets in
  |fuzz_targets|, where each item in |bad_build_results| is the result of
  bad_build_check on the corresponding element in |fuzz_targets|."""
  return [(fuzz_target, result)
          for result, fuzz_target in zip(bad_build_results, fuzz_targets)
          if result.returncode != 0]
|
| 134 |
+
|
| 135 |
+
|
| 136 |
+
def has_ignored_targets(out_dir):
  """Returns True if |out_dir| has any fuzz targets we are supposed to ignore
  bad build checks of."""
  return any(
      re.match(IGNORED_TARGETS_RE, filename)
      for filename in os.listdir(out_dir))
|
| 144 |
+
|
| 145 |
+
|
| 146 |
+
@contextlib.contextmanager
def use_different_out_dir():
  """Context manager that moves OUT to subdirectory of BASE_TMP_FUZZER_DIR. This
  is useful for catching hardcoding. Note that this sets the environment
  variable OUT and therefore must be run before multiprocessing.Pool is created.
  Resets OUT at the end."""
  # Use a fake OUT directory to catch path hardcoding that breaks on
  # ClusterFuzz.
  # NOTE(review): assumes OUT is set; if unset, move_directory_contents below
  # would fail on a None path — confirm callers always export OUT.
  initial_out = os.getenv('OUT')
  os.makedirs(BASE_TMP_FUZZER_DIR, exist_ok=True)
  # Use a random subdirectory of BASE_TMP_FUZZER_DIR to allow running multiple
  # instances of test_all in parallel (useful for integration testing).
  with tempfile.TemporaryDirectory(dir=BASE_TMP_FUZZER_DIR) as out:
    # Set this so that run_fuzzer which is called by bad_build_check works
    # properly.
    os.environ['OUT'] = out
    # We move the contents of the directory because we can't move the
    # directory itself because it is a mount.
    move_directory_contents(initial_out, out)
    try:
      yield out
    finally:
      # Always restore the real OUT contents and the env var, even if the
      # body raised, so the caller's directory is left intact.
      move_directory_contents(out, initial_out)
      os.environ['OUT'] = initial_out
|
| 170 |
+
|
| 171 |
+
|
| 172 |
+
def test_all_outside_out(allowed_broken_targets_percentage):
  """Wrapper around test_all that changes OUT and returns the result."""
  with use_different_out_dir() as fake_out_dir:
    return test_all(fake_out_dir, allowed_broken_targets_percentage)
|
| 176 |
+
|
| 177 |
+
|
| 178 |
+
def centipede_needs_auxiliaries():
  """Checks if auxiliaries are needed for Centipede."""
  # Centipede always requires unsanitized binaries as the main fuzz targets,
  # and separate sanitized binaries as auxiliaries.
  # 1. Building sanitized binaries with helper.py (i.e., local or GitHub CI):
  # Unsanitized ones will be built automatically into the same docker container.
  # Script bad_build_check tests both
  # a) If main fuzz targets can run with the auxiliaries, and
  # b) If the auxiliaries are built with the correct sanitizers.
  # 2. In Trial build and production build:
  # Two kinds of binaries will be in separated buckets / docker containers.
  # Script bad_build_check tests either
  # a) If the unsanitized binaries can run without the sanitized ones, or
  # b) If the sanitized binaries are built with the correct sanitizers.
  if os.getenv('FUZZING_ENGINE') != 'centipede':
    return False
  if os.getenv('SANITIZER') == 'none':
    return False
  return os.getenv('HELPER') == 'True'
|
| 194 |
+
|
| 195 |
+
|
| 196 |
+
def find_centipede_auxiliary(main_fuzz_target_path):
  """Finds the sanitized binary path that corresponds to |main_fuzz_target| for
  bad_build_check.

  Centipede keeps sanitized auxiliaries in a sibling directory named
  __centipede_<SANITIZER> next to the unsanitized targets. Returns the
  auxiliary's path, or None (after logging an error) when it is missing.
  """
  target_dir, target_name = os.path.split(main_fuzz_target_path)
  sanitized_binary_dir = os.path.join(target_dir,
                                      f'__centipede_{os.getenv("SANITIZER")}')
  sanitized_binary_path = os.path.join(sanitized_binary_dir, target_name)

  if os.path.isfile(sanitized_binary_path):
    return sanitized_binary_path

  # Neither of the following two should ever happen, returns None to indicate
  # an error. (Fix: the original messages were missing a space before the
  # interpolated path, producing e.g. "target/out/fuzzer".)
  if os.path.isdir(sanitized_binary_dir):
    print('ERROR: Unable to identify Centipede\'s sanitized target '
          f'{sanitized_binary_path} in {os.listdir(sanitized_binary_dir)}')
  else:
    print('ERROR: Unable to identify Centipede\'s sanitized target directory '
          f'{sanitized_binary_dir} in {os.listdir(target_dir)}')
  return None
|
| 216 |
+
|
| 217 |
+
|
| 218 |
+
def test_all(out, allowed_broken_targets_percentage):  # pylint: disable=too-many-return-statements
  """Do bad_build_check on all fuzz targets. Returns True on success."""
  # TODO(metzman): Refactor so that we can convert test_one to python.
  fuzz_targets = find_fuzz_targets(out)
  if not fuzz_targets:
    print('ERROR: No fuzz targets found.')
    return False

  if centipede_needs_auxiliaries():
    # Every Centipede target must have a matching sanitized auxiliary.
    for fuzz_target in fuzz_targets:
      if not find_centipede_auxiliary(fuzz_target):
        print(f'ERROR: Couldn\'t find auxiliary for {fuzz_target}.')
        return False

  pool = multiprocessing.Pool()
  bad_build_results = pool.map(do_bad_build_check, fuzz_targets)
  pool.close()
  pool.join()
  broken_targets = get_broken_fuzz_targets(bad_build_results, fuzz_targets)
  broken_targets_count = len(broken_targets)
  if not broken_targets_count:
    return True

  # Parallel runs can fail spuriously (e.g. resource exhaustion), so give
  # each broken target a sequential second chance before reporting it.
  print('Retrying failed fuzz targets sequentially', broken_targets_count)
  pool = multiprocessing.Pool(1)
  retry_targets = [broken_target for broken_target, _ in broken_targets]
  bad_build_results = pool.map(do_bad_build_check, retry_targets)
  pool.close()
  pool.join()
  # Fix: zip the retry results against |retry_targets| (plain paths) rather
  # than |broken_targets| ((target, result) tuples); the original zipped the
  # tuples, so the failure report below printed tuples instead of names.
  broken_targets = get_broken_fuzz_targets(bad_build_results, retry_targets)
  broken_targets_count = len(broken_targets)
  if not broken_targets_count:
    return True

  print('Broken fuzz targets', broken_targets_count)
  total_targets_count = len(fuzz_targets)
  broken_targets_percentage = 100 * broken_targets_count / total_targets_count
  for broken_target, result in broken_targets:
    print(broken_target)
    # Use write because we can't print binary strings.
    sys.stdout.buffer.write(result.stdout + result.stderr + b'\n')

  if broken_targets_percentage > allowed_broken_targets_percentage:
    print('ERROR: {broken_targets_percentage}% of fuzz targets seem to be '
          'broken. See the list above for a detailed information.'.format(
              broken_targets_percentage=broken_targets_percentage))
    if has_ignored_targets(out):
      print('Build check automatically passing because of ignored targets.')
      return True
    return False
  print('{total_targets_count} fuzzers total, {broken_targets_count} '
        'seem to be broken ({broken_targets_percentage}%).'.format(
            total_targets_count=total_targets_count,
            broken_targets_count=broken_targets_count,
            broken_targets_percentage=broken_targets_percentage))
  return True
|
| 276 |
+
|
| 277 |
+
|
| 278 |
+
def get_allowed_broken_targets_percentage():
  """Returns the value of the environment value
  'ALLOWED_BROKEN_TARGETS_PERCENTAGE' as an int or returns a reasonable
  default."""
  configured = os.getenv('ALLOWED_BROKEN_TARGETS_PERCENTAGE')
  if configured:
    return int(configured)
  # Unset or empty: fall back to the default threshold.
  return 10
|
| 283 |
+
|
| 284 |
+
|
| 285 |
+
def main():
  """Does bad_build_check on all fuzz targets in parallel. Returns 0 on success.
  Returns 1 on failure."""
  threshold = get_allowed_broken_targets_percentage()
  return 0 if test_all_outside_out(threshold) else 1
|
| 292 |
+
|
| 293 |
+
|
| 294 |
+
if __name__ == '__main__':
|
| 295 |
+
sys.exit(main())
|
local-test-commons-compress-full-01-vuln_2/fuzz-tooling/infra/cifuzz/filestore/no_filestore/__init__.py
ADDED
|
@@ -0,0 +1,51 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2021 Google LLC
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
# you may not use this file except in compliance with the License.
|
| 5 |
+
# You may obtain a copy of the License at
|
| 6 |
+
#
|
| 7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
#
|
| 9 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
# See the License for the specific language governing permissions and
|
| 13 |
+
# limitations under the License.
|
| 14 |
+
"""Empty filestore implementation for platforms that haven't implemented it."""
|
| 15 |
+
import logging
|
| 16 |
+
|
| 17 |
+
import filestore
|
| 18 |
+
|
| 19 |
+
# pylint:disable=no-self-use,unused-argument
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
class NoFilestore(filestore.BaseFilestore):
  """Empty Filestore implementation.

  Used on platforms without filestore support: every upload/download method
  is a no-op that only logs, so the rest of the CIFuzz pipeline can run
  unchanged.
  """

  def upload_crashes(self, name, directory):
    """Noop implementation of upload_crashes."""
    logging.info('Not uploading crashes because no Filestore.')

  def upload_corpus(self, name, directory, replace=False):
    """Noop implementation of upload_corpus."""
    logging.info('Not uploading corpus because no Filestore.')

  def upload_build(self, name, directory):
    """Noop implementation of upload_build."""
    logging.info('Not uploading build because no Filestore.')

  def upload_coverage(self, name, directory):
    """Noop implementation of upload_coverage."""
    logging.info('Not uploading coverage because no Filestore.')

  def download_corpus(self, name, dst_directory):
    """Noop implementation of download_corpus."""
    logging.info('Not downloading corpus because no Filestore.')

  def download_build(self, name, dst_directory):
    """Noop implementation of download_build."""
    logging.info('Not downloading build because no Filestore.')

  def download_coverage(self, name, dst_directory):
    """Noop implementation of download_coverage."""
    logging.info('Not downloading coverage because no Filestore.')
|
local-test-commons-compress-full-01-vuln_2/fuzz-tooling/infra/tools/wycheproof/.gitignore
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
wycheproof.zip
|
local-test-commons-compress-full-01-vuln_2/fuzz-tooling/infra/tools/wycheproof/generate_job.py
ADDED
|
@@ -0,0 +1,50 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/usr/bin/env python3
|
| 2 |
+
# Copyright 2022 Google LLC
|
| 3 |
+
#
|
| 4 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 5 |
+
# you may not use this file except in compliance with the License.
|
| 6 |
+
# You may obtain a copy of the License at
|
| 7 |
+
#
|
| 8 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 9 |
+
#
|
| 10 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 11 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 12 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 13 |
+
# See the License for the specific language governing permissions and
|
| 14 |
+
# limitations under the License.
|
| 15 |
+
#
|
| 16 |
+
################################################################################
|
| 17 |
+
"""Script for generating an OSS-Fuzz job for a wycheproof project."""
|
| 18 |
+
import sys
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
def main():
  """Usage generate_job.py <project>.

  Prints the ClusterFuzz job name and definition for a wycheproof project.
  Returns 0 on success, 1 on bad usage.
  """
  # Fix: guard against a missing argument instead of crashing with IndexError.
  if len(sys.argv) != 2:
    print('Usage: generate_job.py <project>', file=sys.stderr)
    return 1
  project = sys.argv[1]
  print(f'Name: wycheproof_nosanitizer_{project}')
  job_definition = f"""CUSTOM_BINARY = False
BAD_BUILD_CHECK = False
APP_NAME = WycheproofTarget.bash
THREAD_ALIVE_CHECK_INTERVAL = 10
TEST_TIMEOUT = 3600
CRASH_RETRIES = 1
AGGREGATE_COVERAGE = False
TESTCASE_COVERAGE = False
FILE_GITHUB_ISSUE = False
MANAGED = False
MAX_FUZZ_THREADS = 1
RELEASE_BUILD_BUCKET_PATH = gs://clusterfuzz-builds-wycheproof/{project}/{project}-none-([0-9]+).zip
PROJECT_NAME = {project}
SUMMARY_PREFIX = {project}
REVISION_VARS_URL = https://commondatastorage.googleapis.com/clusterfuzz-builds-wycheproof/{project}/{project}-none-%s.srcmap.json
FUZZ_LOGS_BUCKET = {project}-logs.clusterfuzz-external.appspot.com
CORPUS_BUCKET = {project}-corpus.clusterfuzz-external.appspot.com
QUARANTINE_BUCKET = {project}-quarantine.clusterfuzz-external.appspot.com
BACKUP_BUCKET = {project}-backup.clusterfuzz-external.appspot.com
AUTOMATIC_LABELS = Proj-{project},Engine-wycheproof
"""
  print(job_definition)
  return 0


if __name__ == '__main__':
  sys.exit(main())
|