diff --git a/local-test-tika-delta-02/fuzz-tooling/infra/base-images/README.md b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/README.md new file mode 100644 index 0000000000000000000000000000000000000000..e155e2cb91f146dcac565bc60705d01b8ac74c98 --- /dev/null +++ b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/README.md @@ -0,0 +1,6 @@ +Building all infra images: + +```bash +# run from project root +infra/base-images/all.sh +``` diff --git a/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder-fuzzbench/fuzzbench_build b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder-fuzzbench/fuzzbench_build new file mode 100644 index 0000000000000000000000000000000000000000..7231fe755ff0c02f9ff6dba1e5235505095e7aa6 --- /dev/null +++ b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder-fuzzbench/fuzzbench_build @@ -0,0 +1,28 @@ +#! /bin/bash -eux +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +################################################################################ + +# TODO(metzman): Do this in a docket image so we don't need to waste time +# reinstalling. +PYTHONPATH=$FUZZBENCH_PATH python3 -B -u -c "from fuzzers.$FUZZING_ENGINE import fuzzer; fuzzer.build()" + +if [ "$FUZZING_ENGINE" = "coverage" ]; then + cd $OUT + mkdir -p filestore/oss-fuzz-on-demand/coverage-binaries + # We expect an error regarding leading slashes. Just assume this step succeeds. 
+ # TODO(metzman): Fix this when I get a chance. + tar -czvf filestore/oss-fuzz-on-demand/coverage-binaries/coverage-build-$PROJECT.tar.gz * /src /work || exit 0 +fi diff --git a/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder-fuzzbench/fuzzbench_install_dependencies b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder-fuzzbench/fuzzbench_install_dependencies new file mode 100644 index 0000000000000000000000000000000000000000..94c0b8764805a90e348eed66d70816c723e42297 --- /dev/null +++ b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder-fuzzbench/fuzzbench_install_dependencies @@ -0,0 +1,22 @@ +#! /bin/bash -eux +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +################################################################################ + +apt-get update && apt-get install -y gcc gfortran python-dev libopenblas-dev liblapack-dev cython libpq-dev +wget -O /tmp/requirements.txt https://raw.githubusercontent.com/google/fuzzbench/master/requirements.txt +pip3 install pip --upgrade +CFLAGS= CXXFLAGS= pip3 install -r /tmp/requirements.txt +rm /tmp/requirements.txt diff --git a/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder-go/Dockerfile b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder-go/Dockerfile new file mode 100644 index 0000000000000000000000000000000000000000..89333395375de51f083062a9c0c9d9985e8296f2 --- /dev/null +++ b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder-go/Dockerfile @@ -0,0 +1,33 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +################################################################################ + +ARG IMG_TAG=latest +FROM ghcr.io/aixcc-finals/base-builder:${IMG_TAG} + +# Set up Golang environment variables (copied from /root/.bash_profile). +ENV GOPATH /root/go + +# /root/.go/bin is for the standard Go binaries (i.e. go, gofmt, etc). +# $GOPATH/bin is for the binaries from the dependencies installed via "go get". +ENV PATH $PATH:/root/.go/bin:$GOPATH/bin + +COPY gosigfuzz.c $GOPATH/gosigfuzz/ + +RUN install_go.sh + +# TODO(jonathanmetzman): Install this file using install_go.sh. 
+COPY ossfuzz_coverage_runner.go \ + $GOPATH/ diff --git a/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder-go/ossfuzz_coverage_runner.go b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder-go/ossfuzz_coverage_runner.go new file mode 100644 index 0000000000000000000000000000000000000000..ab2504888105835a80f6c0d1c0192bd724e2d52e --- /dev/null +++ b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder-go/ossfuzz_coverage_runner.go @@ -0,0 +1,80 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package mypackagebeingfuzzed + +import ( + "io/fs" + "io/ioutil" + "os" + "path/filepath" + "runtime/pprof" + "testing" +) + +func TestFuzzCorpus(t *testing.T) { + dir := os.Getenv("FUZZ_CORPUS_DIR") + if dir == "" { + t.Logf("No fuzzing corpus directory set") + return + } + filename := "" + defer func() { + if r := recover(); r != nil { + t.Error("Fuzz panicked in "+filename, r) + } + }() + profname := os.Getenv("FUZZ_PROFILE_NAME") + if profname != "" { + f, err := os.Create(profname + ".cpu.prof") + if err != nil { + t.Logf("error creating profile file %s\n", err) + } else { + _ = pprof.StartCPUProfile(f) + } + } + _, err := ioutil.ReadDir(dir) + if err != nil { + t.Logf("Not fuzzing corpus directory %s", err) + return + } + // recurse for regressions subdirectory + err = filepath.Walk(dir, func(fname string, info fs.FileInfo, err error) error { + if info.IsDir() { + return nil + } + data, err := ioutil.ReadFile(fname) + if err != nil { + t.Error("Failed to read corpus file", err) + return err + } + filename = fname + FuzzFunction(data) + return nil + }) + if err != nil { + t.Error("Failed to run corpus", err) + } + if profname != "" { + pprof.StopCPUProfile() + f, err := os.Create(profname + ".heap.prof") + if err != nil { + t.Logf("error creating heap profile file %s\n", err) + } + if err = pprof.WriteHeapProfile(f); err != nil { + t.Logf("error writing heap profile file %s\n", err) + } + f.Close() + } +} diff --git a/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder-python/Dockerfile b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder-python/Dockerfile new file mode 100644 index 0000000000000000000000000000000000000000..df40041305a859953acd669a74095fb2d9a7b249 --- /dev/null +++ b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder-python/Dockerfile @@ -0,0 +1,20 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in 
compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +################################################################################ + +ARG IMG_TAG=latest +FROM ghcr.io/aixcc-finals/base-builder:${IMG_TAG} + +RUN install_python.sh diff --git a/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/bash_parser.py b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/bash_parser.py new file mode 100644 index 0000000000000000000000000000000000000000..1d816992d009774c01a438023beb20c15162b7ea --- /dev/null +++ b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/bash_parser.py @@ -0,0 +1,235 @@ +#!/usr/bin/python3 +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os +import sys + +from glob import glob + +import bashlex + + +def find_all_bash_scripts_in_src(): + """Finds all bash scripts that exist in SRC/. This is used to idenfiy scripts + that may be needed for reading during the AST parsing. 
This is the case + when a given build script calls another build script, then we need to + read those.""" + all_local_scripts = [ + y for x in os.walk('/src/') for y in glob(os.path.join(x[0], '*.sh')) + ] + scripts_we_care_about = [] + to_ignore = {'aflplusplus', 'honggfuzz', '/fuzztest', '/centipede'} + for s in all_local_scripts: + if any([x for x in to_ignore if x in s]): + continue + scripts_we_care_about.append(s) + + print(scripts_we_care_about) + return scripts_we_care_about + + +def should_discard_command(ast_tree) -> bool: + """Returns True if the command shuold be avoided, otherwise False""" + try: + first_word = ast_tree.parts[0].word + except: # pylint: disable=bare-except + return False + + if ('cmake' in first_word and + any('--build' in part.word for part in ast_tree.parts)): + return False + + cmds_to_avoid_replaying = { + 'configure', 'autoheader', 'autoconf', 'autoreconf', 'cmake', 'autogen.sh' + } + if any([cmd for cmd in cmds_to_avoid_replaying if cmd in first_word]): + return True + + # Avoid all "make clean" calls. We dont want to erase previously build + # files. + try: + second_word = ast_tree.parts[1].word + except: # pylint: disable=bare-except + return False + if 'make' in first_word and 'clean' in second_word: + return True + + # No match was found to commands we dont want to build. There is no + # indication we shuold avoid. 
+ return False + + +def is_local_redirection(ast_node, all_local_scripts): + """Return the list of scripts corresponding to the command, in case + the command is an execution of a local script.""" + # print("Checking") + + # Capture local script called with ./random/path/build.sh + + if len(ast_node.parts) >= 2: + try: + ast_node.parts[0].word + except: + return [] + if ast_node.parts[0].word == '.': + suffixes_matching = [] + #print(ast_node.parts[1].word) + for bash_script in all_local_scripts: + #print("- %s"%(bash_script)) + cmd_to_exec = ast_node.parts[1].word.replace('$SRC', 'src') + if bash_script.endswith(cmd_to_exec): + suffixes_matching.append(bash_script) + #print(suffixes_matching) + return suffixes_matching + # Capture a local script called with $SRC/random/path/build.sh + if len(ast_node.parts) >= 1: + if '$SRC' in ast_node.parts[0].word: + suffixes_matching = [] + print(ast_node.parts[0].word) + for bash_script in all_local_scripts: + print("- %s" % (bash_script)) + cmd_to_exec = ast_node.parts[0].word.replace('$SRC', 'src') + if bash_script.endswith(cmd_to_exec): + suffixes_matching.append(bash_script) + print(suffixes_matching) + return suffixes_matching + + return [] + + +def handle_ast_command(ast_node, all_scripts_in_fs, raw_script): + """Generate bash script string for command node""" + new_script = '' + if should_discard_command(ast_node): + return '' + + matches = is_local_redirection(ast_node, all_scripts_in_fs) + if len(matches) == 1: + new_script += parse_script(matches[0], all_scripts_in_fs) + '\n' + return '' + + # Extract the command from the script string + idx_start = ast_node.pos[0] + idx_end = ast_node.pos[1] + new_script += raw_script[idx_start:idx_end] + #new_script += '\n' + + # If mkdir is used, then ensure that '-p' is provided, as + # otherwise we will run into failures. We don't have to worry + # about multiple uses of -p as `mkdir -p -p -p`` is valid. 
+ new_script = new_script.replace('mkdir', 'mkdir -p') + return new_script + + +def handle_ast_list(ast_node, all_scripts_in_fs, raw_script): + """Handles bashlex AST list.""" + new_script = '' + try_hard = 1 + + if not try_hard: + list_start = ast_node.pos[0] + list_end = ast_node.pos[1] + new_script += raw_script[list_start:list_end] # + '\n' + else: + # This is more refined logic. Ideally, this should work, but it's a bit + # more intricate to get right due to e.g. white-space between positions + # and more extensive parsing needed. We don't neccesarily need this + # level of success rate for what we're trying to achieve, so am disabling + # this for now. + for part in ast_node.parts: + if part.kind == 'list': + new_script += handle_ast_list(part, all_scripts_in_fs, raw_script) + elif part.kind == 'command': + new_script += handle_ast_command(part, all_scripts_in_fs, raw_script) + else: + idx_start = part.pos[0] + idx_end = part.pos[1] + new_script += raw_script[idx_start:idx_end] + new_script += ' ' + + # Make sure what was created is valid syntax, and otherwise return empty + try: + bashlex.parse(new_script) + except: # pylint: disable=bare-except + # Maybe return the original here instead of skipping? 
+ return '' + return new_script + + +def handle_ast_compound(ast_node, all_scripts_in_fs, raw_script): + """Handles bashlex compound AST node.""" + new_script = '' + list_start = ast_node.pos[0] + list_end = ast_node.pos[1] + new_script += raw_script[list_start:list_end] + '\n' + return new_script + + +def handle_node(ast_node, all_scripts_in_fs, build_script): + """Generates a bash script string for a given node""" + if ast_node.kind == 'command': + return handle_ast_command(ast_node, all_scripts_in_fs, build_script) + elif ast_node.kind == 'list': + return handle_ast_list(ast_node, all_scripts_in_fs, build_script) + elif ast_node.kind == 'compound': + print('todo: handle compound') + return handle_ast_compound(ast_node, all_scripts_in_fs, build_script) + elif ast_node.kind == 'pipeline': + # Not supported + return '' + else: + raise Exception(f'Missing node handling: {ast_node.kind}') + + +def parse_script(bash_script, all_scripts) -> str: + """Top-level bash script parser""" + new_script = '' + with open(bash_script, 'r', encoding='utf-8') as f: + build_script = f.read() + try: + parts = bashlex.parse(build_script) + except bashlex.errors.ParsingError: + return '' + for part in parts: + new_script += handle_node(part, all_scripts, build_script) + new_script += '\n' + print("-" * 45) + print(part.kind) + print(part.dump()) + + return new_script + + +def main(): + """Main function""" + all_scripts = find_all_bash_scripts_in_src() + replay_bash_script = parse_script(sys.argv[1], all_scripts) + + print("REPLAYABLE BASH SCRIPT") + print("#" * 60) + print(replay_bash_script) + print("#" * 60) + + out_dir = os.getenv('OUT', '/out') + with open(f'{out_dir}/replay-build-script.sh', 'w', encoding='utf-8') as f: + f.write(replay_bash_script) + + src_dir = os.getenv('SRC', '/src') + with open(f'{src_dir}/replay_build.sh', 'w', encoding='utf-8') as f: + f.write(replay_bash_script) + + +if __name__ == "__main__": + main() diff --git 
a/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/bisect_clang_test.py b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/bisect_clang_test.py new file mode 100644 index 0000000000000000000000000000000000000000..a11bf8640d787181d6e35df225c9f17098d02619 --- /dev/null +++ b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/bisect_clang_test.py @@ -0,0 +1,294 @@ +# Copyright 2019 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +################################################################################ +"""Tests for bisect_clang.py""" +import os +from unittest import mock +import unittest + +import bisect_clang + +FILE_DIRECTORY = os.path.dirname(__file__) +LLVM_REPO_PATH = '/llvm-project' + + +def get_git_command(*args): + """Returns a git command for the LLVM repo with |args| as arguments.""" + return ['git', '-C', LLVM_REPO_PATH] + list(args) + + +def patch_environ(testcase_obj): + """Patch environment.""" + env = {} + patcher = mock.patch.dict(os.environ, env) + testcase_obj.addCleanup(patcher.stop) + patcher.start() + + +class BisectClangTestMixin: # pylint: disable=too-few-public-methods + """Useful mixin for bisect_clang unittests.""" + + def setUp(self): # pylint: disable=invalid-name + """Initialization method for unittests.""" + patch_environ(self) + os.environ['SRC'] = '/src' + os.environ['WORK'] = '/work' + + +class GetClangBuildEnvTest(BisectClangTestMixin, unittest.TestCase): + """Tests for get_clang_build_env.""" + + def test_cflags(self): + """Test that CFLAGS are not used compiling clang.""" + os.environ['CFLAGS'] = 'blah' + self.assertNotIn('CFLAGS', bisect_clang.get_clang_build_env()) + + def test_cxxflags(self): + """Test that CXXFLAGS are not used compiling clang.""" + os.environ['CXXFLAGS'] = 'blah' + self.assertNotIn('CXXFLAGS', bisect_clang.get_clang_build_env()) + + def test_other_variables(self): + """Test that other env vars are used when compiling clang.""" + key = 'other' + value = 'blah' + os.environ[key] = value + self.assertEqual(value, bisect_clang.get_clang_build_env()[key]) + + +def read_test_data(filename): + """Returns data from |filename| in the test_data directory.""" + with open(os.path.join(FILE_DIRECTORY, 'test_data', filename)) as file_handle: + return file_handle.read() + + +class SearchBisectOutputTest(BisectClangTestMixin, unittest.TestCase): + """Tests for search_bisect_output.""" + + def test_search_bisect_output(self): + """Test 
that search_bisect_output finds the responsible commit when one + exists.""" + test_data = read_test_data('culprit-commit.txt') + self.assertEqual('ac9ee01fcbfac745aaedca0393a8e1c8a33acd8d', + bisect_clang.search_bisect_output(test_data)) + + def test_search_bisect_output_none(self): + """Test that search_bisect_output doesnt find a non-existent culprit + commit.""" + self.assertIsNone(bisect_clang.search_bisect_output('hello')) + + +def create_mock_popen( + output=bytes('', 'utf-8'), err=bytes('', 'utf-8'), returncode=0): + """Creates a mock subprocess.Popen.""" + + class MockPopen: + """Mock subprocess.Popen.""" + commands = [] + testcases_written = [] + + def __init__(self, command, *args, **kwargs): # pylint: disable=unused-argument + """Inits the MockPopen.""" + stdout = kwargs.pop('stdout', None) + self.command = command + self.commands.append(command) + self.stdout = None + self.stderr = None + self.returncode = returncode + if hasattr(stdout, 'write'): + self.stdout = stdout + + def communicate(self, input_data=None): # pylint: disable=unused-argument + """Mock subprocess.Popen.communicate.""" + if self.stdout: + self.stdout.write(output) + + if self.stderr: + self.stderr.write(err) + + return output, err + + def poll(self, input_data=None): # pylint: disable=unused-argument + """Mock subprocess.Popen.poll.""" + return self.returncode + + return MockPopen + + +def mock_prepare_build_impl(llvm_project_path): # pylint: disable=unused-argument + """Mocked prepare_build function.""" + return '/work/llvm-build' + + +class BuildClangTest(BisectClangTestMixin, unittest.TestCase): + """Tests for build_clang.""" + + def test_build_clang_test(self): + """Tests that build_clang works as intended.""" + with mock.patch('subprocess.Popen', create_mock_popen()) as mock_popen: + with mock.patch('bisect_clang.prepare_build', mock_prepare_build_impl): + llvm_src_dir = '/src/llvm-project' + bisect_clang.build_clang(llvm_src_dir) + self.assertEqual([['ninja', '-C', 
'/work/llvm-build', 'install']], + mock_popen.commands) + + +class GitRepoTest(BisectClangTestMixin, unittest.TestCase): + """Tests for GitRepo.""" + + # TODO(metzman): Mock filesystem. Until then, use a real directory. + + def setUp(self): + super().setUp() + self.git = bisect_clang.GitRepo(LLVM_REPO_PATH) + self.good_commit = 'good_commit' + self.bad_commit = 'bad_commit' + self.test_command = 'testcommand' + + def test_do_command(self): + """Test do_command creates a new process as intended.""" + # TODO(metzman): Test directory changing behavior. + command = ['subcommand', '--option'] + with mock.patch('subprocess.Popen', create_mock_popen()) as mock_popen: + self.git.do_command(command) + self.assertEqual([get_git_command('subcommand', '--option')], + mock_popen.commands) + + def _test_test_start_commit_unexpected(self, label, commit, returncode): + """Tests test_start_commit works as intended when the test returns an + unexpected value.""" + + def mock_execute_impl(command, *args, **kwargs): # pylint: disable=unused-argument + if command == self.test_command: + return returncode, '', '' + return 0, '', '' + + with mock.patch('bisect_clang.execute', mock_execute_impl): + with mock.patch('bisect_clang.prepare_build', mock_prepare_build_impl): + with self.assertRaises(bisect_clang.BisectError): + self.git.test_start_commit(commit, label, self.test_command) + + def test_test_start_commit_bad_zero(self): + """Tests test_start_commit works as intended when the test on the first bad + commit returns 0.""" + self._test_test_start_commit_unexpected('bad', self.bad_commit, 0) + + def test_test_start_commit_good_nonzero(self): + """Tests test_start_commit works as intended when the test on the first good + commit returns nonzero.""" + self._test_test_start_commit_unexpected('good', self.good_commit, 1) + + def test_test_start_commit_good_zero(self): + """Tests test_start_commit works as intended when the test on the first good + commit returns 0.""" + 
self._test_test_start_commit_expected('good', self.good_commit, 0) # pylint: disable=no-value-for-parameter + + @mock.patch('bisect_clang.build_clang') + def _test_test_start_commit_expected(self, label, commit, returncode, + mock_build_clang): + """Tests test_start_commit works as intended when the test returns an + expected value.""" + command_args = [] + + def mock_execute_impl(command, *args, **kwargs): # pylint: disable=unused-argument + command_args.append(command) + if command == self.test_command: + return returncode, '', '' + return 0, '', '' + + with mock.patch('bisect_clang.execute', mock_execute_impl): + self.git.test_start_commit(commit, label, self.test_command) + self.assertEqual([ + get_git_command('checkout', commit), self.test_command, + get_git_command('bisect', label) + ], command_args) + mock_build_clang.assert_called_once_with(LLVM_REPO_PATH) + + def test_test_start_commit_bad_nonzero(self): + """Tests test_start_commit works as intended when the test on the first bad + commit returns nonzero.""" + self._test_test_start_commit_expected('bad', self.bad_commit, 1) # pylint: disable=no-value-for-parameter + + @mock.patch('bisect_clang.GitRepo.test_start_commit') + def test_bisect_start(self, mock_test_start_commit): + """Tests bisect_start works as intended.""" + with mock.patch('subprocess.Popen', create_mock_popen()) as mock_popen: + self.git.bisect_start(self.good_commit, self.bad_commit, + self.test_command) + self.assertEqual(get_git_command('bisect', 'start'), + mock_popen.commands[0]) + mock_test_start_commit.assert_has_calls([ + mock.call('bad_commit', 'bad', 'testcommand'), + mock.call('good_commit', 'good', 'testcommand') + ]) + + def test_do_bisect_command(self): + """Test do_bisect_command executes a git bisect subcommand as intended.""" + subcommand = 'subcommand' + with mock.patch('subprocess.Popen', create_mock_popen()) as mock_popen: + self.git.do_bisect_command(subcommand) + self.assertEqual([get_git_command('bisect', 
subcommand)], + mock_popen.commands) + + @mock.patch('bisect_clang.build_clang') + def _test_test_commit(self, label, output, returncode, mock_build_clang): + """Test test_commit works as intended.""" + command_args = [] + + def mock_execute_impl(command, *args, **kwargs): # pylint: disable=unused-argument + command_args.append(command) + if command == self.test_command: + return returncode, output, '' + return 0, output, '' + + with mock.patch('bisect_clang.execute', mock_execute_impl): + result = self.git.test_commit(self.test_command) + self.assertEqual([self.test_command, + get_git_command('bisect', label)], command_args) + mock_build_clang.assert_called_once_with(LLVM_REPO_PATH) + return result + + def test_test_commit_good(self): + """Test test_commit labels a good commit as good.""" + self.assertIsNone(self._test_test_commit('good', '', 0)) # pylint: disable=no-value-for-parameter + + def test_test_commit_bad(self): + """Test test_commit labels a bad commit as bad.""" + self.assertIsNone(self._test_test_commit('bad', '', 1)) # pylint: disable=no-value-for-parameter + + def test_test_commit_culprit(self): + """Test test_commit returns the culprit""" + test_data = read_test_data('culprit-commit.txt') + self.assertEqual('ac9ee01fcbfac745aaedca0393a8e1c8a33acd8d', + self._test_test_commit('good', test_data, 0)) # pylint: disable=no-value-for-parameter + + +class GetTargetArchToBuildTest(unittest.TestCase): + """Tests for get_target_arch_to_build.""" + + def test_unrecognized(self): + """Test that an unrecognized architecture raises an exception.""" + with mock.patch('bisect_clang.execute') as mock_execute: + mock_execute.return_value = (None, 'mips', None) + with self.assertRaises(Exception): + bisect_clang.get_clang_target_arch() + + def test_recognized(self): + """Test that a recognized architecture returns the expected value.""" + arch_pairs = {'x86_64': 'X86', 'aarch64': 'AArch64'} + for uname_result, clang_target in arch_pairs.items(): + with 
mock.patch('bisect_clang.execute') as mock_execute: + mock_execute.return_value = (None, uname_result, None) + self.assertEqual(clang_target, bisect_clang.get_clang_target_arch()) diff --git a/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/compile_fuzztests.sh b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/compile_fuzztests.sh new file mode 100644 index 0000000000000000000000000000000000000000..8377920e53284d940aa467b29c56bd14e0c6c437 --- /dev/null +++ b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/compile_fuzztests.sh @@ -0,0 +1,126 @@ +#!/bin/bash -eu +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +################################################################################ + +set -x + +# In order to identify fuzztest test case "bazel query" is used to search +# the project. A search of the entire project is done with a default "...", +# however, some projects may fail to, or have very long processing time, if +# searching the entire project. Additionally, it may include fuzzers in +# dependencies, which should not be build as part of a given project. +# Tensorflow is an example project that will fail when the entire project is +# queried. FUZZTEST_TARGET_FOLDER makes it posible to specify the folder +# where fuzztest fuzzers should be search for. FUZZTEST_TARGET_FOLDER is passed +# to "bazel query" below. 
+if [[ ${FUZZTEST_TARGET_FOLDER:-"unset"} == "unset" ]]; +then + export TARGET_FOLDER="..." +else + TARGET_FOLDER=${FUZZTEST_TARGET_FOLDER} +fi + +BUILD_ARGS="--config=oss-fuzz --subcommands" +if [[ ${FUZZTEST_EXTRA_ARGS:-"unset"} != "unset" ]]; +then + BUILD_ARGS="$BUILD_ARGS ${FUZZTEST_EXTRA_ARGS}" +fi + +# Trigger setup_configs rule of fuzztest as it generates the necessary +# configuration file based on OSS-Fuzz environment variables. +bazel run @com_google_fuzztest//bazel:setup_configs >> /etc/bazel.bazelrc + +# Bazel target names of the fuzz binaries. +FUZZ_TEST_BINARIES=$(bazel query "kind(\"cc_test\", rdeps(${TARGET_FOLDER}, @com_google_fuzztest//fuzztest:fuzztest_gtest_main))") + +# Bazel output paths of the fuzz binaries. +FUZZ_TEST_BINARIES_OUT_PATHS=$(bazel cquery "kind(\"cc_test\", rdeps(${TARGET_FOLDER}, @com_google_fuzztest//fuzztest:fuzztest_gtest_main))" --output=files) + +# Build the project and fuzz binaries +# Expose `FUZZTEST_EXTRA_TARGETS` environment variable, in the event a project +# includes non-FuzzTest fuzzers then this can be used to compile these in the +# same `bazel build` command as when building the FuzzTest fuzzers. +# This is to avoid having to call `bazel build` twice. +bazel build $BUILD_ARGS -- ${FUZZ_TEST_BINARIES[*]} ${FUZZTEST_EXTRA_TARGETS:-} + +# Iterate the fuzz binaries and list each fuzz entrypoint in the binary. For +# each entrypoint create a wrapper script that calls into the binaries the +# given entrypoint as argument. +# The scripts will be named: +# {binary_name}@{fuzztest_entrypoint} +for fuzz_main_file in $FUZZ_TEST_BINARIES_OUT_PATHS; do + FUZZ_TESTS=$($fuzz_main_file --list_fuzz_tests) + cp ${fuzz_main_file} $OUT/ + fuzz_basename=$(basename $fuzz_main_file) + chmod -x $OUT/$fuzz_basename + for fuzz_entrypoint in $FUZZ_TESTS; do + TARGET_FUZZER="${fuzz_basename}@$fuzz_entrypoint" + + # Write executer script + echo "#!/bin/sh +# LLVMFuzzerTestOneInput for fuzzer detection. 
+this_dir=\$(dirname \"\$0\") +chmod +x \$this_dir/$fuzz_basename +\$this_dir/$fuzz_basename --fuzz=$fuzz_entrypoint -- \$@" > $OUT/$TARGET_FUZZER + chmod +x $OUT/$TARGET_FUZZER + done +done + +# Synchronise coverage directory to bazel output artifacts. This is a +# best-effort basis in that it will include source code in common +# bazel output folders. +# For projects that store results in non-standard folders or want to +# manage what code to include in the coverage report more specifically, +# the FUZZTEST_DO_SYNC environment variable is made available. Projects +# can then implement a custom way of synchronising source code with the +# coverage build. Set FUZZTEST_DO_SYNC to something other than "yes" and +# no effort will be made to automatically synchronise the source code with +# the code coverage visualisation utility. +if [[ "$SANITIZER" = "coverage" && ${FUZZTEST_DO_SYNC:-"yes"} == "yes" ]] +then + # Synchronize bazel source files to coverage collection. + declare -r REMAP_PATH="${OUT}/proc/self/cwd" + mkdir -p "${REMAP_PATH}" + + # Synchronize the folder bazel-BAZEL_OUT_PROJECT. + declare -r RSYNC_FILTER_ARGS=("--include" "*.h" "--include" "*.cc" "--include" \ + "*.hpp" "--include" "*.cpp" "--include" "*.c" "--include" "*/" "--include" "*.inc" \ + "--exclude" "*") + + project_folders="$(find . -name 'bazel-*' -type l -printf '%P\n' | \ + grep -v -x -F \ + -e 'bazel-bin' \ + -e 'bazel-testlogs')" + for link in $project_folders; do + if [[ -d "${PWD}"/$link/external ]] + then + rsync -avLk "${RSYNC_FILTER_ARGS[@]}" "${PWD}"/$link/external "${REMAP_PATH}" + fi + # k8-opt is a common path for storing bazel output artifacts, e.g. bazel-out/k8-opt. 
+ # It's the output folder for default amd-64 builds, but projects may specify custom + # platform output directories, see: https://github.com/bazelbuild/bazel/issues/13818 + # We support the default at the moment, and if a project needs custom synchronizing of + # output artifacts and code coverage we currently recommend using FUZZTEST_DO_SYNC. + if [[ -d "${PWD}"/$link/k8-opt ]] + then + rsync -avLk "${RSYNC_FILTER_ARGS[@]}" "${PWD}"/$link/k8-opt "${REMAP_PATH}"/$link + fi + done + + # Delete symlinks and sync the current folder. + find . -type l -ls -delete + rsync -av ${PWD}/ "${REMAP_PATH}" +fi diff --git a/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/compile_go_fuzzer b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/compile_go_fuzzer new file mode 100644 index 0000000000000000000000000000000000000000..df7d3e24d23c1caf7e262040021fa04240efa8bb --- /dev/null +++ b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/compile_go_fuzzer @@ -0,0 +1,69 @@ +#!/bin/bash -eu +# Copyright 2020 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +################################################################################ + +path=$1 +function=$2 +fuzzer=$3 +tags="-tags gofuzz" +if [[ $# -eq 4 ]]; then + tags="-tags $4" +fi + +# makes directory change temporary +( +cd $GOPATH/src/$path || true +# in the case we are in the right directory, with go.mod but no go.sum +go mod tidy || true +# project was downloaded with go get if go list fails +go list $tags $path || { cd $GOPATH/pkg/mod/ && cd `echo $path | cut -d/ -f1-3 | awk '{print $1"@*"}'`; } || cd - +# project does not have go.mod if go list fails again +go list $tags $path || { go mod init $path && go mod tidy ;} + +if [[ $SANITIZER = *coverage* ]]; then + fuzzed_package=`go list $tags -f '{{.Name}}' $path` + abspath=`go list $tags -f {{.Dir}} $path` + cd $abspath + cp $GOPATH/ossfuzz_coverage_runner.go ./"${function,,}"_test.go + sed -i -e 's/FuzzFunction/'$function'/' ./"${function,,}"_test.go + sed -i -e 's/mypackagebeingfuzzed/'$fuzzed_package'/' ./"${function,,}"_test.go + sed -i -e 's/TestFuzzCorpus/Test'$function'Corpus/' ./"${function,,}"_test.go + + # The repo is the module path/name, which is already created above in case it doesn't exist, + # but not always the same as the module path. This is necessary to handle SIV properly. + fuzzed_repo=$(go list $tags -f {{.Module}} "$path") + abspath_repo=`go list -m $tags -f {{.Dir}} $fuzzed_repo || go list $tags -f {{.Dir}} $fuzzed_repo` + # give equivalence to absolute paths in another file, as go test -cover uses golangish pkg.Dir + echo "s=$fuzzed_repo"="$abspath_repo"= > $OUT/$fuzzer.gocovpath + # Additional packages for which to get coverage. 
+ pkgaddcov="" + # to prevent bash from failing about unbound variable + GO_COV_ADD_PKG_SET=${GO_COV_ADD_PKG:-} + if [[ -n "${GO_COV_ADD_PKG_SET}" ]]; then + pkgaddcov=","$GO_COV_ADD_PKG + abspath_repo=`go list -m $tags -f {{.Dir}} $GO_COV_ADD_PKG || go list $tags -f {{.Dir}} $GO_COV_ADD_PKG` + echo "s=^$GO_COV_ADD_PKG"="$abspath_repo"= >> $OUT/$fuzzer.gocovpath + fi + go test -run Test${function}Corpus -v $tags -coverpkg $fuzzed_repo/...$pkgaddcov -c -o $OUT/$fuzzer $path +else + # Compile and instrument all Go files relevant to this fuzz target. + echo "Running go-fuzz $tags -func $function -o $fuzzer.a $path" + go-fuzz $tags -func $function -o $fuzzer.a $path + + # Link Go code ($fuzzer.a) with fuzzing engine to produce fuzz target binary. + $CXX $CXXFLAGS $LIB_FUZZING_ENGINE $fuzzer.a -o $OUT/$fuzzer +fi +) diff --git a/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/compile_native_go_fuzzer b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/compile_native_go_fuzzer new file mode 100644 index 0000000000000000000000000000000000000000..7a7fa67df811fa4d4a24f3f0d80fd17729fbb52b --- /dev/null +++ b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/compile_native_go_fuzzer @@ -0,0 +1,60 @@ +#!/bin/bash -eu +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +################################################################################ + +function build_native_go_fuzzer() { + fuzzer=$1 + function=$2 + path=$3 + tags="-tags gofuzz" + + if [[ $SANITIZER == *coverage* ]]; then + current_dir=$(pwd) + mkdir $OUT/rawfuzzers || true + cd $abs_file_dir + go test $tags -c -run $fuzzer -o $OUT/$fuzzer -cover + cp "${fuzzer_filename}" "${OUT}/rawfuzzers/${fuzzer}" + + fuzzed_repo=$(go list $tags -f {{.Module}} "$path") + abspath_repo=`go list -m $tags -f {{.Dir}} $fuzzed_repo || go list $tags -f {{.Dir}} $fuzzed_repo` + # give equivalence to absolute paths in another file, as go test -cover uses golangish pkg.Dir + echo "s=$fuzzed_repo"="$abspath_repo"= > $OUT/$fuzzer.gocovpath + + cd $current_dir + else + go-118-fuzz-build $tags -o $fuzzer.a -func $function $abs_file_dir + $CXX $CXXFLAGS $LIB_FUZZING_ENGINE $fuzzer.a -o $OUT/$fuzzer + fi +} + +path=$1 +function=$2 +fuzzer=$3 +tags="-tags gofuzz" + +# Get absolute path. +abs_file_dir=$(go list $tags -f {{.Dir}} $path) + +# TODO(adamkorcz): Get rid of "-r" flag here. +fuzzer_filename=$(grep -r -l --include='*.go' -s "$function" "${abs_file_dir}") + +# Test if file contains a line with "func $function" and "testing.F". 
+if [ $(grep -r "func $function" $fuzzer_filename | grep "testing.F" | wc -l) -eq 1 ] +then + build_native_go_fuzzer $fuzzer $function $abs_file_dir +else + echo "Could not find the function: func ${function}(f *testing.F)" +fi diff --git a/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/debug_afl b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/debug_afl new file mode 100644 index 0000000000000000000000000000000000000000..c53dae8156d623620be6750bf242ed714a182dcf --- /dev/null +++ b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/debug_afl @@ -0,0 +1,40 @@ +#!/bin/bash +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +################################################################################ + +# Source this file for afl++ debug sessions. 
+apt-get update +apt-get install -y strace gdb vim joe psmisc + +pushd $SRC/aflplusplus > /dev/null +git checkout dev +git pull +test -n "$1" && { git checkout "$1" ; git pull ; } +CFLAGS_SAVE="$CFLAGS" +CXXFLAGS_SAVE="$CXXFLAGS" +unset CFLAGS +unset CXXFLAGS +make +export CFLAGS="$CFLAGS_SAVE" +export CXXFLAGS="$CXXFLAGS_SAVE" +popd > /dev/null + +export ASAN_OPTIONS="detect_leaks=0:symbolize=0:detect_odr_violation=0:abort_on_error=1" +export AFL_LLVM_LAF_ALL=1 +export AFL_LLVM_CMPLOG=1 +touch "$OUT/afl_cmplog.txt" +export AFL_LLVM_DICT2FILE=$OUT/afl++.dict +ulimit -c unlimited diff --git a/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/detect_repo_test.py b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/detect_repo_test.py new file mode 100644 index 0000000000000000000000000000000000000000..0243b3ac513e942825e445bf4786bc593f48a338 --- /dev/null +++ b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/detect_repo_test.py @@ -0,0 +1,121 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Test the functionality of the detect_repo module. +This will consist of the following functional test: + 1. Determine if an OSS-Fuzz projects main repo can be detected from example + commits. + 2. Determine if an OSS-Fuzz project main repo can be detected from a + repo name. 
+""" +import os +import re +import sys +import tempfile +import unittest +from unittest import mock + +import detect_repo + +# Appending to path for access to repo_manager module. +# pylint: disable=wrong-import-position +sys.path.append( + os.path.dirname(os.path.dirname(os.path.dirname( + os.path.abspath(__file__))))) +import repo_manager +import test_repos +# pylint: enable=wrong-import-position + + +class TestCheckForRepoName(unittest.TestCase): + """Tests for check_for_repo_name.""" + + @mock.patch('os.path.exists', return_value=True) + @mock.patch('detect_repo.execute', + return_value=('https://github.com/google/syzkaller/', None)) + def test_go_get_style_url(self, _, __): + """Tests that check_for_repo_name works on repos that were downloaded using + go get.""" + self.assertTrue(detect_repo.check_for_repo_name('fake-path', 'syzkaller')) + + @mock.patch('os.path.exists', return_value=True) + @mock.patch('detect_repo.execute', + return_value=('https://github.com/google/syzkaller', None)) + def test_missing_git_and_slash_url(self, _, __): + """Tests that check_for_repo_name works on repos who's URLs do not end in + ".git" or "/".""" + self.assertTrue(detect_repo.check_for_repo_name('fake-path', 'syzkaller')) + + @mock.patch('os.path.exists', return_value=True) + @mock.patch('detect_repo.execute', + return_value=('https://github.com/google/syzkaller.git', None)) + def test_normal_style_repo_url(self, _, __): + """Tests that check_for_repo_name works on normally cloned repos.""" + self.assertTrue(detect_repo.check_for_repo_name('fake-path', 'syzkaller')) + + +@unittest.skipIf(not os.getenv('INTEGRATION_TESTS'), + 'INTEGRATION_TESTS=1 not set') +class DetectRepoIntegrationTest(unittest.TestCase): + """Class to test the functionality of the detect_repo module.""" + + def test_infer_main_repo_from_commit(self): + """Tests that the main repo can be inferred based on an example commit.""" + + with tempfile.TemporaryDirectory() as tmp_dir: + # Construct example repo's 
to check for commits. + for test_repo in test_repos.TEST_REPOS: + repo_manager.clone_repo_and_get_manager(test_repo.git_url, tmp_dir) + self.check_with_repo(test_repo.git_url, + test_repo.git_repo_name, + tmp_dir, + commit=test_repo.old_commit) + + def test_infer_main_repo_from_name(self): + """Tests that the main project repo can be inferred from a repo name.""" + with tempfile.TemporaryDirectory() as tmp_dir: + for test_repo in test_repos.TEST_REPOS: + repo_manager.clone_repo_and_get_manager(test_repo.git_url, tmp_dir) + self.check_with_repo(test_repo.git_url, test_repo.git_repo_name, + tmp_dir) + + def check_with_repo(self, repo_origin, repo_name, tmp_dir, commit=None): + """Checks the detect repo's main method for a specific set of inputs. + + Args: + repo_origin: URL of the git repo. + repo_name: The name of the directory it is cloned to. + tmp_dir: The location of the directory of git repos to be searched. + commit: The commit that should be used to look up the repo. + """ + command = ['python3', 'detect_repo.py', '--src_dir', tmp_dir] + + if commit: + command += ['--example_commit', commit] + else: + command += ['--repo_name', repo_name] + + out, _ = detect_repo.execute(command, + location=os.path.dirname( + os.path.realpath(__file__))) + match = re.search(r'\bDetected repo: ([^ ]+) ([^ ]+)', out.rstrip()) + if match and match.group(1) and match.group(2): + self.assertEqual(match.group(1), repo_origin) + self.assertEqual(match.group(2), os.path.join(tmp_dir, repo_name)) + else: + self.assertIsNone(repo_origin) + self.assertIsNone(repo_name) + + +if __name__ == '__main__': + unittest.main() diff --git a/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/install_deps.sh b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/install_deps.sh new file mode 100644 index 0000000000000000000000000000000000000000..777e4d1b69f3865207fba671457508662d0a49c2 --- /dev/null +++ 
b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/install_deps.sh
@@ -0,0 +1,44 @@
+#!/bin/bash -eux
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+################################################################################
+
+# Install base-builder's dependencies in an architecture-aware way.
+
+
+case $(uname -m) in
+  x86_64)
+    dpkg --add-architecture i386
+    ;;
+esac
+
+apt-get update && \
+    apt-get install -y \
+        binutils-dev \
+        build-essential \
+        curl \
+        wget \
+        git \
+        jq \
+        patchelf \
+        rsync \
+        subversion \
+        zip
+
+case $(uname -m) in
+  x86_64)
+    apt-get install -y libc6-dev-i386
+    ;;
+esac
diff --git a/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/install_go.sh b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/install_go.sh
new file mode 100644
index 0000000000000000000000000000000000000000..f2a93bd76702f33c0c3319fcc74ddbfd42852057
--- /dev/null
+++ b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/install_go.sh
@@ -0,0 +1,43 @@
+#!/bin/bash -eux
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +################################################################################ + +cd /tmp + +wget https://go.dev/dl/go1.23.4.linux-amd64.tar.gz +mkdir temp-go +tar -C temp-go/ -xzf go1.23.4.linux-amd64.tar.gz + +mkdir /root/.go/ +mv temp-go/go/* /root/.go/ +rm -rf temp-go + +echo 'Set "GOPATH=/root/go"' +echo 'Set "PATH=$PATH:/root/.go/bin:$GOPATH/bin"' + +go install github.com/mdempsky/go114-fuzz-build@latest +ln -s $GOPATH/bin/go114-fuzz-build $GOPATH/bin/go-fuzz + +# Build signal handler +if [ -f "$GOPATH/gosigfuzz/gosigfuzz.c" ]; then + clang -c $GOPATH/gosigfuzz/gosigfuzz.c -o $GOPATH/gosigfuzz/gosigfuzz.o +fi + +cd /tmp +git clone https://github.com/AdamKorcz/go-118-fuzz-build +cd go-118-fuzz-build +go build +mv go-118-fuzz-build $GOPATH/bin/ diff --git a/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/install_java.sh b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/install_java.sh new file mode 100644 index 0000000000000000000000000000000000000000..d7743c6cd37c9c09f6459c3ed1805d7ddd4b56df --- /dev/null +++ b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/install_java.sh @@ -0,0 +1,31 @@ +#!/bin/bash -eux +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +################################################################################ + +# Install OpenJDK 17 and trim its size by removing unused components. This enables using Jazzer's mutation framework. +cd /tmp +curl --silent -L -O https://download.java.net/java/GA/jdk17.0.2/dfd4a8d0985749f896bed50d7138ee7f/8/GPL/openjdk-17.0.2_linux-x64_bin.tar.gz && \ +mkdir -p $JAVA_HOME +tar -xz --strip-components=1 -f openjdk-17.0.2_linux-x64_bin.tar.gz --directory $JAVA_HOME && \ +rm -f openjdk-17.0.2_linux-x64_bin.tar.gz +rm -rf $JAVA_HOME/jmods $JAVA_HOME/lib/src.zip + +# Install OpenJDK 15 and trim its size by removing unused components. Some projects only run with Java 15. 
+curl --silent -L -O https://download.java.net/java/GA/jdk15.0.2/0d1cfde4252546c6931946de8db48ee2/7/GPL/openjdk-15.0.2_linux-x64_bin.tar.gz && \ +mkdir -p $JAVA_15_HOME +tar -xz --strip-components=1 -f openjdk-15.0.2_linux-x64_bin.tar.gz --directory $JAVA_15_HOME && \ +rm -f openjdk-15.0.2_linux-x64_bin.tar.gz +rm -rf $JAVA_15_HOME/jmods $JAVA_15_HOME/lib/src.zip diff --git a/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/install_ruby.sh b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/install_ruby.sh new file mode 100644 index 0000000000000000000000000000000000000000..76e996727197aa93b49dfbb935dff7b57c077341 --- /dev/null +++ b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/install_ruby.sh @@ -0,0 +1,25 @@ +#!/bin/bash +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +################################################################################ + +apt update +apt install -y lsb-release software-properties-common gnupg2 binutils xz-utils libyaml-dev +gpg2 --keyserver keyserver.ubuntu.com --recv-keys 409B6B1796C275462A1703113804BB82D39DC0E3 7D2BAF1CF37B13E2069D6956105BD0E739499BDB +curl -sSL https://get.rvm.io | bash + +. 
/etc/profile.d/rvm.sh + +rvm install ruby-3.3.1 diff --git a/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/install_rust.sh b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/install_rust.sh new file mode 100644 index 0000000000000000000000000000000000000000..45fbec6ed4d6afeec0897f38c9a7c1bab0c4eafa --- /dev/null +++ b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/install_rust.sh @@ -0,0 +1,22 @@ +#!/bin/bash -eux +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +################################################################################ + +curl https://sh.rustup.rs | sh -s -- -y --default-toolchain=$RUSTUP_TOOLCHAIN --profile=minimal +cargo install cargo-fuzz --locked && rm -rf /rust/registry +# Needed to recompile rust std library for MSAN +rustup component add rust-src +cp -r /usr/local/lib/x86_64-unknown-linux-gnu/* /usr/local/lib/ diff --git a/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/install_swift.sh b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/install_swift.sh new file mode 100644 index 0000000000000000000000000000000000000000..1d0d16701f39965afc493e9cb64bc0a6f76f4c16 --- /dev/null +++ b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/install_swift.sh @@ -0,0 +1,67 @@ +#!/bin/bash -eux +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+################################################################################
+
+
+SWIFT_PACKAGES="wget \
+                binutils \
+                git \
+                gnupg2 \
+                libc6-dev \
+                libcurl4 \
+                libedit2 \
+                libgcc-9-dev \
+                libpython2.7 \
+                libsqlite3-0 \
+                libstdc++-9-dev \
+                libxml2 \
+                libz3-dev \
+                pkg-config \
+                tzdata \
+                zlib1g-dev"
+SWIFT_SYMBOLIZER_PACKAGES="build-essential make cmake ninja-build git python3 g++-multilib binutils-dev zlib1g-dev"
+apt-get update && apt install -y $SWIFT_PACKAGES && \
+    apt install -y $SWIFT_SYMBOLIZER_PACKAGES --no-install-recommends
+
+
+wget -q https://download.swift.org/swift-5.10.1-release/ubuntu2004/swift-5.10.1-RELEASE/swift-5.10.1-RELEASE-ubuntu20.04.tar.gz
+tar xzf swift-5.10.1-RELEASE-ubuntu20.04.tar.gz
+cp -r swift-5.10.1-RELEASE-ubuntu20.04/usr/* /usr/
+rm -rf swift-5.10.1-RELEASE-ubuntu20.04.tar.gz swift-5.10.1-RELEASE-ubuntu20.04/
+# TODO: Move to a separate work dir
+git clone https://github.com/llvm/llvm-project.git
+cd llvm-project
+git checkout 63bf228450b8403e0c5e828d276be47ffbcd00d0 # TODO: Keep in sync with base-clang. 
+git apply ../llvmsymbol.diff --verbose +cmake -G "Ninja" \ + -DLIBCXX_ENABLE_SHARED=OFF \ + -DLIBCXX_ENABLE_STATIC_ABI_LIBRARY=ON \ + -DLIBCXXABI_ENABLE_SHARED=OFF \ + -DCMAKE_BUILD_TYPE=Release \ + -DLLVM_TARGETS_TO_BUILD=X86 \ + -DCMAKE_C_COMPILER=clang \ + -DCMAKE_CXX_COMPILER=clang++ \ + -DLLVM_BUILD_TESTS=OFF \ + -DLLVM_INCLUDE_TESTS=OFF llvm +ninja -j$(nproc) llvm-symbolizer +cp bin/llvm-symbolizer /usr/local/bin/llvm-symbolizer-swift + +cd $SRC +rm -rf llvm-project llvmsymbol.diff + +# TODO: Cleanup packages +apt-get remove --purge -y wget zlib1g-dev +apt-get autoremove -y diff --git a/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/precompile_afl b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/precompile_afl new file mode 100644 index 0000000000000000000000000000000000000000..d6e71f2c3428745fde2b9ff2519f0ff67b3ebaa2 --- /dev/null +++ b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/precompile_afl @@ -0,0 +1,35 @@ +#!/bin/bash -eu +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +################################################################################ + +echo "Precompiling AFLplusplus" + +pushd $SRC/aflplusplus > /dev/null +make clean +# Unset CFLAGS and CXXFLAGS while building AFL since we don't want to slow it +# down with sanitizers. 
+SAVE_CXXFLAGS=$CXXFLAGS +SAVE_CFLAGS=$CFLAGS +unset CXXFLAGS +unset CFLAGS +export AFL_IGNORE_UNKNOWN_ENVS=1 +make clean +AFL_NO_X86=1 PYTHON_INCLUDE=/ make +make -C utils/aflpp_driver + +popd > /dev/null + +echo "Done." diff --git a/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/precompile_centipede b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/precompile_centipede new file mode 100644 index 0000000000000000000000000000000000000000..362ef6a5b6e51bb5c81cbbc45ddab2660b966e0d --- /dev/null +++ b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/precompile_centipede @@ -0,0 +1,49 @@ +#!/bin/bash -eu +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +################################################################################ + +echo -n "Precompiling centipede" + +# Build Centipede with bazel. 
+cd "$SRC/fuzztest/centipede/" +apt-get update && apt-get install libssl-dev -y +unset CXXFLAGS CFLAGS +# We need to use an older version of BAZEL because fuzztest relies on WORKSPACE +# Ref: https://github.com/google/oss-fuzz/pull/12838#issue-2733821058 +export USE_BAZEL_VERSION=7.4.0 +echo 'build --cxxopt=-stdlib=libc++ --linkopt=-lc++' >> /tmp/centipede.bazelrc +bazel --bazelrc=/tmp/centipede.bazelrc build -c opt :all +unset USE_BAZEL_VERSION + +# Prepare the weak symbols: +# This is necessary because we compile the target binary and the intermediate +# auxiliary binaries with the same cflags. The auxiliary binaries do not need +# data-flow tracing flags, but will still throw errors when they cannot find +# the corresponding functions. +# The weak symbols provides fake implementations for intermediate binaries. +$CXX "$SRC/fuzztest/centipede/weak_sancov_stubs.cc" -c -o "$SRC/fuzztest/centipede/weak.o" + +echo 'Removing extra stuff leftover to avoid bloating image.' + +rm -rf /clang-*.tgz /clang + +BAZEL_BIN_REAL_DIR=$(readlink -f $CENTIPEDE_BIN_DIR) +rm -rf $CENTIPEDE_BIN_DIR +mkdir -p $CENTIPEDE_BIN_DIR +mv $BAZEL_BIN_REAL_DIR/centipede/{centipede,libcentipede_runner.pic.a} $CENTIPEDE_BIN_DIR +rm -rf /root/.cache + +echo 'Done.' diff --git a/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/precompile_honggfuzz b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/precompile_honggfuzz new file mode 100644 index 0000000000000000000000000000000000000000..34d2ab8c2497ff14b4d49b8a3828febaa5135cc1 --- /dev/null +++ b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/precompile_honggfuzz @@ -0,0 +1,45 @@ +#!/bin/bash -eu +# Copyright 2019 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +################################################################################ + +echo "Precompiling honggfuzz" +export BUILD_OSSFUZZ_STATIC=true + +PACKAGES=( + libunwind8-dev + libblocksruntime-dev + liblzma-dev + libiberty-dev + zlib1g-dev + pkg-config) + +apt-get update && apt-get install -y ${PACKAGES[@]} + +pushd $SRC/honggfuzz > /dev/null +make clean +# These CFLAGs match honggfuzz's default, with the exception of -mtune to +# improve portability and `-D_HF_LINUX_NO_BFD` to remove assembly instructions +# from the filenames. +CC=clang CFLAGS="-O3 -funroll-loops -D_HF_LINUX_NO_BFD" make + +# libhfuzz.a will be added by CC/CXX linker directly during linking, +# but it's defined here to satisfy the build infrastructure +ar rcs honggfuzz.a libhfuzz/*.o libhfcommon/*.o +popd > /dev/null + +apt-get remove -y --purge ${PACKAGES[@]} +apt-get autoremove -y +echo "Done." diff --git a/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/python_coverage_helper.py b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/python_coverage_helper.py new file mode 100644 index 0000000000000000000000000000000000000000..4f244a0435c1a783f8b841b78cb8b385bbaa5f1b --- /dev/null +++ b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/python_coverage_helper.py @@ -0,0 +1,120 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Extracts file paths to copy files from pyinstaller-generated executables""" +import os +import sys +import shutil +import zipfile + + +# Finds all *.toc files in ./workpath and reads these files in order to +# identify Python files associated with a pyinstaller packaged executable. +# Copies all of the Python files to a temporary directory (/medio) following +# the original directory structure. +def get_all_files_from_toc(toc_file, file_path_set): + """ + Extract filepaths from a .toc file and add to file_path_set + """ + with open(toc_file, 'rb') as toc_file_fd: + for line in toc_file_fd: + try: + line = line.decode() + except: # pylint:disable=bare-except + continue + if '.py' not in line: + continue + + split_line = line.split(' ') + for word in split_line: + word = word.replace('\'', '').replace(',', '').replace('\n', '') + if '.py' not in word: + continue + # Check if .egg is in the path and if so we need to split it + if os.path.isfile(word): + file_path_set.add(word) + elif '.egg' in word: # check if this is an egg + egg_path_split = word.split('.egg') + if len(egg_path_split) != 2: + continue + egg_path = egg_path_split[0] + '.egg' + if not os.path.isfile(egg_path): + continue + + print('Unzipping contents of %s' % egg_path) + + # We have an egg. This needs to be unzipped and then replaced + # with the unzipped data. 
+ tmp_dir_name = 'zipdcontents' + if os.path.isdir(tmp_dir_name): + shutil.rmtree(tmp_dir_name) + + # unzip egg and replace path with unzipped content + with zipfile.ZipFile(egg_path, 'r') as zip_f: + zip_f.extractall(tmp_dir_name) + os.remove(egg_path) + shutil.copytree(tmp_dir_name, egg_path) + + # Now the lines should be accessible, so check again + if os.path.isfile(word): + file_path_set.add(word) + + +def create_file_structure_from_tocs(work_path, out_path): + """ + Extract the Python files that are added as paths in the output of + a pyinstaller operation. The files are determined by reading through + all of the *.toc files in the workpath of pyinstaller. + + The files will be copied into the out_path using a similar file path + as they originally are. If any archive (.egg) files are present in the + .toc files, then unzip the archives and substitute the archive for the + unzipped content, i.e. we will extract the archives and collect the source + files. + """ + print('Extracts files from the pyinstaller workpath') + file_path_set = set() + for path1 in os.listdir(work_path): + full_path = os.path.join(work_path, path1) + if not os.path.isdir(full_path): + continue + + # We have a directory + for path2 in os.listdir(full_path): + if not '.toc' in path2: + continue + full_toc_file = os.path.join(full_path, path2) + get_all_files_from_toc(full_toc_file, file_path_set) + + for file_path in file_path_set: + relative_src = file_path[1:] if file_path[0] == '/' else file_path + dst_path = os.path.join(out_path, relative_src) + os.makedirs(os.path.dirname(dst_path), exist_ok=True) + shutil.copy(file_path, dst_path) + + +def main(): + """ + Main handler. 
+ """ + if len(sys.argv) != 3: + print('Use: python3 python_coverage_helper.py pyinstaller_workpath ' + 'destination_for_output') + sys.exit(1) + work_path = sys.argv[1] + out_path = sys.argv[2] + create_file_structure_from_tocs(work_path, out_path) + + +if __name__ == '__main__': + main() diff --git a/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/srcmap b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/srcmap new file mode 100644 index 0000000000000000000000000000000000000000..f967074fdc9161fa3c95814e3b24e0f2324af61c --- /dev/null +++ b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/srcmap @@ -0,0 +1,66 @@ +#!/bin/bash -eux +# Copyright 2016 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +################################################################################ + +# Deterimine srcmap of checked out source code + +SRCMAP=$(tempfile) +echo "{}" > $SRCMAP + +# $1 - json file, $2 - jq program +function jq_inplace() { + F=$(tempfile) && cat $1 | jq "$2" > $F && mv $F $1 +} + +PATHS_TO_SCAN="$SRC" + +if [[ $FUZZING_LANGUAGE == "go" ]]; then + PATHS_TO_SCAN="$PATHS_TO_SCAN $GOPATH" +fi + +# Git +for DOT_GIT_DIR in $(find $PATHS_TO_SCAN -name ".git" -type d); do + GIT_DIR=$(dirname $DOT_GIT_DIR) + cd $GIT_DIR + GIT_URL=$(git config --get remote.origin.url) + GIT_REV=$(git rev-parse HEAD) + jq_inplace $SRCMAP ".\"$GIT_DIR\" = { type: \"git\", url: \"$GIT_URL\", rev: \"$GIT_REV\" }" +done + +# Subversion +for DOT_SVN_DIR in $(find $PATHS_TO_SCAN -name ".svn" -type d); do + SVN_DIR=$(dirname $DOT_SVN_DIR) + cd $SVN_DIR + SVN_URL=$(svn info | grep "^URL:" | sed 's/URL: //g') + SVN_REV=$(svn info -r HEAD | grep "^Revision:" | sed 's/Revision: //g') + jq_inplace $SRCMAP ".\"$SVN_DIR\" = { type: \"svn\", url: \"$SVN_URL\", rev: \"$SVN_REV\" }" +done + +# Mercurial +for DOT_HG_DIR in $(find $PATHS_TO_SCAN -name ".hg" -type d); do + HG_DIR=$(dirname $DOT_HG_DIR) + cd $HG_DIR + HG_URL=$(hg paths default) + HG_REV=$(hg --debug id -r. 
-i) + jq_inplace $SRCMAP ".\"$HG_DIR\" = { type: \"hg\", url: \"$HG_URL\", rev: \"$HG_REV\" }" +done + +if [ "${OSSFUZZ_REVISION-}" != "" ]; then + jq_inplace $SRCMAP ".\"/src\" = { type: \"git\", url: \"https://github.com/google/oss-fuzz.git\", rev: \"$OSSFUZZ_REVISION\" }" +fi + +cat $SRCMAP +rm $SRCMAP diff --git a/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/write_labels.py b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/write_labels.py new file mode 100644 index 0000000000000000000000000000000000000000..3b3a6d39aa6dfc75d6694808e336d5ed1e5226ed --- /dev/null +++ b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-builder/write_labels.py @@ -0,0 +1,40 @@ +#!/usr/bin/env python3 +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Script for writing from project.yaml to .labels file.""" + +import os +import json +import sys + + +def main(): + """Writes labels.""" + if len(sys.argv) != 3: + print('Usage: write_labels.py labels_json out_dir', file=sys.stderr) + sys.exit(1) + + labels_by_target = json.loads(sys.argv[1]) + out = sys.argv[2] + + for target_name, labels in labels_by_target.items(): + # Skip over wildcard value applying to all fuzz targets + if target_name == '*': + continue + with open(os.path.join(out, target_name + '.labels'), 'w') as file_handle: + file_handle.write('\n'.join(labels)) + + +if __name__ == '__main__': + main() diff --git a/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-image/Dockerfile b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-image/Dockerfile new file mode 100644 index 0000000000000000000000000000000000000000..f77c7f77d453c6717e9ae9e8be994e305288841a --- /dev/null +++ b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-image/Dockerfile @@ -0,0 +1,46 @@ +# Copyright 2016 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +################################################################################ + +# Base image for all other images. + +ARG parent_image=ubuntu:20.04@sha256:4a45212e9518f35983a976eead0de5eecc555a2f047134e9dd2cfc589076a00d + +FROM $parent_image + +ENV DEBIAN_FRONTEND noninteractive +# Install tzadata to match ClusterFuzz +# (https://github.com/google/oss-fuzz/issues/9280). 
+ +# Use Azure mirrors for consistent apt repository access. +RUN cp /etc/apt/sources.list /etc/apt/sources.list.backup && \ + sed -i 's|http://archive.ubuntu.com/ubuntu/|http://azure.archive.ubuntu.com/ubuntu/|g' /etc/apt/sources.list && \ + sed -i 's|http://security.ubuntu.com/ubuntu/|http://azure.archive.ubuntu.com/ubuntu/|g' /etc/apt/sources.list + + +RUN apt-get update && \ + apt-get upgrade -y && \ + apt-get install -y libc6-dev binutils libgcc-9-dev tzdata locales locales-all && \ + apt-get autoremove -y + +ENV OUT=/out +ENV SRC=/src +ENV WORK=/work +ENV PATH="$PATH:/out" +ENV HWASAN_OPTIONS=random_tags=0 +#set locale to utf8 +ENV LC_ALL=C.UTF-8 + +RUN mkdir -p $OUT $SRC $WORK && chmod a+rwx $OUT $SRC $WORK diff --git a/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner-debug/Dockerfile b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner-debug/Dockerfile new file mode 100644 index 0000000000000000000000000000000000000000..05d15f03a48a9cbfb8b6b5383984cfef786bd7db --- /dev/null +++ b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner-debug/Dockerfile @@ -0,0 +1,26 @@ +# Copyright 2016 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +################################################################################ + +ARG IMG_TAG=latest +FROM ghcr.io/aixcc-finals/base-runner:${IMG_TAG} +RUN apt-get update && apt-get install -y valgrind zip + +# Installing GDB 12, re https://github.com/google/oss-fuzz/issues/7513. +RUN apt-get install -y build-essential libgmp-dev && \ + wget https://ftp.gnu.org/gnu/gdb/gdb-12.1.tar.xz && \ + tar -xf gdb-12.1.tar.xz && cd gdb-12.1 && ./configure && \ + make -j $(expr $(nproc) / 2) && make install && cd .. && \ + rm -rf gdb-12.1* && apt-get remove --purge -y build-essential libgmp-dev diff --git a/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/Dockerfile b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/Dockerfile new file mode 100644 index 0000000000000000000000000000000000000000..7f4ba223bdecff1f1f5c8ba78b0bfef9a6cc48f5 --- /dev/null +++ b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/Dockerfile @@ -0,0 +1,139 @@ +# Copyright 2016 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +################################################################################ + +# Build rust stuff in its own image. We only need the resulting binaries. +# Keeping the rust toolchain in the image wastes 1 GB. 
+ARG IMG_TAG=latest +FROM ghcr.io/aixcc-finals/base-image:${IMG_TAG} as temp-runner-binary-builder + +RUN apt-get update && apt-get install -y cargo libyaml-dev +RUN cargo install rustfilt + +# Using multi-stage build to copy some LLVM binaries needed in the runner image. +FROM ghcr.io/aixcc-finals/base-clang:${IMG_TAG} AS base-clang +FROM ghcr.io/aixcc-finals/base-builder-ruby:${IMG_TAG} AS base-ruby + +# The base builder image compiles a specific Python version. Using a multi-stage build +# to copy that same Python interpreter into the runner image saves build time and keeps +# the Python versions in sync. +FROM ghcr.io/aixcc-finals/base-builder:${IMG_TAG} AS base-builder + +# Real image that will be used later. +FROM ghcr.io/aixcc-finals/base-image:${IMG_TAG} + +COPY --from=temp-runner-binary-builder /root/.cargo/bin/rustfilt /usr/local/bin + +# Copy the binaries needed for code coverage and crash symbolization. +COPY --from=base-clang /usr/local/bin/llvm-cov \ + /usr/local/bin/llvm-profdata \ + /usr/local/bin/llvm-symbolizer \ + /usr/local/bin/ + +# Copy the pre-compiled Python binaries and libraries +COPY --from=base-builder /usr/local/bin/python3.10 /usr/local/bin/python3.10 +COPY --from=base-builder /usr/local/lib/libpython3.10.so.1.0 /usr/local/lib/libpython3.10.so.1.0 +COPY --from=base-builder /usr/local/include/python3.10 /usr/local/include/python3.10 +COPY --from=base-builder /usr/local/lib/python3.10 /usr/local/lib/python3.10 +COPY --from=base-builder /usr/local/bin/pip3 /usr/local/bin/pip3 + +# Create symbolic links to ensure compatibility +RUN ldconfig && \ + ln -s /usr/local/bin/python3.10 /usr/local/bin/python3 && \ + ln -s /usr/local/bin/python3.10 /usr/local/bin/python + +COPY install_deps.sh / +RUN /install_deps.sh && rm /install_deps.sh + +ENV CODE_COVERAGE_SRC=/opt/code_coverage +# Pin coverage to the same as in the base builder: +# https://github.com/google/oss-fuzz/blob/master/infra/base-images/base-builder/install_python.sh#L22 +RUN git 
clone https://chromium.googlesource.com/chromium/src/tools/code_coverage $CODE_COVERAGE_SRC && \ + cd /opt/code_coverage && \ + git checkout edba4873b5e8a390e977a64c522db2df18a8b27d && \ + pip3 install wheel && \ + # If version "Jinja2==2.10" is in requirements.txt, bump it to a patch version that + # supports upgrading its MarkupSafe dependency to a Python 3.10 compatible release: + sed -i 's/Jinja2==2.10/Jinja2==2.10.3/' requirements.txt && \ + pip3 install -r requirements.txt && \ + pip3 install MarkupSafe==2.0.1 && \ + pip3 install coverage==6.3.2 + +# Default environment options for various sanitizers. +# Note that these match the settings used in ClusterFuzz and +# shouldn't be changed unless a corresponding change is made on +# ClusterFuzz side as well. +ENV ASAN_OPTIONS="alloc_dealloc_mismatch=0:allocator_may_return_null=1:allocator_release_to_os_interval_ms=500:check_malloc_usable_size=0:detect_container_overflow=1:detect_odr_violation=0:detect_leaks=1:detect_stack_use_after_return=1:fast_unwind_on_fatal=0:handle_abort=1:handle_segv=1:handle_sigill=1:max_uar_stack_size_log=16:print_scariness=1:quarantine_size_mb=10:strict_memcmp=1:strip_path_prefix=/workspace/:symbolize=1:use_sigaltstack=1:dedup_token_length=3" +ENV MSAN_OPTIONS="print_stats=1:strip_path_prefix=/workspace/:symbolize=1:dedup_token_length=3" +ENV UBSAN_OPTIONS="print_stacktrace=1:print_summary=1:silence_unsigned_overflow=1:strip_path_prefix=/workspace/:symbolize=1:dedup_token_length=3" +ENV FUZZER_ARGS="-rss_limit_mb=2560 -timeout=25" +ENV AFL_FUZZER_ARGS="-m none" + +# Set up Golang environment variables (copied from /root/.bash_profile). +ENV GOPATH /root/go + +# /root/.go/bin is for the standard Go binaries (i.e. go, gofmt, etc). +# $GOPATH/bin is for the binaries from the dependencies installed via "go get". 
+ENV PATH $PATH:$GOPATH/bin +COPY gocoverage $GOPATH/gocoverage + +COPY install_go.sh / +RUN /install_go.sh && rm -rf /install_go.sh /root/.go + +# Install OpenJDK 15 and trim its size by removing unused components. +ENV JAVA_HOME=/usr/lib/jvm/java-17-openjdk-amd64 +ENV JAVA_15_HOME=/usr/lib/jvm/java-15-openjdk-amd64 +ENV JVM_LD_LIBRARY_PATH=$JAVA_HOME/lib/server +ENV PATH=$PATH:$JAVA_HOME/bin + +COPY install_java.sh / +RUN /install_java.sh && rm /install_java.sh + +# Install JaCoCo for JVM coverage. +RUN wget https://repo1.maven.org/maven2/org/jacoco/org.jacoco.cli/0.8.7/org.jacoco.cli-0.8.7-nodeps.jar -O /opt/jacoco-cli.jar && \ + wget https://repo1.maven.org/maven2/org/jacoco/org.jacoco.agent/0.8.7/org.jacoco.agent-0.8.7-runtime.jar -O /opt/jacoco-agent.jar && \ + echo "37df187b76888101ecd745282e9cd1ad4ea508d6 /opt/jacoco-agent.jar" | shasum --check && \ + echo "c1814e7bba5fd8786224b09b43c84fd6156db690 /opt/jacoco-cli.jar" | shasum --check + +COPY install_javascript.sh / +RUN /install_javascript.sh && rm /install_javascript.sh + +# Copy built ruby and ruzzy from builder +COPY --from=base-ruby /usr/local/rvm /usr/local/rvm +COPY --from=base-ruby /install/ruzzy /install/ruzzy +COPY ruzzy /usr/bin/ruzzy +ENV PATH="$PATH:/usr/local/rvm/rubies/ruby-3.3.1/bin" +# RubyGems installation directory +ENV GEM_HOME="$OUT/fuzz-gem" +ENV GEM_PATH="/install/ruzzy" + +# Do this last to make developing these files easier/faster due to caching. 
+COPY bad_build_check \ + coverage \ + coverage_helper \ + download_corpus \ + jacoco_report_converter.py \ + nyc_report_converter.py \ + rcfilt \ + reproduce \ + run_fuzzer \ + parse_options.py \ + generate_differential_cov_report.py \ + profraw_update.py \ + targets_list \ + test_all.py \ + test_one.py \ + python_coverage_runner_help.py \ + /usr/local/bin/ diff --git a/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/README.md b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/README.md new file mode 100644 index 0000000000000000000000000000000000000000..e1e29e3802fc485f0a4df667baf89cea7fad5dfd --- /dev/null +++ b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/README.md @@ -0,0 +1,31 @@ +# base-runner +> Base image for fuzzer runners. + +```bash +docker run -ti ghcr.io/aixcc-finals/base-runner +``` + +## Commands + +| Command | Description | +|---------|-------------| +| `reproduce ` | build all fuzz targets and run specified one with testcase `/testcase` and given options. +| `run_fuzzer ` | runs specified fuzzer combining options with `.options` file | +| `test_all.py` | runs every binary in `/out` as a fuzzer for a while to ensure it works. | +| `coverage ` | generate a coverage report for the given fuzzer. | + +# Examples + +- *Reproduce using latest OSS-Fuzz build:* + +
+docker run --rm -ti -v <testcase_path>:/testcase gcr.io/oss-fuzz/$PROJECT_NAME reproduce <fuzzer_name>
+
+ +- *Reproduce using local source checkout:* + +
+docker run --rm -ti -v <source_path>:/src/$PROJECT_NAME \
+                    -v <testcase_path>:/testcase gcr.io/oss-fuzz/$PROJECT_NAME \
+                    reproduce <fuzzer_name>
+
diff --git a/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/bad_build_check b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/bad_build_check new file mode 100644 index 0000000000000000000000000000000000000000..8aa901db6c654da365092f6764490b5f3e819668 --- /dev/null +++ b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/bad_build_check @@ -0,0 +1,494 @@ +#!/bin/bash -u +# Copyright 2017 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +################################################################################ + +# A minimal number of runs to test fuzz target with a non-empty input. +MIN_NUMBER_OF_RUNS=4 + +# The "example" target has 73 with ASan, 65 with UBSan, and 6648 with MSan. +# Real world targets have greater values (arduinojson: 407, zlib: 664). +# Mercurial's bdiff_fuzzer has 116 PCs when built with ASan. +THRESHOLD_FOR_NUMBER_OF_EDGES=100 + +# A fuzz target is supposed to have at least two functions, such as +# LLVMFuzzerTestOneInput and an API that is being called from there. +THRESHOLD_FOR_NUMBER_OF_FUNCTIONS=2 + +# Threshold values for different sanitizers used by instrumentation checks. +ASAN_CALLS_THRESHOLD_FOR_ASAN_BUILD=1000 +ASAN_CALLS_THRESHOLD_FOR_NON_ASAN_BUILD=0 + +# The value below can definitely be higher (like 500-1000), but avoid being too +# agressive here while still evaluating the DFT-based fuzzing approach. 
+DFSAN_CALLS_THRESHOLD_FOR_DFSAN_BUILD=100 +DFSAN_CALLS_THRESHOLD_FOR_NON_DFSAN_BUILD=0 + +MSAN_CALLS_THRESHOLD_FOR_MSAN_BUILD=1000 +# Some engines (e.g. honggfuzz) may make a very small number of calls to msan +# for memory poisoning. +MSAN_CALLS_THRESHOLD_FOR_NON_MSAN_BUILD=3 + +# Usually, a non UBSan build (e.g. ASan) has 165 calls to UBSan runtime. The +# majority of targets built with UBSan have 200+ UBSan calls, but there are +# some very small targets that may have < 200 UBSan calls even in a UBSan build. +# Use the threshold value of 168 (slightly > 165) for UBSan build. +UBSAN_CALLS_THRESHOLD_FOR_UBSAN_BUILD=168 + +# It would be risky to use the threshold value close to 165 for non UBSan build, +# as UBSan runtime may change any time and thus we could have different number +# of calls to UBSan runtime even in ASan build. With that, we use the threshold +# value of 200 that would detect unnecessary UBSan instrumentation in the vast +# majority of targets, except of a handful very small ones, which would not be +# a big concern either way as the overhead for them would not be significant. +UBSAN_CALLS_THRESHOLD_FOR_NON_UBSAN_BUILD=200 + +# ASan builds on i386 generally have about 250 UBSan runtime calls. +if [[ $ARCHITECTURE == 'i386' ]] +then + UBSAN_CALLS_THRESHOLD_FOR_NON_UBSAN_BUILD=280 +fi + + +# Verify that the given fuzz target is correctly built to run with a particular +# engine. +function check_engine { + local FUZZER=$1 + local FUZZER_NAME=$(basename $FUZZER) + local FUZZER_OUTPUT="/tmp/$FUZZER_NAME.output" + local CHECK_FAILED=0 + + if [[ "$FUZZING_ENGINE" == libfuzzer ]]; then + # Store fuzz target's output into a temp file to be used for further checks. + $FUZZER -seed=1337 -runs=$MIN_NUMBER_OF_RUNS &>$FUZZER_OUTPUT + CHECK_FAILED=$(egrep "ERROR: no interesting inputs were found. Is the code instrumented" -c $FUZZER_OUTPUT) + if (( $CHECK_FAILED > 0 )); then + echo "BAD BUILD: $FUZZER does not seem to have coverage instrumentation." 
+ cat $FUZZER_OUTPUT + # Bail out as the further check does not make any sense, there are 0 PCs. + return 1 + fi + + local NUMBER_OF_EDGES=$(grep -Po "INFO: Loaded [[:digit:]]+ module.*\(.*(counters|guards)\):[[:space:]]+\K[[:digit:]]+" $FUZZER_OUTPUT) + + # If a fuzz target fails to start, grep won't find anything, so bail out early to let check_startup_crash deal with it. + [[ -z "$NUMBER_OF_EDGES" ]] && return + + if (( $NUMBER_OF_EDGES < $THRESHOLD_FOR_NUMBER_OF_EDGES )); then + echo "BAD BUILD: $FUZZER seems to have only partial coverage instrumentation." + fi + elif [[ "$FUZZING_ENGINE" == afl ]]; then + AFL_FORKSRV_INIT_TMOUT=30000 AFL_NO_UI=1 SKIP_SEED_CORPUS=1 timeout --preserve-status -s INT 35s run_fuzzer $FUZZER_NAME &>$FUZZER_OUTPUT + CHECK_PASSED=$(egrep "All set and ready to roll" -c $FUZZER_OUTPUT) + if (( $CHECK_PASSED == 0 )); then + echo "BAD BUILD: fuzzing $FUZZER with afl-fuzz failed." + cat $FUZZER_OUTPUT + return 1 + fi + elif [[ "$FUZZING_ENGINE" == honggfuzz ]]; then + SKIP_SEED_CORPUS=1 timeout --preserve-status -s INT 20s run_fuzzer $FUZZER_NAME &>$FUZZER_OUTPUT + CHECK_PASSED=$(egrep "^Sz:[0-9]+ Tm:[0-9]+" -c $FUZZER_OUTPUT) + if (( $CHECK_PASSED == 0 )); then + echo "BAD BUILD: fuzzing $FUZZER with honggfuzz failed." + cat $FUZZER_OUTPUT + return 1 + fi + elif [[ "$FUZZING_ENGINE" == dataflow ]]; then + $FUZZER &> $FUZZER_OUTPUT + local NUMBER_OF_FUNCTIONS=$(grep -Po "INFO:\s+\K[[:digit:]]+(?=\s+instrumented function.*)" $FUZZER_OUTPUT) + [[ -z "$NUMBER_OF_FUNCTIONS" ]] && NUMBER_OF_FUNCTIONS=0 + if (( $NUMBER_OF_FUNCTIONS < $THRESHOLD_FOR_NUMBER_OF_FUNCTIONS )); then + echo "BAD BUILD: $FUZZER does not seem to be properly built in 'dataflow' config." + cat $FUZZER_OUTPUT + return 1 + fi + elif [[ "$FUZZING_ENGINE" == centipede \ + && ("${HELPER:-}" == True || "$SANITIZER" == none ) ]]; then + # Performs run test on unsanitized binaries with auxiliary sanitized + # binaries if they are built with helper.py. 
+ # Performs run test on unsanitized binaries without auxiliary sanitized + # binaries if they are from trial build and production build. + # TODO(Dongge): Support run test with sanitized binaries for trial and + # production build. + SKIP_SEED_CORPUS=1 timeout --preserve-status -s INT 20s run_fuzzer $FUZZER_NAME &>$FUZZER_OUTPUT + CHECK_PASSED=$(egrep "\[S0.0] begin-fuzz: ft: 0 corp: 0/0" -c $FUZZER_OUTPUT) + if (( $CHECK_PASSED == 0 )); then + echo "BAD BUILD: fuzzing $FUZZER with centipede failed." + cat $FUZZER_OUTPUT + return 1 + fi + fi + + return 0 +} + +# Verify that the given fuzz target has been built properly and works. +function check_startup_crash { + local FUZZER=$1 + local FUZZER_NAME=$(basename $FUZZER) + local FUZZER_OUTPUT="/tmp/$FUZZER_NAME.output" + local CHECK_PASSED=0 + + if [[ "$FUZZING_ENGINE" = libfuzzer ]]; then + # Skip seed corpus as there is another explicit check that uses seed corpora. + SKIP_SEED_CORPUS=1 run_fuzzer $FUZZER_NAME -seed=1337 -runs=$MIN_NUMBER_OF_RUNS &>$FUZZER_OUTPUT + CHECK_PASSED=$(egrep "Done $MIN_NUMBER_OF_RUNS runs" -c $FUZZER_OUTPUT) + elif [[ "$FUZZING_ENGINE" = afl ]]; then + AFL_FORKSRV_INIT_TMOUT=30000 AFL_NO_UI=1 SKIP_SEED_CORPUS=1 timeout --preserve-status -s INT 35s run_fuzzer $FUZZER_NAME &>$FUZZER_OUTPUT + if [ $(egrep "target binary (crashed|terminated)" -c $FUZZER_OUTPUT) -eq 0 ]; then + CHECK_PASSED=1 + fi + elif [[ "$FUZZING_ENGINE" = dataflow ]]; then + # TODO(https://github.com/google/oss-fuzz/issues/1632): add check for + # binaries compiled with dataflow engine when the interface becomes stable. + CHECK_PASSED=1 + else + # TODO: add checks for another fuzzing engines if possible. + CHECK_PASSED=1 + fi + + if [ "$CHECK_PASSED" -eq "0" ]; then + echo "BAD BUILD: $FUZZER seems to have either startup crash or exit:" + cat $FUZZER_OUTPUT + return 1 + fi + + return 0 +} + +# Mixed sanitizers check for ASan build. 
+function check_asan_build { + local FUZZER=$1 + local ASAN_CALLS=$2 + local DFSAN_CALLS=$3 + local MSAN_CALLS=$4 + local UBSAN_CALLS=$5 + + # Perform all the checks for more detailed error message. + if (( $ASAN_CALLS < $ASAN_CALLS_THRESHOLD_FOR_ASAN_BUILD )); then + echo "BAD BUILD: $FUZZER does not seem to be compiled with ASan." + return 1 + fi + + if (( $DFSAN_CALLS > $DFSAN_CALLS_THRESHOLD_FOR_NON_DFSAN_BUILD )); then + echo "BAD BUILD: ASan build of $FUZZER seems to be compiled with DFSan." + return 1 + fi + + if (( $MSAN_CALLS > $MSAN_CALLS_THRESHOLD_FOR_NON_MSAN_BUILD )); then + echo "BAD BUILD: ASan build of $FUZZER seems to be compiled with MSan." + return 1 + fi + + if (( $UBSAN_CALLS > $UBSAN_CALLS_THRESHOLD_FOR_NON_UBSAN_BUILD )); then + echo "BAD BUILD: ASan build of $FUZZER seems to be compiled with UBSan." + return 1 + fi + + return 0 +} + +# Mixed sanitizers check for DFSan build. +function check_dfsan_build { + local FUZZER=$1 + local ASAN_CALLS=$2 + local DFSAN_CALLS=$3 + local MSAN_CALLS=$4 + local UBSAN_CALLS=$5 + + # Perform all the checks for more detailed error message. + if (( $ASAN_CALLS > $ASAN_CALLS_THRESHOLD_FOR_NON_ASAN_BUILD )); then + echo "BAD BUILD: DFSan build of $FUZZER seems to be compiled with ASan." + return 1 + fi + + if (( $DFSAN_CALLS < $DFSAN_CALLS_THRESHOLD_FOR_DFSAN_BUILD )); then + echo "BAD BUILD: $FUZZER does not seem to be compiled with DFSan." + return 1 + fi + + if (( $MSAN_CALLS > $MSAN_CALLS_THRESHOLD_FOR_NON_MSAN_BUILD )); then + echo "BAD BUILD: ASan build of $FUZZER seems to be compiled with MSan." + return 1 + fi + + if (( $UBSAN_CALLS > $UBSAN_CALLS_THRESHOLD_FOR_NON_UBSAN_BUILD )); then + echo "BAD BUILD: ASan build of $FUZZER seems to be compiled with UBSan." + return 1 + fi + + return 0 +} + + +# Mixed sanitizers check for MSan build. 
+function check_msan_build { + local FUZZER=$1 + local ASAN_CALLS=$2 + local DFSAN_CALLS=$3 + local MSAN_CALLS=$4 + local UBSAN_CALLS=$5 + + # Perform all the checks for more detailed error message. + if (( $ASAN_CALLS > $ASAN_CALLS_THRESHOLD_FOR_NON_ASAN_BUILD )); then + echo "BAD BUILD: MSan build of $FUZZER seems to be compiled with ASan." + return 1 + fi + + if (( $DFSAN_CALLS > $DFSAN_CALLS_THRESHOLD_FOR_NON_DFSAN_BUILD )); then + echo "BAD BUILD: MSan build of $FUZZER seems to be compiled with DFSan." + return 1 + fi + + if (( $MSAN_CALLS < $MSAN_CALLS_THRESHOLD_FOR_MSAN_BUILD )); then + echo "BAD BUILD: $FUZZER does not seem to be compiled with MSan." + return 1 + fi + + if (( $UBSAN_CALLS > $UBSAN_CALLS_THRESHOLD_FOR_NON_UBSAN_BUILD )); then + echo "BAD BUILD: MSan build of $FUZZER seems to be compiled with UBSan." + return 1 + fi + + return 0 +} + +# Mixed sanitizers check for UBSan build. +function check_ubsan_build { + local FUZZER=$1 + local ASAN_CALLS=$2 + local DFSAN_CALLS=$3 + local MSAN_CALLS=$4 + local UBSAN_CALLS=$5 + + if [[ "$FUZZING_ENGINE" != libfuzzer ]]; then + # Ignore UBSan checks for fuzzing engines other than libFuzzer because: + # A) we (probably) are not going to use those with UBSan + # B) such builds show indistinguishable number of calls to UBSan + return 0 + fi + + # Perform all the checks for more detailed error message. + if (( $ASAN_CALLS > $ASAN_CALLS_THRESHOLD_FOR_NON_ASAN_BUILD )); then + echo "BAD BUILD: UBSan build of $FUZZER seems to be compiled with ASan." + return 1 + fi + + if (( $DFSAN_CALLS > $DFSAN_CALLS_THRESHOLD_FOR_NON_DFSAN_BUILD )); then + echo "BAD BUILD: UBSan build of $FUZZER seems to be compiled with DFSan." + return 1 + fi + + if (( $MSAN_CALLS > $MSAN_CALLS_THRESHOLD_FOR_NON_MSAN_BUILD )); then + echo "BAD BUILD: UBSan build of $FUZZER seems to be compiled with MSan." 
+ return 1 + fi + + if (( $UBSAN_CALLS < $UBSAN_CALLS_THRESHOLD_FOR_UBSAN_BUILD )); then + echo "BAD BUILD: $FUZZER does not seem to be compiled with UBSan." + return 1 + fi +} + +# Verify that the given fuzz target is compiled with correct sanitizer. +function check_mixed_sanitizers { + local FUZZER=$1 + local result=0 + local CALL_INSN= + + if [ "${FUZZING_LANGUAGE:-}" = "jvm" ]; then + # Sanitizer runtime is linked into the Jazzer driver, so this check does not + # apply. + return 0 + fi + + if [ "${FUZZING_LANGUAGE:-}" = "javascript" ]; then + # Jazzer.js currently does not support using sanitizers with native Node.js addons. + # This is not relevant anyways since supporting this will be done by preloading + # the sanitizers in the wrapper script starting Jazzer.js. + return 0 + fi + + if [ "${FUZZING_LANGUAGE:-}" = "python" ]; then + # Sanitizer runtime is loaded via LD_PRELOAD, so this check does not apply. + return 0 + fi + + # For fuzztest fuzzers point to the binary instead of launcher script. + if [[ $FUZZER == *"@"* ]]; then + FUZZER=(${FUZZER//@/ }[0]) + fi + + CALL_INSN= + if [[ $ARCHITECTURE == "x86_64" ]] + then + CALL_INSN="callq?\s+[0-9a-f]+\s+<" + elif [[ $ARCHITECTURE == "i386" ]] + then + CALL_INSN="call\s+[0-9a-f]+\s+<" + elif [[ $ARCHITECTURE == "aarch64" ]] + then + CALL_INSN="bl\s+[0-9a-f]+\s+<" + else + echo "UNSUPPORTED ARCHITECTURE" + exit 1 + fi + local ASAN_CALLS=$(objdump -dC $FUZZER | egrep "${CALL_INSN}__asan" -c) + local DFSAN_CALLS=$(objdump -dC $FUZZER | egrep "${CALL_INSN}__dfsan" -c) + local MSAN_CALLS=$(objdump -dC $FUZZER | egrep "${CALL_INSN}__msan" -c) + local UBSAN_CALLS=$(objdump -dC $FUZZER | egrep "${CALL_INSN}__ubsan" -c) + + + if [[ "$SANITIZER" = address ]]; then + check_asan_build $FUZZER $ASAN_CALLS $DFSAN_CALLS $MSAN_CALLS $UBSAN_CALLS + result=$? + elif [[ "$SANITIZER" = dataflow ]]; then + check_dfsan_build $FUZZER $ASAN_CALLS $DFSAN_CALLS $MSAN_CALLS $UBSAN_CALLS + result=$? 
+ elif [[ "$SANITIZER" = memory ]]; then + check_msan_build $FUZZER $ASAN_CALLS $DFSAN_CALLS $MSAN_CALLS $UBSAN_CALLS + result=$? + elif [[ "$SANITIZER" = undefined ]]; then + check_ubsan_build $FUZZER $ASAN_CALLS $DFSAN_CALLS $MSAN_CALLS $UBSAN_CALLS + result=$? + elif [[ "$SANITIZER" = thread ]]; then + # TODO(metzman): Implement this. + result=0 + fi + + return $result +} + +# Verify that the given fuzz target doesn't crash on the seed corpus. +function check_seed_corpus { + local FUZZER=$1 + local FUZZER_NAME="$(basename $FUZZER)" + local FUZZER_OUTPUT="/tmp/$FUZZER_NAME.output" + + if [[ "$FUZZING_ENGINE" != libfuzzer ]]; then + return 0 + fi + + # Set up common fuzzing arguments, otherwise "run_fuzzer" errors out. + if [ -z "$FUZZER_ARGS" ]; then + export FUZZER_ARGS="-rss_limit_mb=2560 -timeout=25" + fi + + bash -c "run_fuzzer $FUZZER_NAME -runs=0" &> $FUZZER_OUTPUT + + # Don't output anything if fuzz target hasn't crashed. + if [ $? -ne 0 ]; then + echo "BAD BUILD: $FUZZER has a crashing input in its seed corpus:" + cat $FUZZER_OUTPUT + return 1 + fi + + return 0 +} + +function check_architecture { + local FUZZER=$1 + local FUZZER_NAME=$(basename $FUZZER) + + if [ "${FUZZING_LANGUAGE:-}" = "jvm" ]; then + # The native dependencies of a JVM project are not packaged, but loaded + # dynamically at runtime and thus cannot be checked here. + return 0; + fi + + if [ "${FUZZING_LANGUAGE:-}" = "javascript" ]; then + # Jazzer.js fuzzers are wrapper scripts that start the fuzz target with + # the Jazzer.js CLI. + return 0; + fi + + if [ "${FUZZING_LANGUAGE:-}" = "python" ]; then + FUZZER=${FUZZER}.pkg + fi + + # For fuzztest fuzzers point to the binary instead of launcher script. 
+ if [[ $FUZZER == *"@"* ]]; then + FUZZER=(${FUZZER//@/ }[0]) + fi + + FILE_OUTPUT=$(file $FUZZER) + if [[ $ARCHITECTURE == "x86_64" ]] + then + echo $FILE_OUTPUT | grep "x86-64" > /dev/null + elif [[ $ARCHITECTURE == "i386" ]] + then + echo $FILE_OUTPUT | grep "80386" > /dev/null + elif [[ $ARCHITECTURE == "aarch64" ]] + then + echo $FILE_OUTPUT | grep "aarch64" > /dev/null + else + echo "UNSUPPORTED ARCHITECTURE" + return 1 + fi + result=$? + if [[ $result != 0 ]] + then + echo "BAD BUILD $FUZZER is not built for architecture: $ARCHITECTURE" + echo "file command output: $FILE_OUTPUT" + echo "check_mixed_sanitizers test will fail." + fi + return $result +} + +function main { + local FUZZER=$1 + local AUXILIARY_FUZZER=${2:-} + local checks_failed=0 + local result=0 + + export RUN_FUZZER_MODE="batch" + check_engine $FUZZER + result=$? + checks_failed=$(( $checks_failed + $result )) + + check_architecture $FUZZER + result=$? + checks_failed=$(( $checks_failed + $result )) + + if [[ "$FUZZING_ENGINE" == centipede \ + && "$SANITIZER" != none && "${HELPER:-}" == True ]]; then + check_mixed_sanitizers $AUXILIARY_FUZZER + else + check_mixed_sanitizers $FUZZER + fi + result=$? + checks_failed=$(( $checks_failed + $result )) + + check_startup_crash $FUZZER + result=$? + checks_failed=$(( $checks_failed + $result )) + + # TODO: re-enable after introducing bug auto-filing for bad builds. + # check_seed_corpus $FUZZER + return $checks_failed +} + + +if [ $# -ne 1 -a $# -ne 2 ]; then + echo "Usage: $0 []" + exit 1 +fi + +# Fuzz target path. +FUZZER=$1 +AUXILIARY_FUZZER=${2:-} + +main $FUZZER $AUXILIARY_FUZZER +exit $? 
diff --git a/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/coverage b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/coverage new file mode 100644 index 0000000000000000000000000000000000000000..585b4d457e753e12025344efd735c571b38fb580 --- /dev/null +++ b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/coverage @@ -0,0 +1,549 @@ +#!/bin/bash -u +# Copyright 2018 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +################################################################################ +cd $OUT + +if (( $# > 0 )); then + FUZZ_TARGETS="$@" +else + FUZZ_TARGETS="$(find . 
-maxdepth 1 -type f -executable -printf '%P\n' | \ + grep -v -x -F \ + -e 'llvm-symbolizer' \ + -e 'jazzer_agent_deploy.jar' \ + -e 'jazzer_driver' \ + -e 'jazzer_driver_with_sanitizer' \ + -e 'sanitizer_with_fuzzer.so')" +fi + +COVERAGE_OUTPUT_DIR=${COVERAGE_OUTPUT_DIR:-$OUT} + +DUMPS_DIR="$COVERAGE_OUTPUT_DIR/dumps" +FUZZERS_COVERAGE_DUMPS_DIR="$DUMPS_DIR/fuzzers_coverage" +MERGED_COVERAGE_DIR="$COVERAGE_OUTPUT_DIR/merged_coverage" +FUZZER_STATS_DIR="$COVERAGE_OUTPUT_DIR/fuzzer_stats" +TEXTCOV_REPORT_DIR="$COVERAGE_OUTPUT_DIR/textcov_reports" +LOGS_DIR="$COVERAGE_OUTPUT_DIR/logs" +REPORT_ROOT_DIR="$COVERAGE_OUTPUT_DIR/report" +REPORT_BY_TARGET_ROOT_DIR="$COVERAGE_OUTPUT_DIR/report_target" +PLATFORM=linux +REPORT_PLATFORM_DIR="$COVERAGE_OUTPUT_DIR/report/$PLATFORM" + +for directory in $DUMPS_DIR $FUZZER_STATS_DIR $LOGS_DIR $REPORT_ROOT_DIR $TEXTCOV_REPORT_DIR\ + $REPORT_PLATFORM_DIR $REPORT_BY_TARGET_ROOT_DIR $FUZZERS_COVERAGE_DUMPS_DIR $MERGED_COVERAGE_DIR; do + rm -rf $directory + mkdir -p $directory +done + +PROFILE_FILE="$DUMPS_DIR/merged.profdata" +SUMMARY_FILE="$REPORT_PLATFORM_DIR/summary.json" +COVERAGE_TARGET_FILE="$FUZZER_STATS_DIR/coverage_targets.txt" + +# Use path mapping, as $SRC directory from the builder is copied into $OUT/$SRC. +PATH_EQUIVALENCE_ARGS="-path-equivalence=/,$OUT" + +# It's important to use $COVERAGE_EXTRA_ARGS as the last argument, because it +# can contain paths to source files / directories which are positional args. +LLVM_COV_COMMON_ARGS="$PATH_EQUIVALENCE_ARGS \ + -ignore-filename-regex=.*src/libfuzzer/.* $COVERAGE_EXTRA_ARGS" + +# Options to extract branch coverage. +BRANCH_COV_ARGS="--show-branches=count --show-expansions" + +# Timeout for running a single fuzz target. +TIMEOUT=1h + +# This will be used by llvm-cov command to generate the actual report. +objects="" + +# Number of CPUs available, this is needed for running tests in parallel. +# Set the max number of parallel jobs to be the CPU count and a max of 10. 
+NPROC=$(nproc) +MAX_PARALLEL_COUNT=10 + +CORPUS_DIR=${CORPUS_DIR:-"/corpus"} + +function run_fuzz_target { + local target=$1 + + # '%1m' will produce separate dump files for every object. For example, if a + # fuzz target loads a shared library, we will have dumps for both of them. + local profraw_file="$DUMPS_DIR/$target.%1m.profraw" + local profraw_file_mask="$DUMPS_DIR/$target.*.profraw" + local profdata_file="$DUMPS_DIR/$target.profdata" + local corpus_real="$CORPUS_DIR/${target}" + + # -merge=1 requires an output directory, create a new, empty dir for that. + local corpus_dummy="$OUT/dummy_corpus_dir_for_${target}" + rm -rf $corpus_dummy && mkdir -p $corpus_dummy + + # Use -merge=1 instead of -runs=0 because merge is crash resistant and would + # let to get coverage using all corpus files even if there are crash inputs. + # Merge should not introduce any significant overhead compared to -runs=0, + # because (A) corpuses are already minimized; (B) we do not use sancov, and so + # libFuzzer always finishes merge with an empty output dir. + # Use 100s timeout instead of 25s as code coverage builds can be very slow. + local args="-merge=1 -timeout=100 $corpus_dummy $corpus_real" + + export LLVM_PROFILE_FILE=$profraw_file + timeout $TIMEOUT $OUT/$target $args &> $LOGS_DIR/$target.log + if (( $? != 0 )); then + echo "Error occured while running $target:" + cat $LOGS_DIR/$target.log + fi + + rm -rf $corpus_dummy + if (( $(du -c $profraw_file_mask | tail -n 1 | cut -f 1) == 0 )); then + # Skip fuzz targets that failed to produce profile dumps. + return 0 + fi + + # If necessary translate to latest profraw version. + if [[ $target == *"@"* ]]; then + # Extract fuzztest binary name from fuzztest wrapper script. + target=(${target//@/ }[0]) + fi + profraw_update.py $OUT/$target -i $profraw_file_mask + llvm-profdata merge -j=1 -sparse $profraw_file_mask -o $profdata_file + + # Delete unnecessary and (potentially) large .profraw files. 
+ rm $profraw_file_mask + + shared_libraries=$(coverage_helper shared_libs -build-dir=$OUT -object=$target) + + llvm-cov export -summary-only -instr-profile=$profdata_file -object=$target \ + $shared_libraries $LLVM_COV_COMMON_ARGS > $FUZZER_STATS_DIR/$target.json + + # For introspector. + llvm-cov show -instr-profile=$profdata_file -object=$target -line-coverage-gt=0 $shared_libraries $BRANCH_COV_ARGS $LLVM_COV_COMMON_ARGS > ${TEXTCOV_REPORT_DIR}/$target.covreport +} + +function run_go_fuzz_target { + local target=$1 + + echo "Running go target $target" + export FUZZ_CORPUS_DIR="$CORPUS_DIR/${target}/" + export FUZZ_PROFILE_NAME="$DUMPS_DIR/$target.perf" + + # setup for native go fuzzers + cd $OUT + mkdir -p "testdata/fuzz/${target}" + cp -r "${FUZZ_CORPUS_DIR}" "testdata/fuzz/" + + # rewrite libFuzzer corpus to Std Go corpus if native fuzzing + grep "TestFuzzCorpus" $target > /dev/null 2>&1 && $SYSGOPATH/bin/convertcorpus $target "testdata/fuzz/${target}" + cd - + + timeout $TIMEOUT $OUT/$target -test.coverprofile $DUMPS_DIR/$target.profdata &> $LOGS_DIR/$target.log + if (( $? != 0 )); then + echo "Error occured while running $target:" + cat $LOGS_DIR/$target.log + fi + + # cleanup after native go fuzzers + rm -r "${OUT}/testdata/fuzz/${target}" + + # The Go 1.18 fuzzers are renamed to "*_fuzz_.go" during "infra/helper.py build_fuzzers". + # They are are therefore refered to as "*_fuzz_.go" in the profdata files. + # Since the copies named "*_fuzz_.go" do not exist in the file tree during + # the coverage build, we change the references in the .profdata files + # to the original file names. 
+ #sed -i "s/_test.go_fuzz_.go/_test.go/g" $DUMPS_DIR/$target.profdata + # translate from golangish paths to current absolute paths + cat $OUT/$target.gocovpath | while read i; do sed -i $i $DUMPS_DIR/$target.profdata; done + # cf PATH_EQUIVALENCE_ARGS + sed -i 's=/='$OUT'/=' $DUMPS_DIR/$target.profdata + $SYSGOPATH/bin/gocovsum $DUMPS_DIR/$target.profdata > $FUZZER_STATS_DIR/$target.json +} + +function run_python_fuzz_target { + local target=$1 + local zipped_sources="$DUMPS_DIR/$target.deps.zip" + local corpus_real="$CORPUS_DIR/${target}" + # Write dummy stats file + echo "{}" > "$FUZZER_STATS_DIR/$target.json" + + # Run fuzzer + $OUT/$target $corpus_real -atheris_runs=$(ls -la $corpus_real | wc -l) > $LOGS_DIR/$target.log 2>&1 + if (( $? != 0 )); then + echo "Error happened getting coverage of $target" + echo "This is likely because Atheris did not exit gracefully" + cat $LOGS_DIR/$target.log + return 0 + fi + mv .coverage $OUT/.coverage_$target +} + +function run_java_fuzz_target { + local target=$1 + + local exec_file="$DUMPS_DIR/$target.exec" + local class_dump_dir="$DUMPS_DIR/${target}_classes/" + mkdir "$class_dump_dir" + local corpus_real="$CORPUS_DIR/${target}" + + # -merge=1 requires an output directory, create a new, empty dir for that. + local corpus_dummy="$OUT/dummy_corpus_dir_for_${target}" + rm -rf $corpus_dummy && mkdir -p $corpus_dummy + + # Use 100s timeout instead of 25s as code coverage builds can be very slow. + local jacoco_args="destfile=$exec_file,classdumpdir=$class_dump_dir,excludes=com.code_intelligence.jazzer.*\\:com.sun.tools.attach.VirtualMachine" + local args="-merge=1 -timeout=100 --nohooks \ + --additional_jvm_args=-javaagent\\:/opt/jacoco-agent.jar=$jacoco_args \ + $corpus_dummy $corpus_real" + + timeout $TIMEOUT $OUT/$target $args &> $LOGS_DIR/$target.log + if (( $? 
!= 0 )); then + echo "Error occured while running $target:" + cat $LOGS_DIR/$target.log + fi + + if (( $(du -c $exec_file | tail -n 1 | cut -f 1) == 0 )); then + # Skip fuzz targets that failed to produce .exec files. + echo "$target failed to produce .exec file." + return 0 + fi + + # Generate XML report only as input to jacoco_report_converter. + # Source files are not needed for the summary. + local xml_report="$DUMPS_DIR/${target}.xml" + local summary_file="$FUZZER_STATS_DIR/$target.json" + java -jar /opt/jacoco-cli.jar report $exec_file \ + --xml $xml_report \ + --classfiles $class_dump_dir + + # Write llvm-cov summary file. + jacoco_report_converter.py $xml_report $summary_file +} + +function run_javascript_fuzz_target { + local target=$1 + local corpus_real="$CORPUS_DIR/${target}" + + # -merge=1 requires an output directory, create a new, empty dir for that. + local corpus_dummy="$OUT/dummy_corpus_dir_for_${target}" + rm -rf $corpus_dummy && mkdir -p $corpus_dummy + + # IstanbulJS currently does not work when the tested program creates + # subprocesses. For this reason, we first minimize the corpus removing + # any crashing inputs so that we can report source-based code coverage + # with a single sweep over the minimized corpus + local merge_args="-merge=1 -timeout=100 $corpus_dummy $corpus_real" + timeout $TIMEOUT $OUT/$target $merge_args &> $LOGS_DIR/$target.log + + # nyc saves the coverage reports in a directory with the default name "coverage" + local coverage_dir="$DUMPS_DIR/coverage_dir_for_${target}" + rm -rf $coverage_dir && mkdir -p $coverage_dir + + local nyc_json_coverage_file="$coverage_dir/coverage-final.json" + local nyc_json_summary_file="$coverage_dir/coverage-summary.json" + + local args="-runs=0 $corpus_dummy" + local jazzerjs_args="--coverage --coverageDirectory $coverage_dir --coverageReporters json --coverageReporters json-summary" + + JAZZERJS_EXTRA_ARGS=$jazzerjs_args $OUT/$target $args &> $LOGS_DIR/$target.log + + if (( $? 
!= 0 )); then + echo "Error occured while running $target:" + cat $LOGS_DIR/$target.log + fi + + if [ ! -s $nyc_json_coverage_file ]; then + # Skip fuzz targets that failed to produce coverage-final.json file. + echo "$target failed to produce coverage-final.json file." + return 0 + fi + + cp $nyc_json_coverage_file $FUZZERS_COVERAGE_DUMPS_DIR/$target.json + + local summary_file="$FUZZER_STATS_DIR/$target.json" + + nyc_report_converter.py $nyc_json_summary_file $summary_file +} + +function generate_html { + local profdata=$1 + local shared_libraries=$2 + local objects=$3 + local output_dir=$4 + + rm -rf "$output_dir" + mkdir -p "$output_dir/$PLATFORM" + + local llvm_cov_args="-instr-profile=$profdata $objects $LLVM_COV_COMMON_ARGS" + llvm-cov show -format=html -output-dir=$output_dir -Xdemangler rcfilt $llvm_cov_args + + # Export coverage summary in JSON format. + local summary_file=$output_dir/$PLATFORM/summary.json + + llvm-cov export -summary-only $llvm_cov_args > $summary_file + + coverage_helper -v post_process -src-root-dir=/ -summary-file=$summary_file \ + -output-dir=$output_dir $PATH_EQUIVALENCE_ARGS +} + +export SYSGOPATH=$GOPATH +export GOPATH=$OUT/$GOPATH +# Run each fuzz target, generate raw coverage dumps. +for fuzz_target in $FUZZ_TARGETS; do + # Test if fuzz target is a golang one. + if [[ $FUZZING_LANGUAGE == "go" ]]; then + # Continue if not a fuzz target. + if [[ $FUZZING_ENGINE != "none" ]]; then + grep "FUZZ_CORPUS_DIR" $fuzz_target > /dev/null 2>&1 || grep "testing\.T" $fuzz_target > /dev/null 2>&1 || continue + fi + # Log the target in the targets file. + echo ${fuzz_target} >> $COVERAGE_TARGET_FILE + + # Run the coverage collection. + run_go_fuzz_target $fuzz_target & + elif [[ $FUZZING_LANGUAGE == "python" ]]; then + echo "Entering python fuzzing" + # Log the target in the targets file. + echo ${fuzz_target} >> $COVERAGE_TARGET_FILE + + # Run the coverage collection. 
+ run_python_fuzz_target $fuzz_target + elif [[ $FUZZING_LANGUAGE == "jvm" ]]; then + # Continue if not a fuzz target. + if [[ $FUZZING_ENGINE != "none" ]]; then + grep "LLVMFuzzerTestOneInput" $fuzz_target > /dev/null 2>&1 || continue + fi + + echo "Running $fuzz_target" + # Log the target in the targets file. + echo ${fuzz_target} >> $COVERAGE_TARGET_FILE + + # Run the coverage collection. + run_java_fuzz_target $fuzz_target & + elif [[ $FUZZING_LANGUAGE == "javascript" ]]; then + # Continue if not a fuzz target. + if [[ $FUZZING_ENGINE != "none" ]]; then + grep "LLVMFuzzerTestOneInput" $fuzz_target > /dev/null 2>&1 || continue + fi + + echo "Running $fuzz_target" + # Log the target in the targets file. + echo ${fuzz_target} >> $COVERAGE_TARGET_FILE + + # Run the coverage collection. + run_javascript_fuzz_target $fuzz_target & + else + # Continue if not a fuzz target. + if [[ $FUZZING_ENGINE != "none" ]]; then + grep "LLVMFuzzerTestOneInput" $fuzz_target > /dev/null 2>&1 || continue + fi + + echo "Running $fuzz_target" + # Log the target in the targets file. + echo ${fuzz_target} >> $COVERAGE_TARGET_FILE + + # Run the coverage collection. + run_fuzz_target $fuzz_target & + + # Rewrite object if its a FUZZTEST target + if [[ $fuzz_target == *"@"* ]]; then + # Extract fuzztest binary name from fuzztest wrapper script. + fuzz_target=(${fuzz_target//@/ }[0]) + fi + if [[ -z $objects ]]; then + # The first object needs to be passed without -object= flag. + objects="$fuzz_target" + else + objects="$objects -object=$fuzz_target" + fi + fi + + + # Limit the number of processes to be spawned. + n_child_proc=$(jobs -rp | wc -l) + while [[ "$n_child_proc" -eq "$NPROC" || "$n_child_proc" -gt "$MAX_PARALLEL_COUNT" ]]; do + sleep 4 + n_child_proc=$(jobs -rp | wc -l) + done +done + +# Wait for background processes to finish. 
+wait + +if [[ $FUZZING_LANGUAGE == "go" ]]; then + echo $DUMPS_DIR + $SYSGOPATH/bin/gocovmerge $DUMPS_DIR/*.profdata > fuzz.cov + gotoolcover -html=fuzz.cov -o $REPORT_ROOT_DIR/index.html + $SYSGOPATH/bin/gocovsum fuzz.cov > $SUMMARY_FILE + cp $REPORT_ROOT_DIR/index.html $REPORT_PLATFORM_DIR/index.html + $SYSGOPATH/bin/pprof-merge $DUMPS_DIR/*.perf.cpu.prof + mv merged.data $REPORT_ROOT_DIR/cpu.prof + $SYSGOPATH/bin/pprof-merge $DUMPS_DIR/*.perf.heap.prof + mv merged.data $REPORT_ROOT_DIR/heap.prof + #TODO some proxy for go tool pprof -http=127.0.0.1:8001 $DUMPS_DIR/cpu.prof + echo "Finished generating code coverage report for Go fuzz targets." +elif [[ $FUZZING_LANGUAGE == "python" ]]; then + # Extract source files from all dependency zip folders + mkdir -p /pythoncovmergedfiles/medio + PYCOVDIR=/pycovdir/ + mkdir $PYCOVDIR + for fuzzer in $FUZZ_TARGETS; do + fuzzer_deps=${fuzzer}.pkg.deps.zip + unzip $OUT/${fuzzer_deps} + rsync -r ./medio /pythoncovmergedfiles/medio + rm -rf ./medio + + # Translate paths in unzipped folders to paths that we can use + mv $OUT/.coverage_$fuzzer .coverage + python3 /usr/local/bin/python_coverage_runner_help.py translate /pythoncovmergedfiles/medio + cp .new_coverage $PYCOVDIR/.coverage_$fuzzer + cp .new_coverage $OUT/coverage_d_$fuzzer + done + + # Combine coverage + cd $PYCOVDIR + python3 /usr/local/bin/python_coverage_runner_help.py combine .coverage_* + python3 /usr/local/bin/python_coverage_runner_help.py html + # Produce all_cov file used by fuzz introspector. + python3 /usr/local/bin/python_coverage_runner_help.py json -o ${TEXTCOV_REPORT_DIR}/all_cov.json + + # Generate .json with similar format to llvm-cov output. 
+ python3 /usr/local/bin/python_coverage_runner_help.py \ + convert-to-summary-json ${TEXTCOV_REPORT_DIR}/all_cov.json $SUMMARY_FILE + + # Copy coverage date out + cp htmlcov/status.json ${TEXTCOV_REPORT_DIR}/html_status.json + + mv htmlcov/* $REPORT_PLATFORM_DIR/ + mv .coverage_* $REPORT_PLATFORM_DIR/ +elif [[ $FUZZING_LANGUAGE == "jvm" ]]; then + + # From this point on the script does not tolerate any errors. + set -e + + # Merge .exec files from the individual targets. + jacoco_merged_exec=$DUMPS_DIR/jacoco.merged.exec + java -jar /opt/jacoco-cli.jar merge $DUMPS_DIR/*.exec \ + --destfile $jacoco_merged_exec + + # Prepare classes directory for jacoco process + classes_dir=$DUMPS_DIR/classes + mkdir $classes_dir + + # Only copy class files found in $OUT/$SRC to ensure they are + # lively compiled from the project, avoiding inclusion of + # dependency classes. This also includes the fuzzer classes. + find "$OUT/$SRC" -type f -name "*.class" | while read -r class_file; do + # Skip module-info.class + if [[ "$(basename "$class_file")" == "module-info.class" ]]; then + continue + fi + + # Use javap to extract the fully qualified name of the class and copy it to $classes_dir + fqn=$(javap -verbose "$class_file" 2>/dev/null | grep "this_class:" | grep -oP '(?<=// ).*') + if [ -n "$fqn" ]; then + mkdir -p $classes_dir/$(dirname $fqn) + cp $class_file $classes_dir/$fqn.class + fi + done + + # Heuristically determine source directories based on Maven structure. + # Always include the $SRC root as it likely contains the fuzzer sources. + sourcefiles_args=(--sourcefiles $OUT/$SRC) + source_dirs=$(find $OUT/$SRC -type d -name 'java') + for source_dir in $source_dirs; do + sourcefiles_args+=(--sourcefiles "$source_dir") + done + + # Generate HTML and XML reports. 
+ xml_report=$REPORT_PLATFORM_DIR/index.xml + java -jar /opt/jacoco-cli.jar report $jacoco_merged_exec \ + --html $REPORT_PLATFORM_DIR \ + --xml $xml_report \ + --classfiles $classes_dir \ + "${sourcefiles_args[@]}" + + # Also serve the raw exec file and XML report, which can be useful for + # automated analysis. + cp $jacoco_merged_exec $REPORT_PLATFORM_DIR/jacoco.exec + cp $xml_report $REPORT_PLATFORM_DIR/jacoco.xml + cp $xml_report $TEXTCOV_REPORT_DIR/jacoco.xml + + # Write llvm-cov summary file. + jacoco_report_converter.py $xml_report $SUMMARY_FILE + + set +e +elif [[ $FUZZING_LANGUAGE == "javascript" ]]; then + + # From this point on the script does not tolerate any errors. + set -e + + json_report=$MERGED_COVERAGE_DIR/coverage.json + nyc merge $FUZZERS_COVERAGE_DUMPS_DIR $json_report + + nyc report -t $MERGED_COVERAGE_DIR --report-dir $REPORT_PLATFORM_DIR --reporter=html --reporter=json-summary + + nyc_json_summary_file=$REPORT_PLATFORM_DIR/coverage-summary.json + + # Write llvm-cov summary file. + nyc_report_converter.py $nyc_json_summary_file $SUMMARY_FILE + + set +e +else + + # From this point on the script does not tolerate any errors. + set -e + + # Merge all dumps from the individual targets. + rm -f $PROFILE_FILE + llvm-profdata merge -sparse $DUMPS_DIR/*.profdata -o $PROFILE_FILE + + # TODO(mmoroz): add script from Chromium for rendering directory view reports. + # The first path in $objects does not have -object= prefix (llvm-cov format). + shared_libraries=$(coverage_helper shared_libs -build-dir=$OUT -object=$objects) + objects="$objects $shared_libraries" + + generate_html $PROFILE_FILE "$shared_libraries" "$objects" "$REPORT_ROOT_DIR" + + # Per target reports. + for fuzz_target in $FUZZ_TARGETS; do + if [[ $fuzz_target == *"@"* ]]; then + profdata_path=$DUMPS_DIR/$fuzz_target.profdata + report_dir=$REPORT_BY_TARGET_ROOT_DIR/$fuzz_target + # Extract fuzztest binary name from fuzztest wrapper script. 
+ fuzz_target=(${fuzz_target//@/ }[0]) + else + profdata_path=$DUMPS_DIR/$fuzz_target.profdata + report_dir=$REPORT_BY_TARGET_ROOT_DIR/$fuzz_target + fi + if [[ ! -f "$profdata_path" ]]; then + echo "WARNING: $fuzz_target has no profdata generated." + continue + fi + + generate_html $profdata_path "$shared_libraries" "$fuzz_target" "$report_dir" + done + + set +e +fi + +# Make sure report is readable. +chmod -R +r $REPORT_ROOT_DIR $REPORT_BY_TARGET_ROOT_DIR +find $REPORT_ROOT_DIR $REPORT_BY_TARGET_ROOT_DIR -type d -exec chmod +x {} + + +# HTTP_PORT is optional. +set +u +if [[ -n $HTTP_PORT ]]; then + # Serve the report locally. + echo "Serving the report on http://127.0.0.1:$HTTP_PORT/linux/index.html" + cd $REPORT_ROOT_DIR + python3 -m http.server $HTTP_PORT +fi diff --git a/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/coverage_helper b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/coverage_helper new file mode 100644 index 0000000000000000000000000000000000000000..4d29ceac8f5d048a43a7ec6cc0524d7a23936781 --- /dev/null +++ b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/coverage_helper @@ -0,0 +1,17 @@ +#!/bin/bash -u +# Copyright 2018 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +################################################################################ +python3 $CODE_COVERAGE_SRC/coverage_utils.py $@ diff --git a/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/download_corpus b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/download_corpus new file mode 100644 index 0000000000000000000000000000000000000000..1b7ebe8a263b5c3410dcbbd5b8e861c5cf134d4a --- /dev/null +++ b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/download_corpus @@ -0,0 +1,30 @@ +#!/bin/bash -u +# Copyright 2018 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +################################################################################ + +if (( $# < 1 )); then + echo "Usage: $0 \"path_download_to url_download_from\" (can be repeated)" >&2 + exit 1 +fi + +for pair in "$@"; do + read path url <<< "$pair" + wget -q -O $path $url +done + +# Always exit with 0 as we do not track wget return codes and should not rely +# on the latest command execution. 
+exit 0 diff --git a/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/generate_differential_cov_report.py b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/generate_differential_cov_report.py new file mode 100644 index 0000000000000000000000000000000000000000..3f9fc22230c71a5c7f01068a871d44942489aad0 --- /dev/null +++ b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/generate_differential_cov_report.py @@ -0,0 +1,228 @@ +#!/usr/bin/env python3 +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +################################################################################ +"""Script for generating differential coverage reports. 
generate_differential_cov_report.py \
  <minuend_profdata_dir> <subtrahend_profdata_dir> <difference_output_dir>
+ [str(num) for num in self.counter_values] + return '\n'.join(lines) + + def subtract(self, subtrahend_prof): + """Subtract this other function from this function.""" + if not subtrahend_prof: + print(self.function, 'has no subtrahend') + # Nothing to subtract. + return + self.counter_values = [ + max(counter1 - counter2, 0) for counter1, counter2 in zip( + self.counter_values, subtrahend_prof.counter_values) + ] + + +def get_profdata_files(directory): + """Returns profdata files in |directory|.""" + profdatas = [] + for filename in os.listdir(directory): + filename = os.path.join(directory, filename) + if filename.endswith('.profdata'): + profdatas.append(filename) + return profdatas + + +def convert_profdata_to_text(profdata): + """Convert a profdata binary file to a profdata text file.""" + profdata_text = f'{profdata}.txt' + if os.path.exists(profdata_text): + os.remove(profdata_text) + command = [ + 'llvm-profdata', 'merge', '-j=1', '-sparse', profdata, '--text', '-o', + profdata_text + ] + print(command) + subprocess.run(command, check=True) + return profdata_text + + +def convert_text_profdata_to_bin(profdata_text): + """Convert a profdata text file to a profdata binary file.""" + profdata = profdata_text.replace('.txt', '').replace('.profdata', + '') + '.profdata' + print('bin profdata', profdata) + if os.path.exists(profdata): + os.remove(profdata) + command = [ + 'llvm-profdata', 'merge', '-j=1', '-sparse', profdata_text, '-o', profdata + ] + print(command) + subprocess.run(command, check=True) + return profdata + + +def get_difference(minuend_filename, subtrahend_filename): + """Subtract subtrahend_filename from minuend_filename.""" + with open(minuend_filename, 'r', encoding='utf-8') as minuend_file: + print('minuend', minuend_filename) + minuend = ProfData(minuend_file.read()) + with open(subtrahend_filename, 'r', encoding='utf-8') as subtrahend_file: + print('subtrahend', subtrahend_filename) + subtrahend = ProfData(subtrahend_file.read()) + + 
minuend.subtract(subtrahend) + return minuend + + +def profdatas_to_objects(profdatas): + """Get the corresponding objects for each profdata.""" + return [ + os.path.splitext(os.path.basename(profdata))[0] for profdata in profdatas + ] + + +def generate_differential_cov_reports(minuend_profdatas, subtrahend_profdatas, + difference_dir): + """Calculate the differences between all profdatas and generate differential + coverage reports.""" + profdata_objects = profdatas_to_objects(minuend_profdatas) + real_profdata_objects = [ + binobject for binobject in profdata_objects if binobject != 'merged' + ] + for minuend, subtrahend, binobject in zip(minuend_profdatas, + subtrahend_profdatas, + profdata_objects): + minuend_text = convert_profdata_to_text(minuend) + subtrahend_text = convert_profdata_to_text(subtrahend) + difference = get_difference(minuend_text, subtrahend_text) + basename = os.path.basename(minuend_text) + difference_text = os.path.join(difference_dir, basename) + with open(difference_text, 'w', encoding='utf-8') as file_handle: + file_handle.write(difference.to_string()) + difference_profdata = convert_text_profdata_to_bin(difference_text) + if not difference_profdata.endswith('merged.profdata'): + generate_html_report(difference_profdata, [binobject], + os.path.join(difference_dir, binobject)) + else: + generate_html_report(difference_profdata, real_profdata_objects, + os.path.join(difference_dir, 'merged')) + + +def generate_html_report(profdata, objects, directory): + """Generate an HTML coverage report.""" + # TODO(metzman): Deal with shared libs. 
+ html_dir = os.path.join(directory, 'reports') + if os.path.exists(html_dir): + os.remove(html_dir) + os.makedirs(html_dir) + out_dir = os.getenv('OUT', '/out') + command = [ + 'llvm-cov', 'show', f'-path-equivalence=/,{out_dir}', '-format=html', + '-Xdemangler', 'rcfilt', f'-instr-profile={profdata}' + ] + + objects = [os.path.join(out_dir, binobject) for binobject in objects] + command += objects + ['-o', html_dir] + print(' '.join(command)) + subprocess.run(command, check=True) + + +def main(): + """Generate differential coverage reports.""" + if len(sys.argv) != 4: + print( + f'Usage: {sys.argv[0]} ') + minuend_dir = sys.argv[1] + subtrahend_dir = sys.argv[2] + difference_dir = sys.argv[3] + if os.path.exists(difference_dir): + shutil.rmtree(difference_dir) + os.makedirs(difference_dir, exist_ok=True) + minuend_profdatas = get_profdata_files(minuend_dir) + subtrahend_profdatas = get_profdata_files(subtrahend_dir) + generate_differential_cov_reports(minuend_profdatas, subtrahend_profdatas, + difference_dir) + + +if __name__ == '__main__': + main() diff --git a/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/install_deps.sh b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/install_deps.sh new file mode 100644 index 0000000000000000000000000000000000000000..fc0569b339add75ce09c0772fbaa938a4aca0db1 --- /dev/null +++ b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/install_deps.sh @@ -0,0 +1,37 @@ +#!/bin/bash -eux +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +################################################################################ + +# Install dependencies in a platform-aware way. + +apt-get update && apt-get install -y \ + binutils \ + file \ + ca-certificates \ + fonts-dejavu \ + git \ + libcap2 \ + rsync \ + unzip \ + wget \ + zip --no-install-recommends + +case $(uname -m) in + x86_64) + # We only need to worry about i386 if we are on x86_64. + apt-get install -y lib32gcc1 libc6-i386 + ;; +esac diff --git a/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/install_go.sh b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/install_go.sh new file mode 100644 index 0000000000000000000000000000000000000000..4d6066b0f0a8c868d34099204b18c9ce140f09e1 --- /dev/null +++ b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/install_go.sh @@ -0,0 +1,41 @@ +#!/bin/bash -eux +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +################################################################################ + +# Install go on x86_64, don't do anything on ARM. + +case $(uname -m) in + x86_64) + # Download and install Go 1.19. 
+ wget -q https://storage.googleapis.com/golang/getgo/installer_linux -O $SRC/installer_linux + chmod +x $SRC/installer_linux + SHELL="bash" $SRC/installer_linux -version 1.19 + rm $SRC/installer_linux + # Set up Golang coverage modules. + printf $(find . -name gocoverage) + cd $GOPATH/gocoverage && /root/.go/bin/go install ./... + cd convertcorpus && /root/.go/bin/go install . + cd /root/.go/src/cmd/cover && /root/.go/bin/go build && mv cover $GOPATH/bin/gotoolcover + ;; + aarch64) + # Don't install go because installer is not provided. + echo "Not installing go: aarch64." + ;; + *) + echo "Error: unsupported architecture: $(uname -m)" + exit 1 + ;; +esac diff --git a/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/install_java.sh b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/install_java.sh new file mode 100644 index 0000000000000000000000000000000000000000..1cf3855265031837b0d64058de3e7742d9383934 --- /dev/null +++ b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/install_java.sh @@ -0,0 +1,46 @@ +#!/bin/bash -eux +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +################################################################################ + +# Install java in a platform-aware way. 
+ +ARCHITECTURE= +case $(uname -m) in + x86_64) + ARCHITECTURE=x64 + ;; + aarch64) + ARCHITECTURE=aarch64 + ;; + *) + echo "Error: unsupported architecture: $(uname -m)" + exit 1 + ;; +esac + +wget -q https://download.java.net/java/GA/jdk17.0.2/dfd4a8d0985749f896bed50d7138ee7f/8/GPL/openjdk-17.0.2_linux-"$ARCHITECTURE"_bin.tar.gz -O /tmp/openjdk-17.0.2_linux-"$ARCHITECTURE"_bin.tar.gz +wget -q https://download.java.net/java/GA/jdk15.0.2/0d1cfde4252546c6931946de8db48ee2/7/GPL/openjdk-15.0.2_linux-"$ARCHITECTURE"_bin.tar.gz -O /tmp/openjdk-15.0.2_linux-"$ARCHITECTURE"_bin.tar.gz +cd /tmp +mkdir -p $JAVA_HOME +tar -xz --strip-components=1 -f openjdk-17.0.2_linux-"$ARCHITECTURE"_bin.tar.gz --directory $JAVA_HOME +rm -f openjdk-17.0.2_linux-"$ARCHITECTURE"_bin.tar.gz +rm -rf $JAVA_HOME/jmods $JAVA_HOME/lib/src.zip + +# Install OpenJDK 15 and trim its size by removing unused components. Some projects only run with Java 15. +mkdir -p $JAVA_15_HOME +tar -xz --strip-components=1 -f openjdk-15.0.2_linux-"$ARCHITECTURE"_bin.tar.gz --directory $JAVA_15_HOME +rm -f openjdk-15.0.2_linux-"$ARCHITECTURE"_bin.tar.gz +rm -rf $JAVA_15_HOME/jmods $JAVA_15_HOME/lib/src.zip diff --git a/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/install_javascript.sh b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/install_javascript.sh new file mode 100644 index 0000000000000000000000000000000000000000..183cdb418569b281b83eb1130038ea47dd219020 --- /dev/null +++ b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/install_javascript.sh @@ -0,0 +1,30 @@ +#!/bin/bash -eux +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +################################################################################ +# see installation instructions: https://github.com/nodesource/distributions#available-architectures +apt-get update +apt-get install -y ca-certificates curl gnupg +mkdir -p /etc/apt/keyrings +curl -fsSL https://deb.nodesource.com/gpgkey/nodesource-repo.gpg.key | gpg --dearmor -o /etc/apt/keyrings/nodesource.gpg + +NODE_MAJOR=20 +echo "deb [signed-by=/etc/apt/keyrings/nodesource.gpg] https://deb.nodesource.com/node_$NODE_MAJOR.x nodistro main" | tee /etc/apt/sources.list.d/nodesource.list + +apt-get update +apt-get install nodejs -y + +# Install latest versions of nyc for source-based coverage reporting +npm install --global nyc diff --git a/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/jacoco_report_converter.py b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/jacoco_report_converter.py new file mode 100644 index 0000000000000000000000000000000000000000..bf9d89faf3a10fb1e7c3e4f7eb49538567bb8a00 --- /dev/null +++ b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/jacoco_report_converter.py @@ -0,0 +1,174 @@ +#!/usr/bin/env python3 +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +################################################################################ +"""Helper script for creating an llvm-cov style JSON summary from a JaCoCo XML +report.""" +import json +import os +import sys +import xml.etree.ElementTree as ET + + +def convert(xml): + """Turns a JaCoCo XML report into an llvm-cov JSON summary.""" + summary = { + 'type': 'oss-fuzz.java.coverage.json.export', + 'version': '1.0.0', + 'data': [{ + 'totals': {}, + 'files': [], + }], + } + + report = ET.fromstring(xml) + totals = make_element_summary(report) + summary['data'][0]['totals'] = totals + + # Since Java compilation does not track source file location, we match + # coverage info to source files via the full class name, e.g. we search for + # a path in /out/src ending in foo/bar/Baz.java for the class foo.bar.Baz. + # Under the assumptions that a given project only ever contains a single + # version of a class and that no class name appears as a suffix of another + # class name, we can assign coverage info to every source file matched in that + # way. + src_files = list_src_files() + + for class_element in report.findall('./package/class'): + # Skip fuzzer classes + if is_fuzzer_class(class_element): + continue + + # Skip non class elements + if 'sourcefilename' not in class_element.attrib: + continue + + class_name = class_element.attrib['name'] + package_name = os.path.dirname(class_name) + basename = class_element.attrib['sourcefilename'] + # This path is 'foo/Bar.java' for the class element + # . 
+ canonical_path = os.path.join(package_name, basename)
+
+ class_summary = make_element_summary(class_element)
+ for src_file in relative_to_src_path(src_files, canonical_path):
+ summary['data'][0]['files'].append({
+ 'filename': src_file,
+ 'summary': class_summary,
+ })
+
+ return json.dumps(summary)
+
+
+def list_src_files():
+ """Returns a map from basename to full path for all files in $OUT/$SRC."""
+ filename_to_paths = {}
+ out_path = os.environ['OUT'] + '/'
+ src_path = os.environ['SRC']
+ src_in_out = out_path + src_path
+ for dirpath, _, filenames in os.walk(src_in_out):
+ for filename in filenames:
+ full_path = dirpath + '/' + filename
+ # Map /out//src/... to /src/...
+ file_path = full_path[len(out_path):]
+ filename_to_paths.setdefault(filename, []).append(file_path)
+ return filename_to_paths
+
+
+def is_fuzzer_class(class_element):
+ """Check if the class is fuzzer class."""
+ method_elements = class_element.find('./method[@name=\"fuzzerTestOneInput\"]')
+ # Element.find() returns None when there is no match, but a matched
+ # Element with no children is ALSO falsy, so a plain truthiness test
+ # can misclassify a fuzzer class. Compare against None explicitly.
+ return method_elements is not None
+
+
+def relative_to_src_path(src_files, canonical_path):
+ """Returns all paths in src_files ending in canonical_path."""
+ basename = os.path.basename(canonical_path)
+ if basename not in src_files:
+ return []
+ candidate_paths = src_files[basename]
+ return [
+ path for path in candidate_paths if path.endswith("/" + canonical_path)
+ ]
+
+
+def make_element_summary(element):
+ """Returns a coverage summary for an element in the XML report."""
+ summary = {}
+
+ function_counter = element.find('./counter[@type=\'METHOD\']')
+ summary['functions'] = make_counter_summary(function_counter)
+
+ line_counter = element.find('./counter[@type=\'LINE\']')
+ summary['lines'] = make_counter_summary(line_counter)
+
+ # JaCoCo tracks branch coverage, which counts the covered control-flow edges
+ # between llvm-cov's regions instead of the covered regions themselves. For
+ # non-trivial code parts, the difference is usually negligible.
However, if + # all methods of a class consist of a single region only (no branches), + # JaCoCo does not report any branch coverage even if there is instruction + # coverage. Since this would give incorrect results for CI Fuzz purposes, we + # increase the regions counter by 1 if there is any amount of instruction + # coverage. + instruction_counter = element.find('./counter[@type=\'INSTRUCTION\']') + has_some_coverage = instruction_counter is not None and int( + instruction_counter.attrib["covered"]) > 0 + branch_covered_adjustment = 1 if has_some_coverage else 0 + region_counter = element.find('./counter[@type=\'BRANCH\']') + summary['regions'] = make_counter_summary( + region_counter, covered_adjustment=branch_covered_adjustment) + + return summary + + +def make_counter_summary(counter_element, covered_adjustment=0): + """Turns a JaCoCo element into an llvm-cov totals entry.""" + summary = {} + covered = covered_adjustment + missed = 0 + if counter_element is not None: + covered += int(counter_element.attrib['covered']) + missed += int(counter_element.attrib['missed']) + summary['covered'] = covered + summary['notcovered'] = missed + summary['count'] = summary['covered'] + summary['notcovered'] + if summary['count'] != 0: + summary['percent'] = (100.0 * summary['covered']) / summary['count'] + else: + summary['percent'] = 0 + return summary + + +def main(): + """Produces an llvm-cov style JSON summary from a JaCoCo XML report.""" + if len(sys.argv) != 3: + sys.stderr.write('Usage: %s \n' % + sys.argv[0]) + return 1 + + with open(sys.argv[1], 'r') as xml_file: + xml_report = xml_file.read() + json_summary = convert(xml_report) + with open(sys.argv[2], 'w') as json_file: + json_file.write(json_summary) + + return 0 + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/nyc_report_converter.py b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/nyc_report_converter.py 
new file mode 100644 index 0000000000000000000000000000000000000000..53044754c26b2a74a6b7afa7f5f22b52881767d1 --- /dev/null +++ b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/nyc_report_converter.py @@ -0,0 +1,80 @@ +#!/usr/bin/env python3 +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +################################################################################ +"""Helper script for creating a llvm-cov style JSON summary from a nyc +JSON summary.""" +import json +import sys + + +def convert(nyc_json_summary): + """Turns a nyc JSON report into a llvm-cov JSON summary.""" + summary = { + 'type': + 'oss-fuzz.javascript.coverage.json.export', + 'version': + '1.0.0', + 'data': [{ + 'totals': + file_summary(nyc_json_summary['total']), + 'files': [{ + 'filename': src_file, + 'summary': file_summary(nyc_json_summary[src_file]) + } for src_file in nyc_json_summary if src_file != 'total'], + }], + } + + return json.dumps(summary) + + +def file_summary(nyc_file_summary): + """Returns a summary for a given file in the nyc JSON summary report.""" + return { + 'functions': element_summary(nyc_file_summary['functions']), + 'lines': element_summary(nyc_file_summary['lines']), + 'regions': element_summary(nyc_file_summary['branches']) + } + + +def element_summary(element): + """Returns a summary of a coverage element in the nyc JSON summary + of the file""" + return { + 'count': element['total'], + 'covered': 
element['covered'], + 'notcovered': element['total'] - element['covered'] - element['skipped'], + 'percent': element['pct'] if element['pct'] != 'Unknown' else 0 + } + + +def main(): + """Produces a llvm-cov style JSON summary from a nyc JSON summary.""" + if len(sys.argv) != 3: + sys.stderr.write('Usage: %s \n' % + sys.argv[0]) + return 1 + + with open(sys.argv[1], 'r') as nyc_json_summary_file: + nyc_json_summary = json.load(nyc_json_summary_file) + json_summary = convert(nyc_json_summary) + with open(sys.argv[2], 'w') as json_output_file: + json_output_file.write(json_summary) + + return 0 + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/parse_options.py b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/parse_options.py new file mode 100644 index 0000000000000000000000000000000000000000..6612a309587f82269f9af5f00d178940d6932e06 --- /dev/null +++ b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/parse_options.py @@ -0,0 +1,58 @@ +#!/usr/bin/env python3 +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +################################################################################ +"""Helper script for parsing custom fuzzing options.""" +import configparser +import sys + + +def parse_options(options_file_path, options_section): + """Parses the given file and returns options from the given section.""" + parser = configparser.ConfigParser() + parser.read(options_file_path) + + if not parser.has_section(options_section): + return None + + options = parser[options_section] + + if options_section == 'libfuzzer': + options_string = ' '.join( + '-%s=%s' % (key, value) for key, value in options.items()) + else: + # Sanitizer options. + options_string = ':'.join( + '%s=%s' % (key, value) for key, value in options.items()) + + return options_string + + +def main(): + """Processes the arguments and prints the options in the correct format.""" + if len(sys.argv) < 3: + sys.stderr.write('Usage: %s \n' % + sys.argv[0]) + return 1 + + options = parse_options(sys.argv[1], sys.argv[2]) + if options is not None: + print(options) + + return 0 + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/profraw_update.py b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/profraw_update.py new file mode 100644 index 0000000000000000000000000000000000000000..d1ce1fd86396ed02a03fbeb6a54de1d595c2855b --- /dev/null +++ b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/profraw_update.py @@ -0,0 +1,182 @@ +#!/usr/bin/env python3 +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +################################################################################ +"""Helper script for upgrading a profraw file to latest version.""" + +from collections import namedtuple +import struct +import subprocess +import sys + +HeaderGeneric = namedtuple('HeaderGeneric', 'magic version') +HeaderVersion9 = namedtuple( + 'HeaderVersion9', + 'BinaryIdsSize DataSize PaddingBytesBeforeCounters CountersSize \ + PaddingBytesAfterCounters NumBitmapBytes PaddingBytesAfterBitmapBytes NamesSize CountersDelta BitmapDelta NamesDelta ValueKindLast' +) + +PROFRAW_MAGIC = 0xff6c70726f667281 + + +def relativize_address(data, offset, databegin, sect_prf_cnts, sect_prf_data): + """Turns an absolute offset into a relative one.""" + value = struct.unpack('Q', data[offset:offset + 8])[0] + if sect_prf_cnts <= value < sect_prf_data: + # If the value is an address in the right section, make it relative. + value = (value - databegin) & 0xffffffffffffffff + value = struct.pack('Q', value) + for i in range(8): + data[offset + i] = value[i] + # address was made relative + return True + # no changes done + return False + + +def upgrade(data, sect_prf_cnts, sect_prf_data): + """Upgrades profraw data, knowing the sections addresses.""" + generic_header = HeaderGeneric._make(struct.unpack('QQ', data[:16])) + if generic_header.magic != PROFRAW_MAGIC: + raise Exception('Bad magic.') + base_version = generic_header.version + + if base_version >= 9: + # Nothing to do. 
+ return data + if base_version < 5 or base_version == 6: + raise Exception('Unhandled version.') + + if generic_header.version == 5: + generic_header = generic_header._replace(version=7) + # Upgrade from version 5 to 7 by adding binaryids field. + data = data[:8] + struct.pack('Q', generic_header.version) + struct.pack( + 'Q', 0) + data[16:] + if generic_header.version == 7: + # cf https://reviews.llvm.org/D111123 + generic_header = generic_header._replace(version=8) + data = data[:8] + struct.pack('Q', generic_header.version) + data[16:] + if generic_header.version == 8: + # see https://reviews.llvm.org/D138846 + generic_header = generic_header._replace(version=9) + # Upgrade from version 8 to 9 by adding NumBitmapBytes, PaddingBytesAfterBitmapBytes and BitmapDelta fields. + data = data[:8] + struct.pack( + 'Q', generic_header.version) + data[16:56] + struct.pack( + 'QQ', 0, 0) + data[56:72] + struct.pack('Q', 0) + data[72:] + + v9_header = HeaderVersion9._make(struct.unpack('QQQQQQQQQQQQ', data[16:112])) + + if base_version <= 8 and v9_header.BinaryIdsSize % 8 != 0: + # Adds padding for binary ids. 
+ # cf commit b9f547e8e51182d32f1912f97a3e53f4899ea6be
+ # cf https://reviews.llvm.org/D110365
+ padlen = 8 - (v9_header.BinaryIdsSize % 8)
+ # Record the padded size back into v9_header (it was previously stored
+ # into an unused 'v7_header', leaving the header field and the actual
+ # padded bytes out of sync) so the size written below and every later
+ # offset computed from BinaryIdsSize accounts for the padding.
+ old_ids_size = v9_header.BinaryIdsSize
+ v9_header = v9_header._replace(BinaryIdsSize=old_ids_size + padlen)
+ data = data[:16] + struct.pack('Q', v9_header.BinaryIdsSize) + data[24:]
+ # Insert the zero padding right after the original, unpadded binary ids.
+ data = data[:112 + old_ids_size] + bytes(
+ padlen) + data[112 + old_ids_size:]
+
+ if base_version <= 8:
+ offset = 112 + v9_header.BinaryIdsSize
+ for d in range(v9_header.DataSize):
+ # Add BitmapPtr and aligned u32(NumBitmapBytes)
+ data = data[:offset + 3 * 8] + struct.pack(
+ 'Q', 0) + data[offset + 3 * 8:offset + 6 * 8] + struct.pack(
+ 'Q', 0) + data[offset + 6 * 8:]
+ value = struct.unpack('Q',
+ data[offset + 2 * 8:offset + 3 * 8])[0] - 16 * d
+ data = data[:offset + 2 * 8] + struct.pack('Q',
+ value) + data[offset + 3 * 8:]
+ offset += 8 * 8
+
+ if base_version >= 8:
+ # Nothing more to do.
+ return data
+
+ # Last changes are related to the bump from version 7 to 8 making CountersPtr relative.
+ dataref = sect_prf_data
+ # 80 is offset of CountersDelta.
+ if not relativize_address(data, 80, dataref, sect_prf_cnts, sect_prf_data):
+ return data
+
+ offset = 112 + v9_header.BinaryIdsSize
+ # This also works for C+Rust binaries compiled with
+ # clang-14/rust-nightly-clang-13.
+ for _ in range(v9_header.DataSize):
+ # 16 is the offset of CounterPtr in ProfrawData structure.
+ relativize_address(data, offset + 16, dataref, sect_prf_cnts, sect_prf_data)
+ # We need this because of CountersDelta -= sizeof(*SrcData);
+ # seen in __llvm_profile_merge_from_buffer.
+ # Only pre-v8 profraws reach this loop (we returned above for
+ # base_version >= 8), so the producing binary's ProfrawData layout has
+ # no bitmap fields. The former "if was8: dataref -= 16" adjustment
+ # referenced an undefined name (NameError at runtime) and was dead by
+ # intent, so it has been removed.
+ dataref += 44 + 2 * (v9_header.ValueKindLast + 1)
+ # This is the size of one ProfrawData structure.
+ offset += 44 + 2 * (v9_header.ValueKindLast + 1) + + return data + + +def main(): + """Helper script for upgrading a profraw file to latest version.""" + if len(sys.argv) < 3: + sys.stderr.write('Usage: %s options? ...\n' % sys.argv[0]) + return 1 + + # First find llvm profile sections addresses in the elf, quick and dirty. + process = subprocess.Popen(['readelf', '-S', sys.argv[1]], + stdout=subprocess.PIPE) + output, err = process.communicate() + if err: + print('readelf failed') + return 2 + for line in iter(output.split(b'\n')): + if b'__llvm_prf_cnts' in line: + sect_prf_cnts = int(line.split()[3], 16) + elif b'__llvm_prf_data' in line: + sect_prf_data = int(line.split()[3], 16) + + out_name = "default.profup" + in_place = False + start = 2 + if sys.argv[2] == "-i": + in_place = True + start = start + 1 + elif sys.argv[2] == "-o": + out_name = sys.argv[3] + start = 4 + + if len(sys.argv) < start: + sys.stderr.write('Usage: %s options ...\n' % sys.argv[0]) + return 1 + + for i in range(start, len(sys.argv)): + # Then open and read the input profraw file. + with open(sys.argv[i], 'rb') as input_file: + profraw_base = bytearray(input_file.read()) + # Do the upgrade, returning a bytes object. + profraw_latest = upgrade(profraw_base, sect_prf_cnts, sect_prf_data) + # Write the output to the file given to the command line. 
+ if in_place: + out_name = sys.argv[i] + with open(out_name, 'wb') as output_file: + output_file.write(profraw_latest) + + return 0 + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/python_coverage_runner_help.py b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/python_coverage_runner_help.py new file mode 100644 index 0000000000000000000000000000000000000000..5d5f627128455ed4d5d70c166ef8c7fe218ace99 --- /dev/null +++ b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/python_coverage_runner_help.py @@ -0,0 +1,181 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +""" +Helper to manage coverage.py related operations. Does two main +things: (1) pass commands into the coverage.py library and (2) +translate .coverage created from a pyinstaller executable into +paths that match local files. This is needed for html report creation. 
+""" +import os +import re +import json +import sys +from coverage.cmdline import main as coverage_main +from coverage.data import CoverageData + + +def should_exclude_file(filepath): + """Returns whether the path should be excluded from the coverage report.""" + # Skip all atheris code + if "atheris" in filepath: + return True + + # Filter out all standard python libraries + if ('/usr/local/lib/python' in filepath and + 'site-packages' not in filepath and 'dist-packages' not in filepath): + return True + + # Avoid all PyInstaller modules. + if 'PyInstaller' in filepath: + return True + + return False + + +def translate_lines(cov_data, new_cov_data, all_file_paths): + """ + Translate lines in a .coverage file created by coverage.py such that + the file paths points to local files instead. This is needed when collecting + coverage from executables created by pyinstaller. + """ + for pyinstaller_file_path in cov_data.measured_files(): + stripped_py_file_path = pyinstaller_file_path + if stripped_py_file_path.startswith('/tmp/_MEI'): + stripped_py_file_path = '/'.join(stripped_py_file_path.split('/')[3:]) + if stripped_py_file_path.startswith('/out/'): + stripped_py_file_path = stripped_py_file_path.replace('/out/', '') + + # Check if this file exists in our file paths: + for local_file_path in all_file_paths: + if should_exclude_file(local_file_path): + continue + if local_file_path.endswith(stripped_py_file_path): + print('Found matching: %s' % (local_file_path)) + new_cov_data.add_lines( + {local_file_path: cov_data.lines(pyinstaller_file_path)}) + + +def translate_coverage(all_file_paths): + """ + Translate pyinstaller-generated file paths in .coverage (produced by + coverage.py) into local file paths. Place result in .new_coverage. 
+ """ + covdata_pre_translation = CoverageData('.coverage') + covdata_post_translation = CoverageData('.new_coverage') + + covdata_pre_translation.read() + translate_lines(covdata_pre_translation, covdata_post_translation, + all_file_paths) + covdata_post_translation.write() + + +def convert_coveragepy_cov_to_summary_json(src, dst): + """ + Converts a json file produced by coveragepy into a summary.json file + similary to llvm-cov output. `src` is the source coveragepy json file, + `dst` is the destination json file, which will be overwritten. + """ + dst_dict = {'data': [{'files': {}}]} + lines_covered = 0 + lines_count = 0 + with open(src, "r") as src_f: + src_json = json.loads(src_f.read()) + if 'files' in src_json: + for elem in src_json.get('files'): + if 'summary' not in src_json['files'][elem]: + continue + src_dict = src_json['files'][elem]['summary'] + count = src_dict['covered_lines'] + src_dict['missing_lines'] + covered = src_dict['covered_lines'] + notcovered = src_dict['missing_lines'] + percent = src_dict['percent_covered'] + + # Accumulate line coverage + lines_covered += covered + lines_count += count + + dst_dict['data'][0]['files'][elem] = { + 'summary': { + 'lines': { + 'count': count, + 'covered': covered, + 'notcovered': notcovered, + 'percent': percent + } + } + } + if lines_count > 0: + lines_covered_percent = lines_covered / lines_count + else: + lines_covered_percent = 0.0 + dst_dict['data'][0]['totals'] = { + 'branches': { + 'count': 0, + 'covered': 0, + 'notcovered': 0, + 'percent': 0.0 + }, + 'functions': { + 'count': 0, + 'covered': 0, + 'percent': 0.0 + }, + 'instantiations': { + 'count': 0, + 'covered': 0, + 'percent': 0.0 + }, + 'lines': { + 'count': lines_count, + 'covered': lines_covered, + 'percent': lines_covered_percent + }, + 'regions': { + 'count': 0, + 'covered': 0, + 'notcovered': 0, + 'percent': 0.0 + } + } + + with open(dst, 'w') as dst_f: + dst_f.write(json.dumps(dst_dict)) + + +def main(): + """ + Main handler. 
+ """ + if sys.argv[1] == 'translate': + print('Translating the coverage') + files_path = sys.argv[2] + all_file_paths = list() + for root, _, files in os.walk(files_path): + for relative_file_path in files: + abs_file_path = os.path.abspath(os.path.join(root, relative_file_path)) + all_file_paths.append(abs_file_path) + print('Done with path walk') + translate_coverage(all_file_paths) + elif sys.argv[1] == 'convert-to-summary-json': + src = sys.argv[2] + dst = sys.argv[3] + convert_coveragepy_cov_to_summary_json(src, dst) + else: + # Pass commands into coverage package + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(coverage_main()) + + +if __name__ == '__main__': + main() diff --git a/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/rcfilt b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/rcfilt new file mode 100644 index 0000000000000000000000000000000000000000..1c621100c3be0ae4f50d0e4cf817120b8f169b5a --- /dev/null +++ b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/rcfilt @@ -0,0 +1,21 @@ +#!/bin/bash -u +# Copyright 2020 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# Symbol demangling for both C++ and Rust +# +################################################################################ + +# simply pipe +rustfilt | c++filt -n diff --git a/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/reproduce b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/reproduce new file mode 100644 index 0000000000000000000000000000000000000000..2c074d05e51bd0068f5ed4795159cc69eda0426f --- /dev/null +++ b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/reproduce @@ -0,0 +1,34 @@ +#!/bin/bash -eux +# Copyright 2016 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +################################################################################ + +FUZZER=$1 +shift + +if [ ! -v TESTCASE ]; then + TESTCASE="/testcase" +fi + +if [ ! -f $TESTCASE ]; then + echo "Error: $TESTCASE not found, use: docker run -v :$TESTCASE ..." 
+ exit 1 +fi + +export RUN_FUZZER_MODE="interactive" +export FUZZING_ENGINE="libfuzzer" +export SKIP_SEED_CORPUS="1" + +run_fuzzer $FUZZER $@ $TESTCASE diff --git a/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/run_fuzzer b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/run_fuzzer new file mode 100644 index 0000000000000000000000000000000000000000..6e55917cb62bf43b524eed257f585f54154c3362 --- /dev/null +++ b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/run_fuzzer @@ -0,0 +1,228 @@ +#!/bin/bash -eu +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +################################################################################ + +# Fuzzer runner. Appends .options arguments and seed corpus to users args. +# Usage: $0 + +sysctl -w vm.mmap_rnd_bits=28 + +export PATH=$OUT:$PATH +cd $OUT + +DEBUGGER=${DEBUGGER:-} + +FUZZER=$1 +shift + +# This env var is set by CIFuzz. CIFuzz fills this directory with the corpus +# from ClusterFuzz. +CORPUS_DIR=${CORPUS_DIR:-} +if [ -z "$CORPUS_DIR" ] +then + CORPUS_DIR="/tmp/${FUZZER}_corpus" + rm -rf $CORPUS_DIR && mkdir -p $CORPUS_DIR +fi + +SANITIZER=${SANITIZER:-} +if [ -z $SANITIZER ]; then + # If $SANITIZER is not specified (e.g. calling from `reproduce` command), it + # is not important and can be set to any value. 
+ SANITIZER="default" +fi + +if [[ "$RUN_FUZZER_MODE" = interactive ]]; then + FUZZER_OUT="$OUT/${FUZZER}_${FUZZING_ENGINE}_${SANITIZER}_out" +else + FUZZER_OUT="/tmp/${FUZZER}_${FUZZING_ENGINE}_${SANITIZER}_out" +fi + +function get_dictionary() { + local options_file="$FUZZER.options" + local dict_file="$FUZZER.dict" + local dict="" + if [[ -f "$options_file" ]]; then + dict=$(sed -n 's/^\s*dict\s*=\s*\(.*\)/\1/p' "$options_file" | tail -1) + fi + if [[ -z "$dict" && -f "$dict_file" ]]; then + dict="$dict_file" + fi + [[ -z "$dict" ]] && return + if [[ "$FUZZING_ENGINE" = "libfuzzer" ]]; then + printf -- "-dict=%s" "$dict" + elif [[ "$FUZZING_ENGINE" = "afl" ]]; then + printf -- "-x %s" "$dict" + elif [[ "$FUZZING_ENGINE" = "honggfuzz" ]]; then + printf -- "--dict %s" "$dict" + elif [[ "$FUZZING_ENGINE" = "centipede" ]]; then + printf -- "--dictionary %s" "$dict" + else + printf "Unexpected FUZZING_ENGINE: $FUZZING_ENGINE, ignoring\n" >&2 + fi +} + +function get_extra_binaries() { + [[ "$FUZZING_ENGINE" != "centipede" ]] && return + + extra_binaries="$OUT/__centipede_${SANITIZER}/${FUZZER}" + if compgen -G "$extra_binaries" >> /dev/null; then + printf -- "--extra_binaries %s" \""$extra_binaries\"" + fi + +} + +rm -rf $FUZZER_OUT && mkdir -p $FUZZER_OUT + +SEED_CORPUS="${FUZZER}_seed_corpus.zip" + +# TODO: Investigate why this code block is skipped +# by all default fuzzers in bad_build_check. +# They all set SKIP_SEED_CORPUS=1. +if [ -f $SEED_CORPUS ] && [ -z ${SKIP_SEED_CORPUS:-} ]; then + echo "Using seed corpus: $SEED_CORPUS" + unzip -o -d ${CORPUS_DIR}/ $SEED_CORPUS > /dev/null +fi + +OPTIONS_FILE="${FUZZER}.options" +CUSTOM_LIBFUZZER_OPTIONS="" + +if [ -f $OPTIONS_FILE ]; then + custom_asan_options=$(parse_options.py $OPTIONS_FILE asan) + if [ ! -z $custom_asan_options ]; then + export ASAN_OPTIONS="$ASAN_OPTIONS:$custom_asan_options" + fi + + custom_msan_options=$(parse_options.py $OPTIONS_FILE msan) + if [ ! 
-z $custom_msan_options ]; then + export MSAN_OPTIONS="$MSAN_OPTIONS:$custom_msan_options" + fi + + custom_ubsan_options=$(parse_options.py $OPTIONS_FILE ubsan) + if [ ! -z $custom_ubsan_options ]; then + export UBSAN_OPTIONS="$UBSAN_OPTIONS:$custom_ubsan_options" + fi + + CUSTOM_LIBFUZZER_OPTIONS=$(parse_options.py $OPTIONS_FILE libfuzzer) +fi + +if [[ "$FUZZING_ENGINE" = afl ]]; then + + # Set afl++ environment options. + export ASAN_OPTIONS="$ASAN_OPTIONS:abort_on_error=1:symbolize=0:detect_odr_violation=0:" + export MSAN_OPTIONS="$MSAN_OPTIONS:exit_code=86:symbolize=0" + export UBSAN_OPTIONS="$UBSAN_OPTIONS:symbolize=0" + export AFL_I_DONT_CARE_ABOUT_MISSING_CRASHES=1 + export AFL_SKIP_CPUFREQ=1 + export AFL_TRY_AFFINITY=1 + export AFL_FAST_CAL=1 + export AFL_CMPLOG_ONLY_NEW=1 + export AFL_FORKSRV_INIT_TMOUT=30000 + export AFL_IGNORE_PROBLEMS=1 + export AFL_IGNORE_UNKNOWN_ENVS=1 + + # If $OUT/afl_cmplog.txt is present this means the target was compiled for + # CMPLOG. So we have to add the proper parameters to afl-fuzz. + test -e "$OUT/afl_cmplog.txt" && AFL_FUZZER_ARGS="$AFL_FUZZER_ARGS -c $OUT/$FUZZER" + + # If $OUT/afl++.dict we load it as a dictionary for afl-fuzz. + test -e "$OUT/afl++.dict" && AFL_FUZZER_ARGS="$AFL_FUZZER_ARGS -x $OUT/afl++.dict" + + # Ensure timeout is a bit larger than 1sec as some of the OSS-Fuzz fuzzers + # are slower than this. + AFL_FUZZER_ARGS="$AFL_FUZZER_ARGS -t 5000+" + + # AFL expects at least 1 file in the input dir. + echo input > ${CORPUS_DIR}/input + + CMD_LINE="$OUT/afl-fuzz $AFL_FUZZER_ARGS -i $CORPUS_DIR -o $FUZZER_OUT $(get_dictionary) $* -- $OUT/$FUZZER" + + echo afl++ setup: + env|grep AFL_ + cat "$OUT/afl_options.txt" + +elif [[ "$FUZZING_ENGINE" = honggfuzz ]]; then + + # Honggfuzz expects at least 1 file in the input dir. 
+ echo input > $CORPUS_DIR/input + # --exit_upon_crash: exit whith a first crash seen + # -V: verify crashes + # -R (report): save report file to this location + # -W (working dir): where the crashes go + # -v (verbose): don't use VTE UI, just stderr + # -z: use software-instrumentation of clang (trace-pc-guard....) + # -P: use persistent mode of fuzzing (i.e. LLVMFuzzerTestOneInput) + # -f: location of the initial (and destination) file corpus + # -n: number of fuzzing threads (and processes) + CMD_LINE="$OUT/honggfuzz -n 1 --exit_upon_crash -V -R /tmp/${FUZZER}_honggfuzz.report -W $FUZZER_OUT -v -z -P -f \"$CORPUS_DIR\" $(get_dictionary) $* -- \"$OUT/$FUZZER\"" + + if [[ $(LC_ALL=C grep -P "\x01_LIBHFUZZ_NETDRIVER_BINARY_SIGNATURE_\x02\xFF" "$FUZZER" ) ]]; then + # Honggfuzz Netdriver port. This must match the port in Clusterfuzz. + export HFND_TCP_PORT=8666 + fi +elif [[ "$FUZZING_ENGINE" = centipede ]]; then + + # Create the work and corpus directory for Centipede. + CENTIPEDE_WORKDIR="${CENTIPEDE_WORKDIR:-$OUT}" + + # Centipede only saves crashes to crashes/ in workdir. + rm -rf $FUZZER_OUT + + # --workdir: Dir that stores corpus&features in Centipede's own format. + # --corpus_dir: Location of the initial (and destination) file corpus. + # --fork_server: Execute the target(s) via the fork server. + # Run in fork mode to continue fuzzing indefinitely in case of + # OOMs, timeouts, and crashes. + # --exit_on_crash=0: Do not exit on crash. + # --timeout=1200: The process that executes target binary will abort + # if an input runs >= 1200s. + # --rss_limit_mb=0: Disable RSS limit. + # --address_space_limit_mb=0: No address space limit. + # --binary: The target binary under test without sanitizer. + # --extra_binary: The target binaries under test with sanitizers. 
+ CMD_LINE="$OUT/centipede --workdir=$CENTIPEDE_WORKDIR --corpus_dir=\"$CORPUS_DIR\" --fork_server=1 --exit_on_crash=1 --timeout=1200 --rss_limit_mb=4096 --address_space_limit_mb=5120 $(get_dictionary) --binary=\"$OUT/${FUZZER}\" $(get_extra_binaries) $*" +else + + CMD_LINE="$OUT/$FUZZER $FUZZER_ARGS $*" + + if [ -z ${SKIP_SEED_CORPUS:-} ]; then + CMD_LINE="$CMD_LINE $CORPUS_DIR" + fi + + if [[ ! -z ${CUSTOM_LIBFUZZER_OPTIONS} ]]; then + CMD_LINE="$CMD_LINE $CUSTOM_LIBFUZZER_OPTIONS" + fi + + if [[ ! "$CMD_LINE" =~ "-dict=" ]]; then + if [ -f "$FUZZER.dict" ]; then + CMD_LINE="$CMD_LINE -dict=$FUZZER.dict" + fi + fi + + CMD_LINE="$CMD_LINE < /dev/null" + +fi + +echo $CMD_LINE + +# Unset OUT so the fuzz target can't rely on it. +unset OUT + +if [ ! -z "$DEBUGGER" ]; then + CMD_LINE="$DEBUGGER $CMD_LINE" +fi + +bash -c "$CMD_LINE" + diff --git a/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/ruzzy b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/ruzzy new file mode 100644 index 0000000000000000000000000000000000000000..663e8f5b9c4288cde4e1c422343ca595e4afaab6 --- /dev/null +++ b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/ruzzy @@ -0,0 +1,19 @@ +#!/usr/bin/env bash +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +################################################################################ + +ASAN_OPTIONS="allocator_may_return_null=1:detect_leaks=0:use_sigaltstack=0" LD_PRELOAD=$(ruby -e 'require "ruzzy"; print Ruzzy::ASAN_PATH') \ + ruby $@ diff --git a/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/targets_list b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/targets_list new file mode 100644 index 0000000000000000000000000000000000000000..95615c81118c7991cea1c3e800ea9b5c304040ed --- /dev/null +++ b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/targets_list @@ -0,0 +1,10 @@ +#!/bin/bash + +for binary in $(find $OUT/ -executable -type f); do + [[ "$binary" != *.so ]] || continue + [[ $(basename "$binary") != jazzer_driver* ]] || continue + file "$binary" | grep -e ELF -e "shell script" > /dev/null 2>&1 || continue + grep "LLVMFuzzerTestOneInput" "$binary" > /dev/null 2>&1 || continue + + basename "$binary" +done diff --git a/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/test_all.py b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/test_all.py new file mode 100644 index 0000000000000000000000000000000000000000..428e49ac0b6d7179dcab5003e6104e7a88c98906 --- /dev/null +++ b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/test_all.py @@ -0,0 +1,295 @@ +#!/usr/bin/env python3 +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +################################################################################ +"""Does bad_build_check on all fuzz targets in $OUT.""" + +import contextlib +import multiprocessing +import os +import re +import subprocess +import stat +import sys +import tempfile + +BASE_TMP_FUZZER_DIR = '/tmp/not-out' + +EXECUTABLE = stat.S_IEXEC | stat.S_IXGRP | stat.S_IXOTH + +IGNORED_TARGETS = [ + r'do_stuff_fuzzer', r'checksum_fuzzer', r'fuzz_dump', r'fuzz_keyring', + r'xmltest', r'fuzz_compression_sas_rle', r'ares_*_fuzzer' +] + +IGNORED_TARGETS_RE = re.compile('^' + r'$|^'.join(IGNORED_TARGETS) + '$') + + +def move_directory_contents(src_directory, dst_directory): + """Moves contents of |src_directory| to |dst_directory|.""" + # Use mv because mv preserves file permissions. If we don't preserve file + # permissions that can mess up CheckFuzzerBuildTest in cifuzz_test.py and + # other cases where one is calling test_all on files not in OSS-Fuzz's real + # out directory. + src_contents = [ + os.path.join(src_directory, filename) + for filename in os.listdir(src_directory) + ] + command = ['mv'] + src_contents + [dst_directory] + subprocess.check_call(command) + + +def is_elf(filepath): + """Returns True if |filepath| is an ELF file.""" + result = subprocess.run(['file', filepath], + stdout=subprocess.PIPE, + check=False) + return b'ELF' in result.stdout + + +def is_shell_script(filepath): + """Returns True if |filepath| is a shell script.""" + result = subprocess.run(['file', filepath], + stdout=subprocess.PIPE, + check=False) + return b'shell script' in result.stdout + + +def find_fuzz_targets(directory): + """Returns paths to fuzz targets in |directory|.""" + # TODO(https://github.com/google/oss-fuzz/issues/4585): Use libClusterFuzz for + # this. 
+ fuzz_targets = [] + for filename in os.listdir(directory): + path = os.path.join(directory, filename) + if filename == 'llvm-symbolizer': + continue + if filename.startswith('afl-'): + continue + if filename.startswith('jazzer_'): + continue + if not os.path.isfile(path): + continue + if not os.stat(path).st_mode & EXECUTABLE: + continue + # Fuzz targets can either be ELF binaries or shell scripts (e.g. wrapper + # scripts for Python and JVM targets or rules_fuzzing builds with runfiles + # trees). + if not is_elf(path) and not is_shell_script(path): + continue + if os.getenv('FUZZING_ENGINE') not in {'none', 'wycheproof'}: + with open(path, 'rb') as file_handle: + binary_contents = file_handle.read() + if b'LLVMFuzzerTestOneInput' not in binary_contents: + continue + fuzz_targets.append(path) + return fuzz_targets + + +def do_bad_build_check(fuzz_target): + """Runs bad_build_check on |fuzz_target|. Returns a + Subprocess.ProcessResult.""" + print('INFO: performing bad build checks for', fuzz_target) + if centipede_needs_auxiliaries(): + print('INFO: Finding Centipede\'s auxiliary for target', fuzz_target) + auxiliary_path = find_centipede_auxiliary(fuzz_target) + print('INFO: Using auxiliary binary:', auxiliary_path) + auxiliary = [auxiliary_path] + else: + auxiliary = [] + + command = ['bad_build_check', fuzz_target] + auxiliary + with tempfile.TemporaryDirectory() as temp_centipede_workdir: + # Do this so that centipede doesn't fill up the disk during bad build check + env = os.environ.copy() + env['CENTIPEDE_WORKDIR'] = temp_centipede_workdir + return subprocess.run(command, + stderr=subprocess.PIPE, + stdout=subprocess.PIPE, + env=env, + check=False) + + +def get_broken_fuzz_targets(bad_build_results, fuzz_targets): + """Returns a list of broken fuzz targets and their process results in + |fuzz_targets| where each item in |bad_build_results| is the result of + bad_build_check on the corresponding element in |fuzz_targets|.""" + broken = [] + for result, 
fuzz_target in zip(bad_build_results, fuzz_targets): + if result.returncode != 0: + broken.append((fuzz_target, result)) + return broken + + +def has_ignored_targets(out_dir): + """Returns True if |out_dir| has any fuzz targets we are supposed to ignore + bad build checks of.""" + out_files = set(os.listdir(out_dir)) + for filename in out_files: + if re.match(IGNORED_TARGETS_RE, filename): + return True + return False + + +@contextlib.contextmanager +def use_different_out_dir(): + """Context manager that moves OUT to subdirectory of BASE_TMP_FUZZER_DIR. This + is useful for catching hardcoding. Note that this sets the environment + variable OUT and therefore must be run before multiprocessing.Pool is created. + Resets OUT at the end.""" + # Use a fake OUT directory to catch path hardcoding that breaks on + # ClusterFuzz. + initial_out = os.getenv('OUT') + os.makedirs(BASE_TMP_FUZZER_DIR, exist_ok=True) + # Use a random subdirectory of BASE_TMP_FUZZER_DIR to allow running multiple + # instances of test_all in parallel (useful for integration testing). + with tempfile.TemporaryDirectory(dir=BASE_TMP_FUZZER_DIR) as out: + # Set this so that run_fuzzer which is called by bad_build_check works + # properly. + os.environ['OUT'] = out + # We move the contents of the directory because we can't move the + # directory itself because it is a mount. + move_directory_contents(initial_out, out) + try: + yield out + finally: + move_directory_contents(out, initial_out) + os.environ['OUT'] = initial_out + + +def test_all_outside_out(allowed_broken_targets_percentage): + """Wrapper around test_all that changes OUT and returns the result.""" + with use_different_out_dir() as out: + return test_all(out, allowed_broken_targets_percentage) + + +def centipede_needs_auxiliaries(): + """Checks if auxiliaries are needed for Centipede.""" + # Centipede always requires unsanitized binaries as the main fuzz targets, + # and separate sanitized binaries as auxiliaries. + # 1. 
Building sanitized binaries with helper.py (i.e., local or GitHub CI): + # Unsanitized ones will be built automatically into the same docker container. + # Script bad_build_check tests both + # a) If main fuzz targets can run with the auxiliaries, and + # b) If the auxiliaries are built with the correct sanitizers. + # 2. In Trial build and production build: + # Two kinds of binaries will be in separated buckets / docker containers. + # Script bad_build_check tests either + # a) If the unsanitized binaries can run without the sanitized ones, or + # b) If the sanitized binaries are built with the correct sanitizers. + return (os.getenv('FUZZING_ENGINE') == 'centipede' and + os.getenv('SANITIZER') != 'none' and os.getenv('HELPER') == 'True') + + +def find_centipede_auxiliary(main_fuzz_target_path): + """Finds the sanitized binary path that corresponds to |main_fuzz_target| for + bad_build_check.""" + target_dir, target_name = os.path.split(main_fuzz_target_path) + sanitized_binary_dir = os.path.join(target_dir, + f'__centipede_{os.getenv("SANITIZER")}') + sanitized_binary_path = os.path.join(sanitized_binary_dir, target_name) + + if os.path.isfile(sanitized_binary_path): + return sanitized_binary_path + + # Neither of the following two should ever happen, returns None to indicate + # an error. + if os.path.isdir(sanitized_binary_dir): + print('ERROR: Unable to identify Centipede\'s sanitized target' + f'{sanitized_binary_path} in {os.listdir(sanitized_binary_dir)}') + else: + print('ERROR: Unable to identify Centipede\'s sanitized target directory' + f'{sanitized_binary_dir} in {os.listdir(target_dir)}') + return None + + +def test_all(out, allowed_broken_targets_percentage): # pylint: disable=too-many-return-statements + """Do bad_build_check on all fuzz targets.""" + # TODO(metzman): Refactor so that we can convert test_one to python. 
+  fuzz_targets = find_fuzz_targets(out)
+  if not fuzz_targets:
+    print('ERROR: No fuzz targets found.')
+    return False
+
+  if centipede_needs_auxiliaries():
+    for fuzz_target in fuzz_targets:
+      if not find_centipede_auxiliary(fuzz_target):
+        print(f'ERROR: Couldn\'t find auxiliary for {fuzz_target}.')
+        return False
+
+  pool = multiprocessing.Pool()
+  bad_build_results = pool.map(do_bad_build_check, fuzz_targets)
+  pool.close()
+  pool.join()
+  broken_targets = get_broken_fuzz_targets(bad_build_results, fuzz_targets)
+  broken_targets_count = len(broken_targets)
+  if not broken_targets_count:
+    return True
+
+  print('Retrying failed fuzz targets sequentially', broken_targets_count)
+  pool = multiprocessing.Pool(1)
+  retry_targets = []
+  for broken_target, result in broken_targets:
+    retry_targets.append(broken_target)
+  bad_build_results = pool.map(do_bad_build_check, retry_targets)
+  pool.close()
+  pool.join()
+  # BUG FIX: pair the retry results with retry_targets (plain target paths).
+  # The previous code zipped against broken_targets, whose elements are
+  # (target, result) tuples, so the report below printed nested tuples
+  # instead of fuzz target paths.
+  broken_targets = get_broken_fuzz_targets(bad_build_results, retry_targets)
+  broken_targets_count = len(broken_targets)
+  if not broken_targets_count:
+    return True
+
+  print('Broken fuzz targets', broken_targets_count)
+  total_targets_count = len(fuzz_targets)
+  broken_targets_percentage = 100 * broken_targets_count / total_targets_count
+  for broken_target, result in broken_targets:
+    print(broken_target)
+    # Use write because we can't print binary strings.
+    sys.stdout.buffer.write(result.stdout + result.stderr + b'\n')
+
+  if broken_targets_percentage > allowed_broken_targets_percentage:
+    print('ERROR: {broken_targets_percentage}% of fuzz targets seem to be '
+          'broken. See the list above for a detailed information.'.format(
+              broken_targets_percentage=broken_targets_percentage))
+    if has_ignored_targets(out):
+      print('Build check automatically passing because of ignored targets.')
+      return True
+    return False
+  print('{total_targets_count} fuzzers total, {broken_targets_count} '
+        'seem to be broken ({broken_targets_percentage}%).'.format(
+            total_targets_count=total_targets_count,
+            broken_targets_count=broken_targets_count,
+            broken_targets_percentage=broken_targets_percentage))
+  return True
+
+
+def get_allowed_broken_targets_percentage():
+  """Returns the value of the environment value
+  'ALLOWED_BROKEN_TARGETS_PERCENTAGE' as an int or returns a reasonable
+  default."""
+  return int(os.getenv('ALLOWED_BROKEN_TARGETS_PERCENTAGE') or '10')
+
+
+def main():
+  """Does bad_build_check on all fuzz targets in parallel. Returns 0 on success.
+  Returns 1 on failure."""
+  allowed_broken_targets_percentage = get_allowed_broken_targets_percentage()
+  if not test_all_outside_out(allowed_broken_targets_percentage):
+    return 1
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/test_all_test.py b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/test_all_test.py
new file mode 100644
index 0000000000000000000000000000000000000000..b3077ec1e4f6d1509b27caab141180a27202b872
--- /dev/null
+++ b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/test_all_test.py
@@ -0,0 +1,38 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +################################################################################ +"""Tests test_all.py""" +import unittest +from unittest import mock + +import test_all + + +class TestTestAll(unittest.TestCase): + """Tests for the test_all_function.""" + + @mock.patch('test_all.find_fuzz_targets', return_value=[]) + @mock.patch('builtins.print') + def test_test_all_no_fuzz_targets(self, mock_print, _): + """Tests that test_all returns False when there are no fuzz targets.""" + outdir = '/out' + allowed_broken_targets_percentage = 0 + self.assertFalse( + test_all.test_all(outdir, allowed_broken_targets_percentage)) + mock_print.assert_called_with('ERROR: No fuzz targets found.') + + +if __name__ == '__main__': + unittest.main() diff --git a/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/test_one.py b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/test_one.py new file mode 100644 index 0000000000000000000000000000000000000000..e391ec96dcc3b9b74fd2049f88806c2578df5ff8 --- /dev/null +++ b/local-test-tika-delta-02/fuzz-tooling/infra/base-images/base-runner/test_one.py @@ -0,0 +1,47 @@ +#!/usr/bin/env python3 +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+################################################################################
+"""Does bad_build_check on a fuzz target in $OUT."""
+import os
+import sys
+
+import test_all
+
+
+def test_one(fuzz_target):
+  """Does bad_build_check on one fuzz target. Returns True on success."""
+  with test_all.use_different_out_dir():
+    fuzz_target_path = os.path.join(os.environ['OUT'], fuzz_target)
+    result = test_all.do_bad_build_check(fuzz_target_path)
+    if result.returncode != 0:
+      sys.stdout.buffer.write(result.stdout + result.stderr + b'\n')
+      return False
+    return True
+
+
+def main():
+  """Does bad_build_check on one fuzz target. Returns 1 on failure, 0 on
+  success."""
+  if len(sys.argv) != 2:
+    # BUG FIX: the old message used the '%d' conversion on a string and never
+    # applied the '%' operator, so it printed a literal '%d' followed by argv[0]
+    # as a separate print argument.
+    print(f'Usage: {sys.argv[0]} <fuzz_target_name>')
+    return 1
+
+  fuzz_target_binary = sys.argv[1]
+  return 0 if test_one(fuzz_target_binary) else 1
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/local-test-tika-delta-02/fuzz-tooling/infra/ci/requirements.txt b/local-test-tika-delta-02/fuzz-tooling/infra/ci/requirements.txt
new file mode 100644
index 0000000000000000000000000000000000000000..303274fd5fbec5d190a3baa80dd449f579a2b5a9
--- /dev/null
+++ b/local-test-tika-delta-02/fuzz-tooling/infra/ci/requirements.txt
@@ -0,0 +1,9 @@
+# Requirements for submitting code changes to infra/ (needed by presubmit.py).
+parameterized==0.7.4
+pyfakefs==4.5.6
+pylint==2.5.3
+pytest==7.1.2
+pytest-xdist==2.5.0
+PyYAML==6.0
+requests==2.31.0
+yapf==0.32.0
diff --git a/local-test-tika-delta-02/fuzz-tooling/infra/cifuzz/CHANGELOG b/local-test-tika-delta-02/fuzz-tooling/infra/cifuzz/CHANGELOG
new file mode 100644
index 0000000000000000000000000000000000000000..bc6699c0bd7688bffc217b5fe22bfb97487df366
--- /dev/null
+++ b/local-test-tika-delta-02/fuzz-tooling/infra/cifuzz/CHANGELOG
@@ -0,0 +1,3 @@
+ Next Release:
+
+ Improve detection of changed files by fixing https://github.com/google/oss-fuzz/issues/7011
diff --git a/local-test-tika-delta-02/fuzz-tooling/infra/cifuzz/affected_fuzz_targets_test.py b/local-test-tika-delta-02/fuzz-tooling/infra/cifuzz/affected_fuzz_targets_test.py
new file mode 100644
index 0000000000000000000000000000000000000000..3197ebbf1ae89dd327a2d4ea191f082360165389
--- /dev/null
+++ b/local-test-tika-delta-02/fuzz-tooling/infra/cifuzz/affected_fuzz_targets_test.py
@@ -0,0 +1,113 @@
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Tests for affected_fuzz_targets.py""" +import os +import shutil +import tempfile +import unittest +from unittest import mock + +import parameterized + +import affected_fuzz_targets +import clusterfuzz_deployment +import get_coverage +import test_helpers +import workspace_utils + +# pylint: disable=protected-access + +# NOTE: This integration test relies on +# https://github.com/google/oss-fuzz/tree/master/projects/example project. +EXAMPLE_PROJECT = 'example' + +EXAMPLE_FILE_CHANGED = 'test.txt' + +TEST_DATA_OUT_PATH = os.path.join(os.path.dirname(os.path.abspath(__file__)), + 'test_data', 'build-out') + + +class RemoveUnaffectedFuzzTargetsTest(unittest.TestCase): + """Tests remove_unaffected_fuzzers.""" + + TEST_FUZZER_1 = os.path.join(TEST_DATA_OUT_PATH, 'example_crash_fuzzer') + TEST_FUZZER_2 = os.path.join(TEST_DATA_OUT_PATH, 'example_nocrash_fuzzer') + + # yapf: disable + @parameterized.parameterized.expand([ + # Tests specific affected fuzzers are kept. + ([[EXAMPLE_FILE_CHANGED], None], 2,), + + # Tests specific affected fuzzer is kept. + ([[EXAMPLE_FILE_CHANGED], ['not/a/real/file']], 1), + + # Tests all fuzzers are kept if none are deemed affected. + ([None, None], 2), + + # Tests that multiple fuzzers are kept if multiple fuzzers are affected. + ([[EXAMPLE_FILE_CHANGED], [EXAMPLE_FILE_CHANGED]], 2), + ]) + # yapf: enable + def test_remove_unaffected_fuzz_targets(self, side_effect, expected_dir_len): + """Tests that remove_unaffected_fuzzers has the intended effect.""" + config = test_helpers.create_run_config( + cfl_platform='github', + oss_fuzz_project_name=EXAMPLE_PROJECT, + workspace='/workspace') + workspace = workspace_utils.Workspace(config) + deployment = clusterfuzz_deployment.get_clusterfuzz_deployment( + config, workspace) + # We can't use fakefs in this test because this test executes + # utils.is_fuzz_target_local. This function relies on the executable bit + # being set, which doesn't work properly in fakefs. 
+ with tempfile.TemporaryDirectory() as tmp_dir, mock.patch( + 'get_coverage.OSSFuzzCoverage.get_files_covered_by_target' + ) as mock_get_files: + with mock.patch('get_coverage._get_oss_fuzz_fuzzer_stats_dir_url', + return_value=1): + mock_get_files.side_effect = side_effect + shutil.copy(self.TEST_FUZZER_1, tmp_dir) + shutil.copy(self.TEST_FUZZER_2, tmp_dir) + + affected_fuzz_targets.remove_unaffected_fuzz_targets( + deployment, tmp_dir, [EXAMPLE_FILE_CHANGED], '') + self.assertEqual(expected_dir_len, len(os.listdir(tmp_dir))) + + +class IsFuzzTargetAffectedTest(unittest.TestCase): + """Tests for is_fuzz_target_affected.""" + + def setUp(self): + self.fuzz_target_path = '/fuzz_target' + + def test_relative_paths(self): + """Tests that is_fuzz_target_affected works as intended when the covered + files are relative paths.""" + with mock.patch.object( + get_coverage.FilesystemCoverage, + 'get_files_covered_by_target', + ) as get_files_covered_by_target: + get_files_covered_by_target.return_value = [ + '/work/build/../../src/systemd/src/basic/alloc-util.c' + ] + coverage = get_coverage.FilesystemCoverage('/', '/') + + self.assertTrue( + affected_fuzz_targets.is_fuzz_target_affected( + coverage, self.fuzz_target_path, + ['/src/systemd/src/basic/alloc-util.c'])) + + +if __name__ == '__main__': + unittest.main() diff --git a/local-test-tika-delta-02/fuzz-tooling/infra/cifuzz/build_fuzzers_test.py b/local-test-tika-delta-02/fuzz-tooling/infra/cifuzz/build_fuzzers_test.py new file mode 100644 index 0000000000000000000000000000000000000000..36a4d4b2287401b29bf3f8b31c203296142b7179 --- /dev/null +++ b/local-test-tika-delta-02/fuzz-tooling/infra/cifuzz/build_fuzzers_test.py @@ -0,0 +1,403 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Tests the functionality of the cifuzz module.""" +import os +import shutil +import sys +import tempfile +import unittest +from unittest import mock + +import parameterized + +# pylint: disable=wrong-import-position +INFRA_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) +sys.path.append(INFRA_DIR) + +OSS_FUZZ_DIR = os.path.dirname(INFRA_DIR) + +import build_fuzzers +import continuous_integration +import repo_manager +import test_helpers + +# NOTE: This integration test relies on +# https://github.com/google/oss-fuzz/tree/master/projects/example project. +EXAMPLE_PROJECT = 'example' + +# Location of data used for testing. +TEST_DATA_PATH = os.path.join(os.path.dirname(os.path.abspath(__file__)), + 'test_data') + +# An example fuzzer that triggers an crash. +# Binary is a copy of the example project's do_stuff_fuzzer and can be +# generated by running "python3 infra/helper.py build_fuzzers example". +EXAMPLE_CRASH_FUZZER = 'example_crash_fuzzer' + +# An example fuzzer that does not trigger a crash. +# Binary is a modified version of example project's do_stuff_fuzzer. It is +# created by removing the bug in my_api.cpp. +EXAMPLE_NOCRASH_FUZZER = 'example_nocrash_fuzzer' + +# A fuzzer to be built in build_fuzzers integration tests. 
+EXAMPLE_BUILD_FUZZER = 'do_stuff_fuzzer' + +# pylint: disable=no-self-use,protected-access,too-few-public-methods,unused-argument + + +def docker_command_has_env_var_arg(command, env_var_arg): + """Returns True if a docker command has a specific env var argument.""" + for idx, element in enumerate(command): + if idx == 0: + # We're looking for the variable which can't be the first argument. + continue + + if element == env_var_arg and command[idx - 1] == '-e': + return True + return False + + +class BuildFuzzersTest(unittest.TestCase): + """Unit tests for build_fuzzers.""" + + @mock.patch('build_specified_commit.detect_main_repo', + return_value=('example.com', '/path')) + @mock.patch('repo_manager._clone', return_value=None) + @mock.patch('continuous_integration.checkout_specified_commit') + @mock.patch('helper.docker_run', return_value=False) # We want to quit early. + def test_cifuzz_env_var(self, mock_docker_run, _, __, ___): + """Tests that the CIFUZZ env var is set.""" + + with tempfile.TemporaryDirectory() as tmp_dir: + build_fuzzers.build_fuzzers( + test_helpers.create_build_config( + oss_fuzz_project_name=EXAMPLE_PROJECT, + project_repo_name=EXAMPLE_PROJECT, + workspace=tmp_dir, + pr_ref='refs/pull/1757/merge')) + + docker_run_command = mock_docker_run.call_args_list[0][0][0] + + self.assertTrue( + docker_command_has_env_var_arg(docker_run_command, 'CIFUZZ=True')) + + @mock.patch('build_specified_commit.detect_main_repo', + return_value=('example.com', '/path')) + @mock.patch('repo_manager._clone', return_value=None) + @mock.patch('continuous_integration.checkout_specified_commit') + @mock.patch('helper.docker_run', return_value=False) # We want to quit early. 
+ def test_extra_env_var(self, mock_docker_run, _, __, ___): + """Tests that the CIFUZZ env var is set.""" + + extra_env_var = 'CFL_EXTRA_TOKEN' + with tempfile.TemporaryDirectory() as tmp_dir: + with mock.patch.dict(os.environ, {extra_env_var: 'BLAH'}): + build_fuzzers.build_fuzzers( + test_helpers.create_build_config( + oss_fuzz_project_name=EXAMPLE_PROJECT, + project_repo_name=EXAMPLE_PROJECT, + workspace=tmp_dir, + pr_ref='refs/pull/1757/merge')) + + docker_run_command = mock_docker_run.call_args_list[0][0][0] + self.assertTrue( + docker_command_has_env_var_arg(docker_run_command, extra_env_var)) + + +class InternalGithubBuildTest(unittest.TestCase): + """Tests for building OSS-Fuzz projects on GitHub actions.""" + PROJECT_REPO_NAME = 'myproject' + SANITIZER = 'address' + GIT_SHA = 'fake' + PR_REF = 'fake' + + def _create_builder(self, tmp_dir, oss_fuzz_project_name='myproject'): + """Creates an InternalGithubBuilder and returns it.""" + config = test_helpers.create_build_config( + oss_fuzz_project_name=oss_fuzz_project_name, + project_repo_name=self.PROJECT_REPO_NAME, + workspace=tmp_dir, + sanitizer=self.SANITIZER, + git_sha=self.GIT_SHA, + pr_ref=self.PR_REF, + cfl_platform='github') + cfl_platform = continuous_integration.get_ci(config) + builder = build_fuzzers.Builder(config, cfl_platform) + builder.repo_manager = repo_manager.RepoManager('/fake') + return builder + + @mock.patch('helper.docker_run', return_value=True) + @mock.patch('continuous_integration.checkout_specified_commit', + side_effect=None) + def test_correct_host_repo_path(self, _, __): + """Tests that the correct self.host_repo_path is set by + build_image_and_checkout_src. 
Specifically, we want the name of the + directory the repo is in to match the name used in the docker + image/container, so that it will replace the host's copy properly.""" + image_repo_path = '/src/repo_dir' + with tempfile.TemporaryDirectory() as tmp_dir, mock.patch( + 'build_specified_commit.detect_main_repo', + return_value=('inferred_url', image_repo_path)): + builder = self._create_builder(tmp_dir) + builder.build_image_and_checkout_src() + + self.assertEqual(os.path.basename(builder.host_repo_path), + os.path.basename(image_repo_path)) + + @mock.patch('clusterfuzz_deployment.ClusterFuzzLite.upload_build', + return_value=True) + def test_upload_build_disabled(self, mock_upload_build): + """Test upload build (disabled).""" + with tempfile.TemporaryDirectory() as tmp_dir: + builder = self._create_builder(tmp_dir) + builder.upload_build() + + mock_upload_build.assert_not_called() + + @mock.patch('repo_manager.RepoManager.get_current_commit', + return_value='commit') + @mock.patch('clusterfuzz_deployment.ClusterFuzzLite.upload_build', + return_value=True) + def test_upload_build(self, mock_upload_build, mock_get_current_commit): + """Test upload build.""" + with tempfile.TemporaryDirectory() as tmp_dir: + builder = self._create_builder(tmp_dir, oss_fuzz_project_name='') + builder.config.upload_build = True + builder.upload_build() + + mock_upload_build.assert_called_with('commit') + + +@unittest.skipIf(not os.getenv('INTEGRATION_TESTS'), + 'INTEGRATION_TESTS=1 not set') +class BuildFuzzersIntegrationTest(unittest.TestCase): + """Integration tests for build_fuzzers.""" + + def setUp(self): + self.temp_dir_ctx_manager = test_helpers.docker_temp_dir() + self.workspace = self.temp_dir_ctx_manager.__enter__() + self.out_dir = os.path.join(self.workspace, 'build-out') + test_helpers.patch_environ(self) + + base_runner_path = os.path.join(INFRA_DIR, 'base-images', 'base-runner') + os.environ['PATH'] = os.environ['PATH'] + os.pathsep + base_runner_path + + def 
tearDown(self):
+    self.temp_dir_ctx_manager.__exit__(None, None, None)
+
+  def test_external_github_project(self):
+    """Tests building fuzzers from an external project on Github."""
+    project_repo_name = 'external-project'
+    git_url = 'https://github.com/jonathanmetzman/cifuzz-external-example.git'
+    # This test is dependent on the state of
+    # github.com/jonathanmetzman/cifuzz-external-example.
+    config = test_helpers.create_build_config(
+        project_repo_name=project_repo_name,
+        workspace=self.workspace,
+        git_url=git_url,
+        git_sha='HEAD',
+        cfl_platform='github',
+        base_commit='HEAD^1')
+    self.assertTrue(build_fuzzers.build_fuzzers(config))
+    self.assertTrue(
+        os.path.exists(os.path.join(self.out_dir, EXAMPLE_BUILD_FUZZER)))
+
+  def test_external_generic_project(self):
+    """Tests building fuzzers from an external project not on Github."""
+    project_repo_name = 'cifuzz-external-example'
+    git_url = 'https://github.com/jonathanmetzman/cifuzz-external-example.git'
+    # This test is dependent on the state of
+    # github.com/jonathanmetzman/cifuzz-external-example. 
+ manager = repo_manager.clone_repo_and_get_manager( + 'https://github.com/jonathanmetzman/cifuzz-external-example', + self.workspace) + project_src_path = manager.repo_dir + config = test_helpers.create_build_config( + project_repo_name=project_repo_name, + workspace=self.workspace, + git_url=git_url, + filestore='no_filestore', + git_sha='HEAD', + project_src_path=project_src_path, + base_commit='HEAD^1') + self.assertTrue(build_fuzzers.build_fuzzers(config)) + self.assertTrue( + os.path.exists(os.path.join(self.out_dir, EXAMPLE_BUILD_FUZZER))) + + def test_valid_commit(self): + """Tests building fuzzers with valid inputs.""" + config = test_helpers.create_build_config( + oss_fuzz_project_name=EXAMPLE_PROJECT, + project_repo_name='oss-fuzz', + workspace=self.workspace, + git_sha='0b95fe1039ed7c38fea1f97078316bfc1030c523', + base_commit='da0746452433dc18bae699e355a9821285d863c8', + cfl_platform='github') + self.assertTrue(build_fuzzers.build_fuzzers(config)) + self.assertTrue( + os.path.exists(os.path.join(self.out_dir, EXAMPLE_BUILD_FUZZER))) + + def test_valid_pull_request(self): + """Tests building fuzzers with valid pull request.""" + config = test_helpers.create_build_config( + oss_fuzz_project_name=EXAMPLE_PROJECT, + project_repo_name='oss-fuzz', + workspace=self.workspace, + pr_ref='refs/pull/1757/merge', + base_ref='master', + cfl_platform='github') + self.assertTrue(build_fuzzers.build_fuzzers(config)) + self.assertTrue( + os.path.exists(os.path.join(self.out_dir, EXAMPLE_BUILD_FUZZER))) + + def test_invalid_pull_request(self): + """Tests building fuzzers with invalid pull request.""" + config = test_helpers.create_build_config( + oss_fuzz_project_name=EXAMPLE_PROJECT, + project_repo_name='oss-fuzz', + workspace=self.workspace, + pr_ref='ref-1/merge', + base_ref='master', + cfl_platform='github') + self.assertTrue(build_fuzzers.build_fuzzers(config)) + + def test_invalid_oss_fuzz_project_name(self): + """Tests building fuzzers with invalid project 
name.""" + config = test_helpers.create_build_config( + oss_fuzz_project_name='not_a_valid_project', + project_repo_name='oss-fuzz', + workspace=self.workspace, + git_sha='0b95fe1039ed7c38fea1f97078316bfc1030c523') + self.assertFalse(build_fuzzers.build_fuzzers(config)) + + def test_invalid_repo_name(self): + """Tests building fuzzers with invalid repo name.""" + config = test_helpers.create_build_config( + oss_fuzz_project_name=EXAMPLE_PROJECT, + project_repo_name='not-real-repo', + workspace=self.workspace, + git_sha='0b95fe1039ed7c38fea1f97078316bfc1030c523') + self.assertFalse(build_fuzzers.build_fuzzers(config)) + + def test_invalid_git_sha(self): + """Tests building fuzzers with invalid commit SHA.""" + config = test_helpers.create_build_config( + oss_fuzz_project_name=EXAMPLE_PROJECT, + project_repo_name='oss-fuzz', + workspace=self.workspace, + git_sha='', + cfl_platform='github') + with self.assertRaises(AssertionError): + build_fuzzers.build_fuzzers(config) + + def test_invalid_workspace(self): + """Tests building fuzzers with invalid workspace.""" + config = test_helpers.create_build_config( + oss_fuzz_project_name=EXAMPLE_PROJECT, + project_repo_name='oss-fuzz', + workspace=os.path.join(self.workspace, 'not', 'a', 'dir'), + git_sha='0b95fe1039ed7c38fea1f97078316bfc1030c523') + self.assertFalse(build_fuzzers.build_fuzzers(config)) + + +class CheckFuzzerBuildTest(unittest.TestCase): + """Tests the check_fuzzer_build function in the cifuzz module.""" + + SANITIZER = 'address' + LANGUAGE = 'c++' + + def setUp(self): + self.temp_dir_obj = tempfile.TemporaryDirectory() + workspace_path = os.path.join(self.temp_dir_obj.name, 'workspace') + self.config = test_helpers.create_build_config( + oss_fuzz_project_name=EXAMPLE_PROJECT, + sanitizer=self.SANITIZER, + language=self.LANGUAGE, + workspace=workspace_path, + pr_ref='refs/pull/1757/merge') + self.workspace = test_helpers.create_workspace(workspace_path) + shutil.copytree(TEST_DATA_PATH, workspace_path) + 
test_helpers.patch_environ(self, runner=True) + + def tearDown(self): + self.temp_dir_obj.cleanup() + + def test_correct_fuzzer_build(self): + """Checks check_fuzzer_build function returns True for valid fuzzers.""" + self.assertTrue(build_fuzzers.check_fuzzer_build(self.config)) + + def test_not_a_valid_path(self): + """Tests that False is returned when a nonexistent path is given.""" + self.config.workspace = 'not/a/valid/path' + self.assertFalse(build_fuzzers.check_fuzzer_build(self.config)) + + def test_no_valid_fuzzers(self): + """Tests that False is returned when an empty directory is given.""" + with tempfile.TemporaryDirectory() as tmp_dir: + self.config.workspace = tmp_dir + os.mkdir(os.path.join(self.config.workspace, 'build-out')) + self.assertFalse(build_fuzzers.check_fuzzer_build(self.config)) + + @mock.patch('utils.execute', return_value=(None, None, 0)) + def test_allow_broken_fuzz_targets_percentage(self, mock_execute): + """Tests that ALLOWED_BROKEN_TARGETS_PERCENTAGE is set when running + docker if passed to check_fuzzer_build.""" + percentage = '0' + self.config.allowed_broken_targets_percentage = percentage + build_fuzzers.check_fuzzer_build(self.config) + self.assertEqual( + mock_execute.call_args[1]['env']['ALLOWED_BROKEN_TARGETS_PERCENTAGE'], + percentage) + + +@unittest.skip('Test is too long to be run with presubmit.') +class BuildSantizerIntegrationTest(unittest.TestCase): + """Integration tests for the build_fuzzers. 
+ Note: This test relies on "curl" being an OSS-Fuzz project.""" + PROJECT_NAME = 'curl' + PR_REF = 'fake_pr' + + @classmethod + def _create_config(cls, tmp_dir, sanitizer): + return test_helpers.create_build_config( + oss_fuzz_project_name=cls.PROJECT_NAME, + project_repo_name=cls.PROJECT_NAME, + workspace=tmp_dir, + pr_ref=cls.PR_REF, + sanitizer=sanitizer) + + @parameterized.parameterized.expand([('memory',), ('undefined',)]) + def test_valid_project_curl(self, sanitizer): + """Tests that MSAN can be detected from project.yaml""" + with tempfile.TemporaryDirectory() as tmp_dir: + self.assertTrue( + build_fuzzers.build_fuzzers(self._create_config(tmp_dir, sanitizer))) + + +class GetDockerBuildFuzzersArgsNotContainerTest(unittest.TestCase): + """Tests that _get_docker_build_fuzzers_args_not_container works as + intended.""" + + def test_get_docker_build_fuzzers_args_no_container(self): + """Tests that _get_docker_build_fuzzers_args_not_container works + as intended.""" + host_repo_path = '/host/repo' + result = build_fuzzers._get_docker_build_fuzzers_args_not_container( + host_repo_path) + expected_result = ['-v', '/host/repo:/host/repo'] + self.assertEqual(result, expected_result) + + +if __name__ == '__main__': + unittest.main() diff --git a/local-test-tika-delta-02/fuzz-tooling/infra/cifuzz/cifuzz_combined_entrypoint.py b/local-test-tika-delta-02/fuzz-tooling/infra/cifuzz/cifuzz_combined_entrypoint.py new file mode 100644 index 0000000000000000000000000000000000000000..920e32e42965c314c7fcc5c7808315bb68e051a0 --- /dev/null +++ b/local-test-tika-delta-02/fuzz-tooling/infra/cifuzz/cifuzz_combined_entrypoint.py @@ -0,0 +1,40 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Builds fuzzers and runs fuzzers. Entrypoint used for external users"""
+import logging
+import sys
+
+import build_fuzzers_entrypoint
+import run_fuzzers_entrypoint
+
+
+def main():
+  """Builds and runs fuzzers for CI tools.
+
+  NOTE: Any crash report will be in the filepath: $WORKSPACE/out/testcase
+  This can be used with GitHub's upload-artifact action to surface the logs.
+
+  Returns:
+    0 on success or 1 on failure.
+  """
+  logging.debug("Using cifuzz_combined_entrypoint.")
+  # Build first; a non-zero result means the build failed, so skip running.
+  result = build_fuzzers_entrypoint.build_fuzzers_entrypoint()
+  if result != 0:
+    return result
+  return run_fuzzers_entrypoint.run_fuzzers_entrypoint()
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/local-test-tika-delta-02/fuzz-tooling/infra/cifuzz/clusterfuzz_deployment.py b/local-test-tika-delta-02/fuzz-tooling/infra/cifuzz/clusterfuzz_deployment.py
new file mode 100644
index 0000000000000000000000000000000000000000..b36fc78dec9ab35b045aa6b42b790e1904a98e67
--- /dev/null
+++ b/local-test-tika-delta-02/fuzz-tooling/infra/cifuzz/clusterfuzz_deployment.py
@@ -0,0 +1,385 @@
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Module for interacting with the ClusterFuzz deployment.""" +import logging +import os +import sys +import urllib.error +import urllib.request + +import config_utils +import continuous_integration +import filestore_utils +import http_utils +import get_coverage +import repo_manager + +# pylint: disable=wrong-import-position,import-error +sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) +import utils + + +class BaseClusterFuzzDeployment: + """Base class for ClusterFuzz deployments.""" + + def __init__(self, config, workspace): + self.config = config + self.workspace = workspace + self.ci_system = continuous_integration.get_ci(config) + + def download_latest_build(self): + """Downloads the latest build from ClusterFuzz. + + Returns: + A path to where the OSS-Fuzz build was stored, or None if it wasn't. + """ + raise NotImplementedError('Child class must implement method.') + + def upload_build(self, commit): + """Uploads the build with the given commit sha to the filestore.""" + raise NotImplementedError('Child class must implement method.') + + def download_corpus(self, target_name, corpus_dir): + """Downloads the corpus for |target_name| from ClusterFuzz to |corpus_dir|. + + Returns: + A path to where the OSS-Fuzz build was stored, or None if it wasn't. 
+ """ + raise NotImplementedError('Child class must implement method.') + + def upload_crashes(self): + """Uploads crashes in |crashes_dir| to filestore.""" + raise NotImplementedError('Child class must implement method.') + + def upload_corpus(self, target_name, corpus_dir, replace=False): # pylint: disable=no-self-use,unused-argument + """Uploads the corpus for |target_name| to filestore.""" + raise NotImplementedError('Child class must implement method.') + + def upload_coverage(self): + """Uploads the coverage report to the filestore.""" + raise NotImplementedError('Child class must implement method.') + + def get_coverage(self, repo_path): + """Returns the project coverage object for the project.""" + raise NotImplementedError('Child class must implement method.') + + +def _make_empty_dir_if_nonexistent(path): + """Makes an empty directory at |path| if it does not exist.""" + os.makedirs(path, exist_ok=True) + + +class ClusterFuzzLite(BaseClusterFuzzDeployment): + """Class representing a deployment of ClusterFuzzLite.""" + + COVERAGE_NAME = 'latest' + LATEST_BUILD_WINDOW = 3 + + def __init__(self, config, workspace): + super().__init__(config, workspace) + self.filestore = filestore_utils.get_filestore(self.config) + + def download_latest_build(self): + if os.path.exists(self.workspace.clusterfuzz_build): + # This path is necessary because download_latest_build can be called + # multiple times.That is the case because it is called only when we need + # to see if a bug is novel, i.e. until we want to check a bug is novel we + # don't want to waste time calling this, but therefore this method can be + # called if multiple bugs are found. 
+ return self.workspace.clusterfuzz_build + + repo_dir = self.ci_system.repo_dir + if not repo_dir: + raise RuntimeError('Repo checkout does not exist.') + + _make_empty_dir_if_nonexistent(self.workspace.clusterfuzz_build) + repo = repo_manager.RepoManager(repo_dir) + + diff_base = self.ci_system.get_diff_base() + if not diff_base: + diff_base = 'HEAD^' + + # Builds are stored by commit, so try the latest |LATEST_BUILD_WINDOW| + # commits before the current diff base. + # TODO(ochang): If API usage becomes an issue, this can be optimized by the + # filestore accepting a list of filenames to try. + try: + # TODO(metzman): Why do we default to 'origin', we should avoid going down + # this path entirely and not need to catch an exception. + commit_list = repo.get_commit_list(diff_base, + limit=self.LATEST_BUILD_WINDOW) + except ValueError as error: + logging.error('Can\'t get commit list: %s', error) + return None + + for old_commit in commit_list: + logging.info('Trying to downloading previous build %s.', old_commit) + build_name = self._get_build_name(old_commit) + try: + if self.filestore.download_build(build_name, + self.workspace.clusterfuzz_build): + logging.info('Done downloading previous build.') + return self.workspace.clusterfuzz_build + + logging.info('Build for %s does not exist.', old_commit) + except Exception as err: # pylint: disable=broad-except + logging.error('Could not download build for %s because of: %s', + old_commit, err) + + return None + + def download_corpus(self, target_name, corpus_dir): + _make_empty_dir_if_nonexistent(corpus_dir) + logging.info('Downloading corpus for %s to %s.', target_name, corpus_dir) + corpus_name = self._get_corpus_name(target_name) + try: + self.filestore.download_corpus(corpus_name, corpus_dir) + logging.info('Done downloading corpus. Contains %d elements.', + len(os.listdir(corpus_dir))) + except Exception as err: # pylint: disable=broad-except + logging.error('Failed to download corpus for target: %s. 
Error: %s', + target_name, str(err)) + return corpus_dir + + def _get_build_name(self, name): + return f'{self.config.sanitizer}-{name}' + + def _get_corpus_name(self, target_name): # pylint: disable=no-self-use + """Returns the name of the corpus artifact.""" + return target_name + + def upload_corpus(self, target_name, corpus_dir, replace=False): + """Upload the corpus produced by |target_name|.""" + logging.info('Uploading corpus in %s for %s.', corpus_dir, target_name) + name = self._get_corpus_name(target_name) + try: + self.filestore.upload_corpus(name, corpus_dir, replace=replace) + logging.info('Done uploading corpus.') + except Exception as err: # pylint: disable=broad-except + logging.error('Failed to upload corpus for target: %s. Error: %s.', + target_name, err) + + def upload_build(self, commit): + """Upload the build produced by CIFuzz as the latest build.""" + logging.info('Uploading latest build in %s.', self.workspace.out) + build_name = self._get_build_name(commit) + try: + result = self.filestore.upload_build(build_name, self.workspace.out) + logging.info('Done uploading latest build.') + return result + except Exception as err: # pylint: disable=broad-except + logging.error('Failed to upload latest build: %s. Error: %s', + self.workspace.out, err) + + def upload_crashes(self): + """Uploads crashes.""" + artifact_dirs = os.listdir(self.workspace.artifacts) + if not artifact_dirs: + logging.info('No crashes in %s. 
Not uploading.', self.workspace.artifacts) + return + + for crash_target in artifact_dirs: + artifact_dir = os.path.join(self.workspace.artifacts, crash_target) + if not os.path.isdir(artifact_dir): + logging.warning('%s is not an expected artifact directory, skipping.', + crash_target) + continue + + logging.info('Uploading crashes in %s.', artifact_dir) + try: + self.filestore.upload_crashes(crash_target, artifact_dir) + logging.info('Done uploading crashes.') + except Exception as err: # pylint: disable=broad-except + logging.error('Failed to upload crashes. Error: %s', err) + + def upload_coverage(self): + """Uploads the coverage report to the filestore.""" + self.filestore.upload_coverage(self.COVERAGE_NAME, + self.workspace.coverage_report) + + def get_coverage(self, repo_path): + """Returns the project coverage object for the project.""" + _make_empty_dir_if_nonexistent(self.workspace.clusterfuzz_coverage) + try: + if not self.filestore.download_coverage( + self.COVERAGE_NAME, self.workspace.clusterfuzz_coverage): + logging.error('Could not download coverage.') + return None + return get_coverage.FilesystemCoverage( + repo_path, self.workspace.clusterfuzz_coverage) + except Exception as err: # pylint: disable=broad-except + logging.error('Could not get coverage: %s.', err) + return None + + +class OSSFuzz(BaseClusterFuzzDeployment): + """The OSS-Fuzz ClusterFuzz deployment.""" + + # Location of clusterfuzz builds on GCS. + CLUSTERFUZZ_BUILDS = 'clusterfuzz-builds' + + # Zip file name containing the corpus. + CORPUS_ZIP_NAME = 'public.zip' + + def get_latest_build_name(self): + """Gets the name of the latest OSS-Fuzz build of a project. + + Returns: + A string with the latest build version or None. 
+ """ + version_file = ( + f'{self.config.oss_fuzz_project_name}-{self.config.sanitizer}' + '-latest.version') + version_url = utils.url_join(utils.GCS_BASE_URL, self.CLUSTERFUZZ_BUILDS, + self.config.oss_fuzz_project_name, + version_file) + try: + response = urllib.request.urlopen(version_url) + except urllib.error.HTTPError: + logging.error('Error getting latest build version for %s from: %s.', + self.config.oss_fuzz_project_name, version_url) + return None + return response.read().decode() + + def download_latest_build(self): + """Downloads the latest OSS-Fuzz build from GCS. + + Returns: + A path to where the OSS-Fuzz build was stored, or None if it wasn't. + """ + if os.path.exists(self.workspace.clusterfuzz_build): + # This function can be called multiple times, don't download the build + # again. + return self.workspace.clusterfuzz_build + + _make_empty_dir_if_nonexistent(self.workspace.clusterfuzz_build) + + latest_build_name = self.get_latest_build_name() + if not latest_build_name: + return None + + logging.info('Downloading latest build.') + oss_fuzz_build_url = utils.url_join(utils.GCS_BASE_URL, + self.CLUSTERFUZZ_BUILDS, + self.config.oss_fuzz_project_name, + latest_build_name) + if http_utils.download_and_unpack_zip(oss_fuzz_build_url, + self.workspace.clusterfuzz_build): + logging.info('Done downloading latest build.') + return self.workspace.clusterfuzz_build + + return None + + def upload_build(self, commit): # pylint: disable=no-self-use + """Noop Implementation of upload_build.""" + logging.info('Not uploading latest build because on OSS-Fuzz.') + + def upload_corpus(self, target_name, corpus_dir, replace=False): # pylint: disable=no-self-use,unused-argument + """Noop Implementation of upload_corpus.""" + logging.info('Not uploading corpus because on OSS-Fuzz.') + + def upload_crashes(self): # pylint: disable=no-self-use + """Noop Implementation of upload_crashes.""" + logging.info('Not uploading crashes because on OSS-Fuzz.') + + def 
download_corpus(self, target_name, corpus_dir):
+    """Downloads the latest OSS-Fuzz corpus for the target.
+
+    Returns:
+      The local path to the corpus or None if download failed.
+    """
+    _make_empty_dir_if_nonexistent(corpus_dir)
+    # Corpus buckets use project-qualified target names; add the prefix if the
+    # caller passed a bare target name.
+    project_qualified_fuzz_target_name = target_name
+    qualified_name_prefix = self.config.oss_fuzz_project_name + '_'
+    if not target_name.startswith(qualified_name_prefix):
+      project_qualified_fuzz_target_name = qualified_name_prefix + target_name
+
+    corpus_url = (f'{utils.GCS_BASE_URL}{self.config.oss_fuzz_project_name}'
+                  '-backup.clusterfuzz-external.appspot.com/corpus/'
+                  f'libFuzzer/{project_qualified_fuzz_target_name}/'
+                  f'{self.CORPUS_ZIP_NAME}')
+    logging.info('Downloading corpus from OSS-Fuzz: %s', corpus_url)
+
+    if not http_utils.download_and_unpack_zip(corpus_url, corpus_dir):
+      logging.warning('Failed to download corpus for %s.', target_name)
+    return corpus_dir
+
+  def upload_coverage(self):
+    """Noop Implementation of upload_coverage_report."""
+    logging.info('Not uploading coverage report because on OSS-Fuzz.')
+
+  def get_coverage(self, repo_path):
+    """Returns the project coverage object for the project."""
+    try:
+      return get_coverage.OSSFuzzCoverage(repo_path,
+                                          self.config.oss_fuzz_project_name)
+    except get_coverage.CoverageError:
+      return None
+
+
+class NoClusterFuzzDeployment(BaseClusterFuzzDeployment):
+  """ClusterFuzzDeployment implementation used when there is no deployment of
+  ClusterFuzz to use."""
+
+  def upload_build(self, commit):  # pylint: disable=no-self-use
+    """Noop Implementation of upload_build."""
+    logging.info('Not uploading latest build because no ClusterFuzz '
+                 'deployment.')
+
+  def upload_corpus(self, target_name, corpus_dir, replace=False):  # pylint: disable=no-self-use,unused-argument
+    """Noop Implementation of upload_corpus."""
+    logging.info('Not uploading corpus because no ClusterFuzz deployment.')
+
+  def upload_crashes(self):  # pylint: disable=no-self-use
+    """Noop Implementation of 
upload_crashes.""" + logging.info('Not uploading crashes because no ClusterFuzz deployment.') + + def download_corpus(self, target_name, corpus_dir): + """Noop Implementation of download_corpus.""" + logging.info('Not downloading corpus because no ClusterFuzz deployment.') + return _make_empty_dir_if_nonexistent(corpus_dir) + + def download_latest_build(self): # pylint: disable=no-self-use + """Noop Implementation of download_latest_build.""" + logging.info( + 'Not downloading latest build because no ClusterFuzz deployment.') + + def upload_coverage(self): + """Noop Implementation of upload_coverage.""" + logging.info( + 'Not uploading coverage report because no ClusterFuzz deployment.') + + def get_coverage(self, repo_path): + """Noop Implementation of get_coverage.""" + logging.info( + 'Not getting project coverage because no ClusterFuzz deployment.') + + +_PLATFORM_CLUSTERFUZZ_DEPLOYMENT_MAPPING = { + config_utils.BaseConfig.Platform.INTERNAL_GENERIC_CI: OSSFuzz, + config_utils.BaseConfig.Platform.INTERNAL_GITHUB: OSSFuzz, + config_utils.BaseConfig.Platform.EXTERNAL_GENERIC_CI: ClusterFuzzLite, + config_utils.BaseConfig.Platform.EXTERNAL_GITHUB: ClusterFuzzLite, +} + + +def get_clusterfuzz_deployment(config, workspace): + """Returns object reprsenting deployment of ClusterFuzz used by |config|.""" + deployment_cls = _PLATFORM_CLUSTERFUZZ_DEPLOYMENT_MAPPING[config.platform] + if config.no_clusterfuzz_deployment: + logging.info('Overriding ClusterFuzzDeployment. 
Using None.') + deployment_cls = NoClusterFuzzDeployment + result = deployment_cls(config, workspace) + logging.info('ClusterFuzzDeployment: %s.', result) + return result diff --git a/local-test-tika-delta-02/fuzz-tooling/infra/cifuzz/config_utils_test.py b/local-test-tika-delta-02/fuzz-tooling/infra/cifuzz/config_utils_test.py new file mode 100644 index 0000000000000000000000000000000000000000..cb82dee44026532eb0b0127a6bb565e25e965f1e --- /dev/null +++ b/local-test-tika-delta-02/fuzz-tooling/infra/cifuzz/config_utils_test.py @@ -0,0 +1,200 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Tests for config_utils.""" +import os +import unittest +from unittest import mock + +import config_utils +import constants +import test_helpers + +# pylint: disable=no-self-use,protected-access + + +class BaseConfigTest(unittest.TestCase): + """Tests for BaseConfig.""" + + def setUp(self): + test_helpers.patch_environ(self) + + def _create_config(self): + return config_utils.BuildFuzzersConfig() + + def test_language_default(self): + """Tests that the correct default language is set.""" + config = self._create_config() + self.assertEqual(config.language, 'c++') + + def test_language(self): + """Tests that the correct language is set.""" + language = 'python' + os.environ['LANGUAGE'] = language + config = self._create_config() + self.assertEqual(config.language, language) + + def test_is_coverage(self): + """Tests that is_coverage is set correctly.""" + # Test it is set when it is supposed to be. + os.environ['SANITIZER'] = 'coverage' + config = self._create_config() + self.assertTrue(config.is_coverage) + + # Test it is not set when it is not supposed to be. + os.environ['SANITIZER'] = 'address' + config = self._create_config() + self.assertFalse(config.is_coverage) + + @mock.patch('logging.error') + def test_validate_no_workspace(self, mock_error): + """Tests that validate returns False if GITHUB_WORKSPACE isn't set.""" + os.environ['OSS_FUZZ_PROJECT_NAME'] = 'example' + config = self._create_config() + self.assertFalse(config.validate()) + mock_error.assert_called_with('Must set WORKSPACE.') + + @mock.patch('logging.error') + def test_validate_invalid_language(self, mock_error): + """Tests that validate returns False if GITHUB_WORKSPACE isn't set.""" + os.environ['OSS_FUZZ_PROJECT_NAME'] = 'example' + os.environ['WORKSPACE'] = '/workspace' + os.environ['LANGUAGE'] = 'invalid-language' + config = self._create_config() + self.assertFalse(config.validate()) + mock_error.assert_called_with('Invalid LANGUAGE: %s. 
Must be one of: %s.', + os.environ['LANGUAGE'], constants.LANGUAGES) + + @mock.patch('logging.error') + def test_validate_invalid_sanitizer(self, mock_error): + """Tests that validate returns False if GITHUB_WORKSPACE isn't set.""" + os.environ['OSS_FUZZ_PROJECT_NAME'] = 'example' + os.environ['WORKSPACE'] = '/workspace' + os.environ['SANITIZER'] = 'invalid-sanitizer' + config = self._create_config() + self.assertFalse(config.validate()) + mock_error.assert_called_with('Invalid SANITIZER: %s. Must be one of: %s.', + os.environ['SANITIZER'], + config_utils.SANITIZERS) + + def test_validate(self): + """Tests that validate returns True if config is valid.""" + os.environ['OSS_FUZZ_PROJECT_NAME'] = 'example' + os.environ['WORKSPACE'] = '/workspace' + os.environ['REPOSITORY'] = 'repo' + config = self._create_config() + self.assertTrue(config.validate()) + + +class BuildFuzzersConfigTest(unittest.TestCase): + """Tests for BuildFuzzersConfig.""" + + def setUp(self): + test_helpers.patch_environ(self) + + def _create_config(self): + return config_utils.BuildFuzzersConfig() + + @mock.patch('platform_config.github._get_event_data', return_value={}) + def test_github_base_ref(self, _): + """Tests that base_ref is set properly.""" + expected_base_ref = 'expected_base_ref' + os.environ['GITHUB_BASE_REF'] = expected_base_ref + os.environ['CFL_PLATFORM'] = 'github' + os.environ['GITHUB_REPOSITORY'] = 'owner/repo' + config = self._create_config() + self.assertEqual(config.base_ref, expected_base_ref) + + def test_base_ref(self): + """Tests that base_ref is set properly.""" + expected_base_ref = 'expected_base_ref' + os.environ['GIT_BASE_REF'] = expected_base_ref + config = self._create_config() + self.assertEqual(config.base_ref, expected_base_ref) + + def test_keep_unaffected_defaults_to_true(self): + """Tests that keep_unaffected_fuzz_targets defaults to true.""" + config = self._create_config() + self.assertTrue(config.keep_unaffected_fuzz_targets) + + def 
test_keep_unaffected_defaults_to_false_when_pr(self): + """Tests that keep_unaffected_fuzz_targets defaults to false when from a + pr.""" + os.environ['GIT_BASE_REF'] = 'base-ref' + config = self._create_config() + self.assertFalse(config.keep_unaffected_fuzz_targets) + + +class RunFuzzersConfigTest(unittest.TestCase): + """Tests for RunFuzzersConfig.""" + + def setUp(self): + test_helpers.patch_environ(self) + + def _create_config(self): + return config_utils.RunFuzzersConfig() + + def test_coverage(self): + """Tests that mode is overriden properly based on is_coverage.""" + # Test that it is overriden when it is supposed to be. + os.environ['SANITIZER'] = 'coverage' + os.environ['MODE'] = 'code-change' + config = self._create_config() + self.assertEqual(config.mode, 'coverage') + + # Test that it isn't overriden when it isn't supposed to be. + os.environ['SANITIZER'] = 'address' + mode = 'code-change' + os.environ['MODE'] = mode + config = self._create_config() + self.assertEqual(config.mode, mode) + + def test_run_config_validate(self): + """Tests that _run_config_validate returns True when the config is valid.""" + self.assertTrue(self._create_config()._run_config_validate()) + + @mock.patch('logging.error') + def test_run_config_invalid_mode(self, mock_error): + """Tests that _run_config_validate returns False when mode is invalid.""" + fake_mode = 'fake-mode' + os.environ['MODE'] = fake_mode + self.assertFalse(self._create_config()._run_config_validate()) + mock_error.assert_called_with('Invalid MODE: %s. 
Must be one of %s.', + fake_mode, + config_utils.RunFuzzersConfig.MODES) + + +class GetSanitizerTest(unittest.TestCase): + """Tests for _get_sanitizer.""" + + def setUp(self): + test_helpers.patch_environ(self) + self.sanitizer = 'memory' + + def test_default_value(self): + """Tests that the default value returned by _get_sanitizer is correct.""" + self.assertEqual(config_utils._get_sanitizer(), 'address') + + def test_normal_case(self): + """Tests that _get_sanitizer returns the correct value in normal cases.""" + os.environ['SANITIZER'] = self.sanitizer + self.assertEqual(config_utils._get_sanitizer(), self.sanitizer) + + def test_capitalization(self): + """Tests that that _get_sanitizer handles capitalization properly.""" + os.environ['SANITIZER'] = self.sanitizer.upper() + self.assertEqual(config_utils._get_sanitizer(), self.sanitizer) + + +if __name__ == '__main__': + unittest.main() diff --git a/local-test-tika-delta-02/fuzz-tooling/infra/cifuzz/continuous_integration_test.py b/local-test-tika-delta-02/fuzz-tooling/infra/cifuzz/continuous_integration_test.py new file mode 100644 index 0000000000000000000000000000000000000000..5c4fa32895773b9a742509bc46d5f5df008e5a98 --- /dev/null +++ b/local-test-tika-delta-02/fuzz-tooling/infra/cifuzz/continuous_integration_test.py @@ -0,0 +1,88 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Tests for continuous_integration_module.""" +import os +import sys +import unittest +from unittest import mock + +import continuous_integration +import docker + +# pylint: disable=wrong-import-position,import-error +sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) + +import repo_manager + +# pylint: disable=no-self-use + + +class FixGitRepoForDiffTest(unittest.TestCase): + """Tests for fix_git_repo_for_diff.""" + + @mock.patch('utils.execute') + def test_fix_git_repo_for_diff(self, mock_execute): + """Tests that fix_git_repo_for_diff works as intended.""" + repo_dir = '/dir' + repo_manager_obj = repo_manager.RepoManager(repo_dir) + continuous_integration.fix_git_repo_for_diff(repo_manager_obj) + expected_command = [ + 'git', 'symbolic-ref', 'refs/remotes/origin/HEAD', + 'refs/remotes/origin/master' + ] + + mock_execute.assert_called_with(expected_command, location=repo_dir) + + +class GetBuildCommand(unittest.TestCase): + """Tests for get_build_command.""" + + def test_build_command(self): + """Tests that get_build_command works as intended.""" + self.assertEqual(continuous_integration.get_build_command(), 'compile') + + +class GetReplaceRepoAndBuildCommand(unittest.TestCase): + """Tests for get_replace_repo_and_build_command.""" + + def test_get_replace_repo_and_build_command(self): + """Tests that get_replace_repo_and_build_command works as intended.""" + host_repo_path = '/path/on/host/to/repo' + image_repo_path = '/src/repo' + command = continuous_integration.get_replace_repo_and_build_command( + host_repo_path, image_repo_path) + expected_command = ('cd / && rm -rf /src/repo/* && ' + 'cp -r /path/on/host/to/repo /src && cd - ' + '&& compile') + self.assertEqual(command, expected_command) + + +class BuildExternalProjetDockerImage(unittest.TestCase): + """Tests for build_external_project_docker_image.""" + + @mock.patch('helper.docker_build') + def test_build_external_project_docker_image(self, mock_docker_build): + """Tests that 
build_external_project_docker_image works as intended.""" + build_integration_path = '.clusterfuzzlite' + project_src = '/path/to/project/src' + continuous_integration.build_external_project_docker_image( + project_src, build_integration_path) + + mock_docker_build.assert_called_with([ + '-t', docker.EXTERNAL_PROJECT_IMAGE, '-f', + os.path.join('.clusterfuzzlite', 'Dockerfile'), project_src + ]) + + +# TODO(metzman): Write tests for the rest of continuous_integration.py. diff --git a/local-test-tika-delta-02/fuzz-tooling/infra/cifuzz/docker.py b/local-test-tika-delta-02/fuzz-tooling/infra/cifuzz/docker.py new file mode 100644 index 0000000000000000000000000000000000000000..36e4d7c572cccc0faa3638439193af5973f0e65e --- /dev/null +++ b/local-test-tika-delta-02/fuzz-tooling/infra/cifuzz/docker.py @@ -0,0 +1,127 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Module for dealing with docker.""" +import logging +import os +import sys +import uuid + +# pylint: disable=wrong-import-position,import-error +sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) + +import constants +import utils +import environment + +BASE_BUILDER_TAG = 'ghcr.io/aixcc-finals/base-builder' +PROJECT_TAG_PREFIX = 'gcr.io/oss-fuzz/' + +# Default fuzz configuration. 
"""Module for dealing with docker."""
import logging
import os
import sys
import uuid

# pylint: disable=wrong-import-position,import-error
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

import constants
import utils
import environment

BASE_BUILDER_TAG = 'ghcr.io/aixcc-finals/base-builder'
PROJECT_TAG_PREFIX = 'gcr.io/oss-fuzz/'

# Default fuzz configuration.
_DEFAULT_DOCKER_RUN_ARGS = [
    '-e', 'FUZZING_ENGINE=' + constants.DEFAULT_ENGINE, '-e', 'CIFUZZ=True'
]

UNIQUE_ID_SUFFIX = '-' + uuid.uuid4().hex

# TODO(metzman): Make run_fuzzers able to delete this image.
EXTERNAL_PROJECT_IMAGE = 'external-cfl-project' + UNIQUE_ID_SUFFIX

_DEFAULT_DOCKER_RUN_COMMAND = [
    'docker',
    'run',
    '--rm',
    '--privileged',
]


def get_docker_env_vars(env_mapping):
  """Returns a flat list of '-e NAME=VALUE' docker arguments, one pair per
  entry of |env_mapping| (insertion order preserved)."""
  env_var_args = []
  for name, value in env_mapping.items():
    env_var_args += ['-e', f'{name}={value}']
  return env_var_args


def get_project_image_name(project):
  """Returns the builder image name for |project|, or the external project
  image when |project| is falsy."""
  # TODO(jonathanmetzman): We may need unique names to support parallel fuzzing
  # for CIFuzz (like CFL supports). Don't do this for now because no one has
  # asked for it and build_specified_commit would need to be modified to
  # support this.
  if not project:
    return EXTERNAL_PROJECT_IMAGE
  return PROJECT_TAG_PREFIX + project


def delete_images(images):
  """Force-removes |images| and prunes the docker build cache."""
  utils.execute(['docker', 'rmi', '-f'] + images)
  utils.execute(['docker', 'builder', 'prune', '-f'])


def get_base_docker_run_args(workspace,
                             sanitizer=constants.DEFAULT_SANITIZER,
                             language=constants.DEFAULT_LANGUAGE,
                             architecture=constants.DEFAULT_ARCHITECTURE,
                             docker_in_docker=False):
  """Returns (docker_args, docker_container): arguments that should be passed
  to every invocation of 'docker run', and the id/name of the container we are
  running in (None when running directly on a host)."""
  docker_args = list(_DEFAULT_DOCKER_RUN_ARGS)
  docker_args += get_docker_env_vars({
      'SANITIZER': sanitizer,
      'ARCHITECTURE': architecture,
      'FUZZING_LANGUAGE': language,
      'OUT': workspace.out,
  })
  docker_container = environment.get('CFL_CONTAINER_ID',
                                     utils.get_container_name())
  logging.info('Docker container: %s.', docker_container)
  if docker_container and not docker_in_docker:
    # Don't map specific volumes if in a docker container, it breaks when
    # running a sibling container.
    docker_args += ['--volumes-from', docker_container]
  else:
    docker_args += _get_args_mapping_host_path_to_container(workspace.workspace)
  return docker_args, docker_container


def get_base_docker_run_command(workspace,
                                sanitizer=constants.DEFAULT_SANITIZER,
                                language=constants.DEFAULT_LANGUAGE,
                                architecture=constants.DEFAULT_ARCHITECTURE,
                                docker_in_docker=False):
  """Returns (command, docker_container): the common 'docker run' prefix
  combined with the arguments from get_base_docker_run_args."""
  docker_args, docker_container = get_base_docker_run_args(
      workspace,
      sanitizer,
      language,
      architecture,
      docker_in_docker=docker_in_docker)
  command = list(_DEFAULT_DOCKER_RUN_COMMAND) + docker_args
  return command, docker_container


def _get_args_mapping_host_path_to_container(host_path, container_path=None):
  """Returns docker args mapping |host_path| on the host into the container at
  |container_path|, or at the identical path when |container_path| is None."""
  # WARNING: Do not use this function when running in production (and
  # --volumes-from) is used for mapping volumes. It will break production.
  if container_path is None:
    container_path = host_path
  return ['-v', f'{host_path}:{container_path}']
+"""Tests the functionality of the docker module.""" +import unittest +from unittest import mock + +import docker +import test_helpers +import workspace_utils + +CONTAINER_NAME = 'example-container' +config = test_helpers.create_run_config(oss_fuzz_project_name='project', + workspace='/workspace') +config.workspace = '/workspace' +WORKSPACE = workspace_utils.Workspace(config) +SANITIZER = 'example-sanitizer' +LANGUAGE = 'example-language' + + +class GetProjectImageTest(unittest.TestCase): + """Tests for get_project_image.""" + + def test_get_project_image(self): + """Tests that get_project_image_name works as intended.""" + project = 'my-project' + self.assertEqual(docker.get_project_image_name(project), + 'gcr.io/oss-fuzz/my-project') + + +class GetDeleteImagesTest(unittest.TestCase): + """Tests for delete_images.""" + + @mock.patch('utils.execute') + def test_delete_images(self, mock_execute): # pylint: disable=no-self-use + """Tests that get_project_image_name works as intended.""" + images = ['image'] + docker.delete_images(images) + expected_calls = [ + mock.call(['docker', 'rmi', '-f'] + images), + mock.call(['docker', 'builder', 'prune', '-f']) + ] + + mock_execute.assert_has_calls(expected_calls) + + +class GetBaseDockerRunArgsTest(unittest.TestCase): + """Tests get_base_docker_run_args.""" + + @mock.patch('utils.get_container_name', return_value=CONTAINER_NAME) + def test_get_base_docker_run_args_container(self, _): + """Tests that get_base_docker_run_args works as intended when inside a + container.""" + docker_args, docker_container = docker.get_base_docker_run_args( + WORKSPACE, SANITIZER, LANGUAGE) + self.assertEqual(docker_container, CONTAINER_NAME) + expected_docker_args = [] + expected_docker_args = [ + '-e', + 'FUZZING_ENGINE=libfuzzer', + '-e', + 'CIFUZZ=True', + '-e', + f'SANITIZER={SANITIZER}', + '-e', + 'ARCHITECTURE=x86_64', + '-e', + f'FUZZING_LANGUAGE={LANGUAGE}', + '-e', + f'OUT={WORKSPACE.out}', + '--volumes-from', + CONTAINER_NAME, + ] + 
self.assertEqual(docker_args, expected_docker_args) + + @mock.patch('utils.get_container_name', return_value=None) + def test_get_base_docker_run_args_no_container(self, _): + """Tests that get_base_docker_run_args works as intended when not inside a + container.""" + docker_args, docker_container = docker.get_base_docker_run_args( + WORKSPACE, SANITIZER, LANGUAGE) + self.assertEqual(docker_container, None) + expected_docker_args = [ + '-e', 'FUZZING_ENGINE=libfuzzer', '-e', 'CIFUZZ=True', '-e', + f'SANITIZER={SANITIZER}', '-e', 'ARCHITECTURE=x86_64', '-e', + f'FUZZING_LANGUAGE={LANGUAGE}', '-e', f'OUT={WORKSPACE.out}', '-v', + f'{WORKSPACE.workspace}:{WORKSPACE.workspace}' + ] + self.assertEqual(docker_args, expected_docker_args) + + +class GetBaseDockerRunCommandTest(unittest.TestCase): + """Tests get_base_docker_run_args.""" + + @mock.patch('utils.get_container_name', return_value=None) + def test_get_base_docker_run_command_no_container(self, _): + """Tests that get_base_docker_run_args works as intended when not inside a + container.""" + docker_args, docker_container = docker.get_base_docker_run_command( + WORKSPACE, SANITIZER, LANGUAGE) + self.assertEqual(docker_container, None) + expected_docker_command = [ + 'docker', 'run', '--rm', '--privileged', '-e', + 'FUZZING_ENGINE=libfuzzer', '-e', 'CIFUZZ=True', '-e', + f'SANITIZER={SANITIZER}', '-e', 'ARCHITECTURE=x86_64', '-e', + f'FUZZING_LANGUAGE={LANGUAGE}', '-e', f'OUT={WORKSPACE.out}', '-v', + f'{WORKSPACE.workspace}:{WORKSPACE.workspace}' + ] + self.assertEqual(docker_args, expected_docker_command) diff --git a/local-test-tika-delta-02/fuzz-tooling/infra/cifuzz/environment.py b/local-test-tika-delta-02/fuzz-tooling/infra/cifuzz/environment.py new file mode 100644 index 0000000000000000000000000000000000000000..e99a67910948bcd4ef49e6f7684a976f49f68827 --- /dev/null +++ b/local-test-tika-delta-02/fuzz-tooling/infra/cifuzz/environment.py @@ -0,0 +1,51 @@ +# Copyright 2021 Google LLC +# +# Licensed under the 
Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Module for dealing with env vars.""" + +import ast +import os + + +def _eval_value(value_string): + """Returns evaluated value.""" + try: + return ast.literal_eval(value_string) + except: # pylint: disable=bare-except + # String fallback. + return value_string + + +def get(env_var, default_value=None): + """Returns an environment variable value.""" + value_string = os.getenv(env_var) + if value_string is None: + return default_value + + return _eval_value(value_string) + + +def get_bool(env_var, default_value=None): + """Returns a boolean environment variable value. This is needed because a lot + of CIFuzz users specified 'false' for dry-run. So we need to special case + this.""" + value = get(env_var, default_value) + if not isinstance(value, str): + return bool(value) + + lower_value = value.lower() + allowed_values = {'true', 'false'} + if lower_value not in allowed_values: + raise Exception(f'Bool env var {env_var} value {value} is invalid. 
' + f'Must be one of {allowed_values}.') + return lower_value == 'true' diff --git a/local-test-tika-delta-02/fuzz-tooling/infra/cifuzz/filestore_utils.py b/local-test-tika-delta-02/fuzz-tooling/infra/cifuzz/filestore_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..acb0189693eb876f8e38efe75a69fbde86a03b56 --- /dev/null +++ b/local-test-tika-delta-02/fuzz-tooling/infra/cifuzz/filestore_utils.py @@ -0,0 +1,48 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""External filestore interface. Cannot be depended on by filestore code.""" +import filestore +import filestore.filesystem +import filestore.git +import filestore.github_actions +import filestore.gsutil +import filestore.no_filestore +import filestore.gitlab + +FILESTORE_MAPPING = { + 'filesystem': filestore.filesystem.FilesystemFilestore, + 'gsutil': filestore.gsutil.GSUtilFilestore, + 'github-actions': filestore.github_actions.GithubActionsFilestore, + 'git': filestore.git.GitFilestore, + # TODO(metzman): Change to "no-filestore" + 'no_filestore': filestore.no_filestore.NoFilestore, + 'gitlab': filestore.gitlab.GitlabFilestore, +} + + +def get_filestore(config): + """Returns the correct filestore object based on the platform in |config|. 
"""A module to handle running a fuzz target for a specified amount of time."""
import collections
import logging
import multiprocessing
import os
import shutil
import stat
import tempfile
from typing import Optional

import clusterfuzz.environment
import clusterfuzz.fuzz

import config_utils
import logs

# Module-level side effect: logging is configured on import.
logs.init()

# Use len_control=0 since we don't have enough time fuzzing for len_control to
# make sense (probably).
LIBFUZZER_OPTIONS_BATCH = ['-len_control=0']
# Use a fixed seed for determinism for code change fuzzing.
LIBFUZZER_OPTIONS_CODE_CHANGE = LIBFUZZER_OPTIONS_BATCH + ['-seed=1337']
# Disabling the RSS limit stops libFuzzer from reporting OOMs.
LIBFUZZER_OPTIONS_NO_REPORT_OOM = ['-rss_limit_mb=0']

# The number of reproduce attempts for a crash.
REPRODUCE_ATTEMPTS = 10

DEFAULT_REPRODUCE_TIME_SECONDS = 30
PER_LANGUAGE_REPRODUCE_TIMEOUTS = {
    'python': 30 * 4  # Python takes a bit longer on startup.
}
MINIMIZE_TIME_SECONDS = 60 * 4

# Seconds on top of duration until a timeout error is raised.
BUFFER_TIME = 10

# Log message if we can't check if crash reproduces on a recent build.
COULD_NOT_TEST_ON_CLUSTERFUZZ_MESSAGE = (
    'Could not run previous build of target to determine if this code change '
    '(pr/commit) introduced crash. Assuming crash was newly introduced.')

# Result of a fuzzing session: reproducer testcase path (or None), fuzzer
# stacktrace/logs (or None), and the corpus directory used.
FuzzResult = collections.namedtuple('FuzzResult',
                                    ['testcase', 'stacktrace', 'corpus_path'])


def get_libfuzzer_parallel_options():
  """Returns a list containing options to pass to libFuzzer to fuzz using all
  available cores."""
  return ['-jobs=' + str(multiprocessing.cpu_count())]


class ReproduceError(Exception):
  """Error for when we can't attempt to reproduce a crash."""


def get_fuzz_target_corpus_dir(workspace, target_name):
  """Returns the directory for storing |target_name|'s corpus in |workspace|."""
  return os.path.join(workspace.corpora, target_name)


def get_fuzz_target_pruned_corpus_dir(workspace, target_name):
  """Returns the directory for storing |target_name|'s pruned corpus in
  |workspace|."""
  return os.path.join(workspace.pruned_corpora, target_name)


class FuzzTarget:  # pylint: disable=too-many-instance-attributes
  """A class to manage a single fuzz target.

  Attributes:
    target_name: The name of the fuzz target.
    duration: The length of time in seconds that the target should run.
    target_path: The location of the fuzz target binary.
    workspace: The workspace for storing things related to fuzzing.
  """

  # pylint: disable=too-many-arguments
  def __init__(self, target_path, duration, workspace, clusterfuzz_deployment,
               config):
    """Represents a single fuzz target.

    Args:
      target_path: The location of the fuzz target binary.
      duration: The length of time in seconds the target should run.
      workspace: The path used for storing things needed for fuzzing.
      clusterfuzz_deployment: The object representing the ClusterFuzz
          deployment.
      config: The config of this project.
    """
    self.target_path = target_path
    self.target_name = os.path.basename(self.target_path)
    self.duration = int(duration)
    self.workspace = workspace
    self.clusterfuzz_deployment = clusterfuzz_deployment
    self.config = config
    # Corpus directories are created eagerly so downloads/prunes always have a
    # destination even when empty.
    self.latest_corpus_path = get_fuzz_target_corpus_dir(
        self.workspace, self.target_name)
    os.makedirs(self.latest_corpus_path, exist_ok=True)
    self.pruned_corpus_path = get_fuzz_target_pruned_corpus_dir(
        self.workspace, self.target_name)
    os.makedirs(self.pruned_corpus_path, exist_ok=True)

  def _download_corpus(self):
    """Downloads the corpus for the target from ClusterFuzz and returns the
    path to the corpus. An empty directory is provided if the corpus can't be
    downloaded or is empty."""
    self.clusterfuzz_deployment.download_corpus(self.target_name,
                                                self.latest_corpus_path)
    return self.latest_corpus_path

  def _target_artifact_path(self):
    """Returns (creating if needed) the artifact directory for this target and
    sanitizer."""
    artifact_path = os.path.join(self.workspace.artifacts, self.target_name,
                                 self.config.sanitizer)
    os.makedirs(artifact_path, exist_ok=True)
    return artifact_path

  def _save_crash(self, crash):
    """Copies |crash|'s reproducer into the artifact dir alongside a '.summary'
    file containing its stacktrace; returns the reproducer path."""
    target_reproducer_path = os.path.join(self._target_artifact_path(),
                                          os.path.basename(crash.input_path))
    shutil.copy(crash.input_path, target_reproducer_path)
    bug_summary_artifact_path = target_reproducer_path + '.summary'
    with open(bug_summary_artifact_path, 'w') as handle:
      handle.write(crash.stacktrace)

    # Set permissions of testcase to be the same as summary so that we're sure
    # it can be read by necessary users.
    permissions_mode = os.stat(bug_summary_artifact_path).st_mode
    os.chmod(target_reproducer_path, permissions_mode & 0o777)
    return target_reproducer_path

  def prune(self):
    """Prunes the corpus and returns the result."""
    self._download_corpus()
    with clusterfuzz.environment.Environment(config_utils.DEFAULT_ENGINE,
                                             self.config.sanitizer,
                                             self.target_path):
      engine_impl = clusterfuzz.fuzz.get_engine(config_utils.DEFAULT_ENGINE)
      result = engine_impl.minimize_corpus(self.target_path, [],
                                           [self.latest_corpus_path],
                                           self.pruned_corpus_path,
                                           self._target_artifact_path(),
                                           self.duration)

    print(result.logs)
    # No testcase for a prune; the pruned corpus dir is the payload.
    return FuzzResult(None, result.logs, self.pruned_corpus_path)

  def fuzz(self, batch=False) -> Optional[FuzzResult]:
    """Starts the fuzz target run for the length of time specified by duration.

    Args:
      batch: Whether this is a batch-fuzzing run (affects libFuzzer options
          and novelty checking).

    Returns:
      FuzzResult namedtuple with stacktrace and testcase if applicable.
    """
    logging.info('Running fuzzer: %s.', self.target_name)

    self._download_corpus()
    corpus_path = self.latest_corpus_path

    logging.info('Starting fuzzing')
    with tempfile.TemporaryDirectory() as artifacts_dir:
      with clusterfuzz.environment.Environment(config_utils.DEFAULT_ENGINE,
                                               self.config.sanitizer,
                                               self.target_path) as env:
        engine_impl = clusterfuzz.fuzz.get_engine(config_utils.DEFAULT_ENGINE)
        options = engine_impl.prepare(corpus_path, env.target_path,
                                      env.build_dir)
        # New testcases are kept out of the corpus; dictionary analysis is
        # skipped for speed.
        options.merge_back_new_testcases = False
        options.analyze_dictionary = False
        if batch:
          options.arguments.extend(LIBFUZZER_OPTIONS_BATCH)
        else:
          options.arguments.extend(LIBFUZZER_OPTIONS_CODE_CHANGE)

        if not self.config.report_ooms:
          options.arguments.extend(LIBFUZZER_OPTIONS_NO_REPORT_OOM)

        if self.config.parallel_fuzzing:
          if self.config.sanitizer == 'memory':
            # TODO(https://github.com/google/oss-fuzz/issues/11915): Don't gate
            # this after jobs is fixed for MSAN.
            logging.info('Not using jobs because it breaks MSAN.')
          else:
            options.arguments.extend(get_libfuzzer_parallel_options())

        result = engine_impl.fuzz(self.target_path, options, artifacts_dir,
                                  self.duration)
    print(f'Fuzzing logs:\n{result.logs}')

    if not result.crashes:
      # Libfuzzer max time was reached.
      logging.info('Fuzzer %s finished with no crashes discovered.',
                   self.target_name)
      return FuzzResult(None, None, self.latest_corpus_path)

    if result.timed_out:
      logging.info('Not reporting crash in %s because process timed out.',
                   self.target_name)
      return FuzzResult(None, None, self.latest_corpus_path)

    # Only report first crash.
    crash = result.crashes[0]
    logging.info('Fuzzer: %s. Detected bug.', self.target_name)

    is_reportable = self.is_crash_reportable(crash.input_path,
                                             crash.reproduce_args,
                                             batch=batch)
    if is_reportable or self.config.upload_all_crashes:
      logging.info('SAVING CRASH')
      fuzzer_logs = result.logs
      testcase_path = self._save_crash(crash)
      if is_reportable and self.config.minimize_crashes:
        # TODO(metzman): We don't want to minimize unreproducible crashes.
        # Use is_reportable to decide this even though reportable crashes
        # are a subset of reproducible ones.
        self.minimize_testcase(testcase_path)
      else:
        logging.info('NOT MINIMIZED')
    else:
      fuzzer_logs = None
      testcase_path = None

    return FuzzResult(testcase_path, fuzzer_logs, self.latest_corpus_path)

  def minimize_testcase(self, testcase_path):
    """Minimizes the testcase located at |testcase_path|. The minimized
    testcase is written next to the original with a '-minimized' suffix."""
    with clusterfuzz.environment.Environment(config_utils.DEFAULT_ENGINE,
                                             self.config.sanitizer,
                                             self.target_path):
      engine_impl = clusterfuzz.fuzz.get_engine(config_utils.DEFAULT_ENGINE)
      minimized_testcase_path = testcase_path + '-minimized'
      return engine_impl.minimize_testcase(self.target_path, [],
                                           testcase_path,
                                           minimized_testcase_path,
                                           max_time=MINIMIZE_TIME_SECONDS)

  def free_disk_if_needed(self, delete_fuzz_target=True):
    """Deletes things that are no longer needed from fuzzing this fuzz target
    to save disk space if needed (config.low_disk_space).

    Args:
      delete_fuzz_target: Whether the fuzz target binary itself should also be
          removed.
    """
    if not self.config.low_disk_space:
      logging.info('Not freeing disk space after running fuzz target.')
      return
    logging.info('Deleting corpus and seed corpus of %s to save disk.',
                 self.target_name)

    # Delete the seed corpus, corpus, and fuzz target.
    for corpus_path in [self.latest_corpus_path, self.pruned_corpus_path]:
      # Use ignore_errors=True to fix
      # https://github.com/google/oss-fuzz/issues/5383.
      shutil.rmtree(corpus_path, ignore_errors=True)

    target_seed_corpus_path = self.target_path + '_seed_corpus.zip'
    if os.path.exists(target_seed_corpus_path):
      os.remove(target_seed_corpus_path)

    if delete_fuzz_target:
      logging.info('Deleting fuzz target: %s.', self.target_name)
      os.remove(self.target_path)
    logging.info('Done deleting.')

  def is_reproducible(self, testcase, target_path, reproduce_args):
    """Checks if the testcase reproduces.

    Args:
      testcase: The path to the testcase to be tested.
      target_path: The path to the fuzz target to be tested
      reproduce_args: The arguments to pass to the target to reproduce the
          crash.

    Returns:
      True if crash is reproducible and we were able to run the
      binary.

    Raises:
      ReproduceError if we can't attempt to reproduce the crash.
    """
    if not os.path.exists(target_path):
      logging.info('Target: %s does not exist.', target_path)
      raise ReproduceError(f'Target {target_path} not found.')

    # NOTE(review): S_IRWXO sets rwx for "other" users only — presumably so
    # the unprivileged fuzzing user can execute the target. Confirm this is
    # intentional rather than a typo for S_IRWXU.
    os.chmod(target_path, stat.S_IRWXO)

    logging.info('Trying to reproduce crash using: %s.', testcase)
    with clusterfuzz.environment.Environment(config_utils.DEFAULT_ENGINE,
                                             self.config.sanitizer,
                                             target_path):
      reproduce_time_seconds = PER_LANGUAGE_REPRODUCE_TIMEOUTS.get(
          self.config.language, DEFAULT_REPRODUCE_TIME_SECONDS)
      for _ in range(REPRODUCE_ATTEMPTS):
        engine_impl = clusterfuzz.fuzz.get_engine(config_utils.DEFAULT_ENGINE)
        try:
          result = engine_impl.reproduce(target_path,
                                         testcase,
                                         arguments=reproduce_args,
                                         max_time=reproduce_time_seconds)
        except TimeoutError as error:
          # A timeout while reproducing is treated as not reproducible.
          logging.error('%s.', error)
          return False

        if result.return_code != 0:
          # Nonzero exit means the crash fired on this build.
          logging.info('Reproduce command returned: %s. Reproducible on %s.',
                       result.return_code, target_path)

          return True

    logging.info('Reproduce command returned: 0. Not reproducible on %s.',
                 target_path)
    return False

  def is_crash_reportable(self, testcase, reproduce_args, batch=False):
    """Returns True if a crash is reportable. This means the crash is
    reproducible but not reproducible on a build from the ClusterFuzz
    deployment (meaning the crash was introduced by this PR/commit/code
    change).

    Args:
      testcase: The path to the testcase that triggered the crash.
      reproduce_args: The arguments to pass to the target to reproduce the
          crash.
      batch: Whether this is batch fuzzing (novelty check is skipped).

    Returns:
      True if the crash was introduced by the current pull request.

    Raises:
      ReproduceError if we can't attempt to reproduce the crash on the PR
      build.
    """

    if not self.is_crash_type_reportable(testcase):
      return False

    if not os.path.exists(testcase):
      raise ReproduceError(f'Testcase {testcase} not found.')

    try:
      reproducible_on_code_change = self.is_reproducible(
          testcase, self.target_path, reproduce_args)
    except ReproduceError as error:
      logging.error('Could not check for crash reproducibility.'
                    'Please file an issue:'
                    'https://github.com/google/oss-fuzz/issues/new.')
      raise error

    if not reproducible_on_code_change:
      logging.info('Crash is not reproducible.')
      # Config decides whether unreproducible crashes are still reported.
      return self.config.report_unreproducible_crashes

    logging.info('Crash is reproducible.')
    if batch:
      # We don't need to check if the crash is novel for batch fuzzing.
      return True

    return self.is_crash_novel(testcase, reproduce_args)

  def is_crash_type_reportable(self, testcase):
    """Returns True if |testcase| is an actual crash. If crash is a timeout or
    OOM then returns True if config says we should report those."""
    # TODO(metzman): Use a less hacky method.
    # libFuzzer encodes the crash type in the artifact filename prefix.
    testcase = os.path.basename(testcase)
    if testcase.startswith('oom-'):
      return self.config.report_ooms
    if testcase.startswith('timeout-'):
      return self.config.report_timeouts
    return True

  def is_crash_novel(self, testcase, reproduce_args):
    """Returns whether or not the crash is new. A crash is considered new if it
    can't be reproduced on an older ClusterFuzz build of the target."""
    if not os.path.exists(testcase):
      raise ReproduceError('Testcase %s not found.' % testcase)
    clusterfuzz_build_dir = self.clusterfuzz_deployment.download_latest_build()
    if not clusterfuzz_build_dir:
      # Crash is reproducible on PR build and we can't test on a recent
      # ClusterFuzz/OSS-Fuzz build.
      logging.info(COULD_NOT_TEST_ON_CLUSTERFUZZ_MESSAGE)
      return True

    clusterfuzz_target_path = os.path.join(clusterfuzz_build_dir,
                                           self.target_name)

    try:
      reproducible_on_clusterfuzz_build = self.is_reproducible(
          testcase, clusterfuzz_target_path, reproduce_args)
    except ReproduceError:
      # This happens if the project has ClusterFuzz builds, but the fuzz
      # target is not in it (e.g. because the fuzz target is new).
      logging.info(COULD_NOT_TEST_ON_CLUSTERFUZZ_MESSAGE)
      return True

    if reproducible_on_clusterfuzz_build:
      logging.info('The crash is reproducible on previous build. '
                   'Code change (pr/commit) did not introduce crash.')
      return False
    logging.info('The crash is not reproducible on previous build. '
                 'Code change (pr/commit) introduced crash.')
    return True
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Tests for generate_coverage_report.""" + +import unittest +from unittest import mock + +import generate_coverage_report +import test_helpers + +OUT_DIR = '/outdir' +PROJECT = 'example-project' +SANITIZER = 'coverage' + + +class TestRunCoverageCommand(unittest.TestCase): + """Tests run_coverage_command""" + + def setUp(self): + test_helpers.patch_environ(self, empty=True) + + @mock.patch('utils.execute') + def test_run_coverage_command(self, mock_execute): # pylint: disable=no-self-use + """Tests that run_coverage_command works as intended.""" + config = test_helpers.create_run_config(oss_fuzz_project_name=PROJECT, + sanitizer=SANITIZER) + workspace = test_helpers.create_workspace() + generate_coverage_report.run_coverage_command(config, workspace) + expected_command = 'coverage' + expected_env = { + 'SANITIZER': config.sanitizer, + 'FUZZING_LANGUAGE': config.language, + 'OUT': workspace.out, + 'CIFUZZ': 'True', + 'FUZZING_ENGINE': 'libfuzzer', + 'ARCHITECTURE': 'x86_64', + 'FUZZER_ARGS': '-rss_limit_mb=2560 -timeout=25', + 'HTTP_PORT': '', + 'COVERAGE_EXTRA_ARGS': '', + 'CORPUS_DIR': workspace.corpora, + 'COVERAGE_OUTPUT_DIR': workspace.coverage_report + } + mock_execute.assert_called_with(expected_command, env=expected_env) + + +class DownloadCorporaTest(unittest.TestCase): + """Tests for download_corpora.""" + + def test_download_corpora(self): # pylint: disable=no-self-use + """Tests that download_corpora works as intended.""" + clusterfuzz_deployment = mock.Mock() + clusterfuzz_deployment.workspace = test_helpers.create_workspace() + 
fuzz_target_paths = ['/path/to/fuzzer1', '/path/to/fuzzer2'] + expected_calls = [ + mock.call('fuzzer1', '/workspace/cifuzz-corpus/fuzzer1'), + mock.call('fuzzer2', '/workspace/cifuzz-corpus/fuzzer2') + ] + generate_coverage_report.download_corpora(fuzz_target_paths, + clusterfuzz_deployment) + clusterfuzz_deployment.download_corpus.assert_has_calls(expected_calls) diff --git a/local-test-tika-delta-02/fuzz-tooling/infra/cifuzz/get_coverage.py b/local-test-tika-delta-02/fuzz-tooling/infra/cifuzz/get_coverage.py new file mode 100644 index 0000000000000000000000000000000000000000..762de7928cb93f074b73b51a37514e3807104d37 --- /dev/null +++ b/local-test-tika-delta-02/fuzz-tooling/infra/cifuzz/get_coverage.py @@ -0,0 +1,208 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Module for determining coverage of fuzz targets.""" +import json +import logging +import os +import sys + +import http_utils + +# pylint: disable=wrong-import-position,import-error +sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) +import utils + +# The path to get OSS-Fuzz project's latest report json file.` +OSS_FUZZ_LATEST_COVERAGE_INFO_PATH = 'oss-fuzz-coverage/latest_report_info/' + + +# pylint: disable=too-few-public-methods +class CoverageError(Exception): + """Exceptions for project coverage.""" + + +class BaseCoverage: + """Gets coverage data for a project.""" + + def __init__(self, repo_path): + self.repo_path = _normalize_repo_path(repo_path) + + def get_files_covered_by_target(self, target): + """Returns a list of source files covered by the specific fuzz target. + + Args: + target: The name of the fuzz target whose coverage is requested. + + Returns: + A list of files that the fuzz target covers or None. + """ + target_cov = self.get_target_coverage(target) + if not target_cov: + logging.info('No coverage available for %s.', target) + return None + + coverage_per_file = get_coverage_per_file(target_cov) + if not coverage_per_file: + logging.info('No files found in coverage report.') + return None + + affected_file_list = [] + for file_cov in coverage_per_file: + norm_file_path = os.path.normpath(file_cov['filename']) + if not norm_file_path.startswith(self.repo_path): + # Exclude files outside of the main repo. + continue + + if not is_file_covered(file_cov): + # Don't consider a file affected if code in it is never executed. + continue + + # TODO(metzman): It's weird to me that we access file_cov['filename'] + # again and not norm_file_path, figure out if this makes sense. + relative_path = utils.remove_prefix(file_cov['filename'], self.repo_path) + affected_file_list.append(relative_path) + + return affected_file_list + + def get_target_coverage(self, target): + """Get the coverage report for a specific fuzz target. 
+ + Args: + target: The name of the fuzz target whose coverage is requested. + + Returns: + The target's coverage json dict or None on failure. + """ + raise NotImplementedError('Child class must implement method.') + + +class OSSFuzzCoverage(BaseCoverage): + """Gets coverage data for a project from OSS-Fuzz.""" + + def __init__(self, repo_path, oss_fuzz_project_name): + """Constructor for OSSFuzzCoverage.""" + super().__init__(repo_path) + self.oss_fuzz_project_name = oss_fuzz_project_name + self.fuzzer_stats_url = _get_oss_fuzz_fuzzer_stats_dir_url( + self.oss_fuzz_project_name) + if self.fuzzer_stats_url is None: + raise CoverageError('Could not get latest coverage.') + + def get_target_coverage(self, target): + """Get the coverage report for a specific fuzz target. + + Args: + target: The name of the fuzz target whose coverage is requested. + + Returns: + The target's coverage json dict or None on failure. + """ + if not self.fuzzer_stats_url: + return None + + target_url = utils.url_join(self.fuzzer_stats_url, target + '.json') + return http_utils.get_json_from_url(target_url) + + +def _get_oss_fuzz_latest_cov_report_info(oss_fuzz_project_name): + """Gets and returns a dictionary containing the latest coverage report info + for |project|.""" + latest_report_info_url = utils.url_join(utils.GCS_BASE_URL, + OSS_FUZZ_LATEST_COVERAGE_INFO_PATH, + oss_fuzz_project_name + '.json') + latest_cov_info = http_utils.get_json_from_url(latest_report_info_url) + if latest_cov_info is None: + logging.error('Could not get the coverage report json from url: %s.', + latest_report_info_url) + return None + return latest_cov_info + + +def _get_oss_fuzz_fuzzer_stats_dir_url(oss_fuzz_project_name): + """Gets latest coverage report info for a specific OSS-Fuzz project from + GCS. + + Args: + oss_fuzz_project_name: The name of the project. + + Returns: + The projects coverage report info in json dict or None on failure. 
+ """ + latest_cov_info = _get_oss_fuzz_latest_cov_report_info(oss_fuzz_project_name) + + if not latest_cov_info: + return None + + if 'fuzzer_stats_dir' not in latest_cov_info: + logging.error('fuzzer_stats_dir not in latest coverage info.') + return None + + fuzzer_stats_dir_gs_url = latest_cov_info['fuzzer_stats_dir'] + fuzzer_stats_dir_url = utils.gs_url_to_https(fuzzer_stats_dir_gs_url) + return fuzzer_stats_dir_url + + +class FilesystemCoverage(BaseCoverage): + """Class that gets a project's coverage from the filesystem.""" + + def __init__(self, repo_path, project_coverage_dir): + super().__init__(repo_path) + self.project_coverage_dir = project_coverage_dir + + def get_target_coverage(self, target): + """Get the coverage report for a specific fuzz target. + + Args: + target: The name of the fuzz target whose coverage is requested. + + Returns: + The target's coverage json dict or None on failure. + """ + logging.info('Getting coverage for %s from filesystem.', target) + fuzzer_stats_json_path = os.path.join(self.project_coverage_dir, + 'fuzzer_stats', target + '.json') + if not os.path.exists(fuzzer_stats_json_path): + logging.warning('%s does not exist.', fuzzer_stats_json_path) + return None + + with open(fuzzer_stats_json_path) as fuzzer_stats_json_file_handle: + try: + return json.load(fuzzer_stats_json_file_handle) + except json.decoder.JSONDecodeError as err: + logging.error('Could not decode: %s. 
Error: %s.', + fuzzer_stats_json_path, err) + return None + + +def is_file_covered(file_cov): + """Returns whether the file is covered.""" + return file_cov['summary']['regions']['covered'] + + +def get_coverage_per_file(target_cov): + """Returns the coverage per file within |target_cov|.""" + try: + return target_cov['data'][0]['files'] + except (IndexError, TypeError, KeyError): + logging.error('target_cov: %s is malformed.', target_cov) + return None + + +def _normalize_repo_path(repo_path): + """Normalizes and returns |repo_path| to make sure cases like /src/curl and + /src/curl/ are both handled.""" + repo_path = os.path.normpath(repo_path) + if not repo_path.endswith('/'): + repo_path += '/' + return repo_path diff --git a/local-test-tika-delta-02/fuzz-tooling/infra/cifuzz/http_utils_test.py b/local-test-tika-delta-02/fuzz-tooling/infra/cifuzz/http_utils_test.py new file mode 100644 index 0000000000000000000000000000000000000000..64d0598acf5f1b3e0afc92f34d5b660ff989e4f2 --- /dev/null +++ b/local-test-tika-delta-02/fuzz-tooling/infra/cifuzz/http_utils_test.py @@ -0,0 +1,71 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Tests for http_utils.py""" + +import unittest +from unittest import mock + +from pyfakefs import fake_filesystem_unittest + +import http_utils + +mock_get_response = mock.MagicMock(status_code=200, content=b'') + + +class DownloadUrlTest(unittest.TestCase): + """Tests that download_url works.""" + URL = 'https://example.com/file' + FILE_PATH = '/tmp/file' + + @mock.patch('time.sleep') + @mock.patch('requests.get', return_value=mock_get_response) + def test_download_url_no_error(self, mock_urlretrieve, _): + """Tests that download_url works when there is no error.""" + self.assertTrue(http_utils.download_url(self.URL, self.FILE_PATH)) + self.assertEqual(1, mock_urlretrieve.call_count) + + @mock.patch('time.sleep') + @mock.patch('logging.error') + @mock.patch('requests.get', + return_value=mock.MagicMock(status_code=404, content=b'')) + def test_download_url_http_error(self, mock_get, mock_error, _): + """Tests that download_url doesn't retry when there is an HTTP error.""" + self.assertFalse(http_utils.download_url(self.URL, self.FILE_PATH)) + mock_error.assert_called_with( + 'Unable to download from: %s. Code: %d. 
Content: %s.', self.URL, 404, + b'') + self.assertEqual(1, mock_get.call_count) + + @mock.patch('time.sleep') + @mock.patch('requests.get', side_effect=ConnectionResetError) + def test_download_url_connection_error(self, mock_get, mock_sleep): + """Tests that download_url doesn't retry when there is an HTTP error.""" + self.assertFalse(http_utils.download_url(self.URL, self.FILE_PATH)) + self.assertEqual(4, mock_get.call_count) + self.assertEqual(3, mock_sleep.call_count) + + +class DownloadAndUnpackZipTest(fake_filesystem_unittest.TestCase): + """Tests download_and_unpack_zip.""" + + def setUp(self): + self.setUpPyfakefs() + + @mock.patch('requests.get', return_value=mock_get_response) + def test_bad_zip_download(self, _): + """Tests download_and_unpack_zip returns none when a bad zip is passed.""" + self.fs.create_file('/url_tmp.zip', contents='Test file.') + self.assertFalse( + http_utils.download_and_unpack_zip('/not/a/real/url', + '/extract-directory')) diff --git a/local-test-tika-delta-02/fuzz-tooling/infra/cifuzz/logs.py b/local-test-tika-delta-02/fuzz-tooling/infra/cifuzz/logs.py new file mode 100644 index 0000000000000000000000000000000000000000..e6fd83e8a02cdbe73961a4c839ed4444bdad6ec7 --- /dev/null +++ b/local-test-tika-delta-02/fuzz-tooling/infra/cifuzz/logs.py @@ -0,0 +1,25 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Log helpers.""" + +import logging +import os + + +def init(): + """Initialize logging.""" + log_level = logging.DEBUG if os.getenv('CIFUZZ_DEBUG') else logging.INFO + logging.basicConfig( + format='%(asctime)s - %(name)s - %(levelname)s - %(message)s', + level=log_level) diff --git a/local-test-tika-delta-02/fuzz-tooling/infra/cifuzz/run_fuzzers_entrypoint.py b/local-test-tika-delta-02/fuzz-tooling/infra/cifuzz/run_fuzzers_entrypoint.py new file mode 100644 index 0000000000000000000000000000000000000000..924914199677e732c81d7d927553b60341bdbe7b --- /dev/null +++ b/local-test-tika-delta-02/fuzz-tooling/infra/cifuzz/run_fuzzers_entrypoint.py @@ -0,0 +1,97 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Runs a specific OSS-Fuzz project's fuzzers for CI tools.""" +import logging +import sys + +import config_utils +import docker +import logs +import run_fuzzers + +# pylint: disable=c-extension-no-member +# pylint gets confused because of the relative import of cifuzz. 
+ +logs.init() + + +def delete_unneeded_docker_images(config): + """Deletes unneeded docker images if running in an environment with low + disk space.""" + if not config.low_disk_space: + return + logging.info('Deleting builder docker images to save disk space.') + project_image = docker.get_project_image_name(config.oss_fuzz_project_name) + images = [ + project_image, + docker.BASE_BUILDER_TAG, + docker.BASE_BUILDER_TAG + '-go', + docker.BASE_BUILDER_TAG + '-javascript', + docker.BASE_BUILDER_TAG + '-jvm', + docker.BASE_BUILDER_TAG + '-python', + docker.BASE_BUILDER_TAG + '-rust', + docker.BASE_BUILDER_TAG + '-ruby', + docker.BASE_BUILDER_TAG + '-swift', + ] + docker.delete_images(images) + + +def run_fuzzers_entrypoint(): + """This is the entrypoint for the run_fuzzers github action. + This action can be added to any OSS-Fuzz project's workflow that uses + Github.""" + config = config_utils.RunFuzzersConfig() + # The default return code when an error occurs. + returncode = 1 + if config.dry_run: + # Sets the default return code on error to success. + returncode = 0 + + delete_unneeded_docker_images(config) + # Run the specified project's fuzzers from the build. + result = run_fuzzers.run_fuzzers(config) + if result == run_fuzzers.RunFuzzersResult.ERROR: + logging.error('Error occurred while running in workspace %s.', + config.workspace) + return returncode + if result == run_fuzzers.RunFuzzersResult.BUG_FOUND: + logging.info('Bug found.') + if not config.dry_run: + # Return 2 when a bug was found by a fuzzer causing the CI to fail. + return 2 + return 0 + + +def main(): + """Runs project's fuzzers for CI tools. + This is the entrypoint for the run_fuzzers github action. + + NOTE: libFuzzer binaries must be located in the $WORKSPACE/build-out + directory in order for this action to be used. This action will only fuzz the + binaries that are located in that directory. It is recommended that you add + the build_fuzzers action preceding this one. 
+ + NOTE: Any crash report will be in the filepath: + ${GITHUB_WORKSPACE}/out/testcase + This can be used in parallel with the upload-artifact action to surface the + logs. + + Returns: + 0 on success or nonzero on failure. + """ + return run_fuzzers_entrypoint() + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/local-test-tika-delta-02/fuzz-tooling/infra/cifuzz/sarif_utils_test.py b/local-test-tika-delta-02/fuzz-tooling/infra/cifuzz/sarif_utils_test.py new file mode 100644 index 0000000000000000000000000000000000000000..e8870887c071e4694a6550af0a4f432ad3838a8f --- /dev/null +++ b/local-test-tika-delta-02/fuzz-tooling/infra/cifuzz/sarif_utils_test.py @@ -0,0 +1,128 @@ +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Tests for sarif_utils.py""" +import os +import unittest +from unittest import mock + +import sarif_utils + +CRASH_INFO_FILELINE = 403 + +TEST_DATA = os.path.join(os.path.dirname(__file__), 'test_data') + + +class GetSarifDataTest(unittest.TestCase): + """Tests for get_sarif_data.""" + + def setUp(self): + self.maxDiff = None # pylint: disable=invalid-name + + def test_get_sarif_data_none(self): + """Tests get_sarif_data when there was no crash.""" + self.assertEqual(sarif_utils.get_sarif_data(None, '/root/target'), + sarif_utils.SARIF_DATA) + + def test_ordinary_case(self): + stacktrace_filename = os.path.join(TEST_DATA, + 'sarif_utils_systemd_stack.txt') + with open(stacktrace_filename, 'r') as fp: + stacktrace = fp.read() + expected_result = { + 'level': 'error', + 'message': { + 'text': 'Heap-buffer-overflow\nREAD 4' + }, + 'locations': [{ + 'physicalLocation': { + 'artifactLocation': { + 'uri': 'src/core/fuzz-unit-file.c', + 'index': 0 + }, + 'region': { + 'startLine': 30, + # We don't have this granualarity fuzzing. 
+ 'startColumn': 1, + } + } + }], + 'ruleId': 'heap-buffer-overflow', + 'ruleIndex': 2 + } + actual_result = sarif_utils.get_sarif_data( + stacktrace, '/root/target')['runs'][0]['results'][0] + self.assertEqual(actual_result, expected_result) + + def test_llvmfuzzertestoneinput_case(self): + stacktrace_filename = os.path.join(TEST_DATA, + 'sarif_utils_only_llvmfuzzer_stack.txt') + with open(stacktrace_filename, 'r') as fp: + stacktrace = fp.read() + actual_result = sarif_utils.get_sarif_data( + stacktrace, '/root/target')['runs'][0]['results'] + self.assertEqual(actual_result, []) + + def test_msan(self): + """Tests that MSAN stacktraces don't exception.""" + stacktrace_filename = os.path.join(TEST_DATA, 'sarif_utils_msan_stack.txt') + with open(stacktrace_filename, 'r') as fp: + stacktrace = fp.read() + + actual_result = sarif_utils.get_sarif_data(stacktrace, '/root/target') + + +class RedactSrcPathTest(unittest.TestCase): + """Tests for redact_src_path.""" + + def test_redact_src_path(self): + """Tests redact_src_path.""" + path = '/src/src-repo/subdir/file' + self.assertEqual(sarif_utils.redact_src_path(path), 'subdir/file') + + +def _get_mock_crash_info(): + """Returns a mock crash_info to be used in tests.""" + stack_frame = mock.MagicMock() + stack_frame.filename = '/src/repo-dir/sub/vuln.cc' + stack_frame.function_name = 'vuln_func' + stack_frame.fileline = CRASH_INFO_FILELINE + crash1_frames = [stack_frame, stack_frame] + frames = [crash1_frames] + crash_info = mock.MagicMock() + crash_info.frames = frames + crash_info.crash_state = 'vuln_func\nvuln_func0\nvuln_func1' + return crash_info + + +class GetErrorSourceInfoTest(unittest.TestCase): + """Tests for get_error_source_info.""" + + def test_redact_src_path(self): + """Tests that get_error_source_info finds the right source info.""" + crash_info = _get_mock_crash_info() + source_info = sarif_utils.get_error_source_info(crash_info) + expected_source_info = ('sub/vuln.cc', CRASH_INFO_FILELINE) + 
self.assertEqual(source_info, expected_source_info) + + +class GetRuleIndexTest(unittest.TestCase): + """Tests for get_rule_index.""" + CRASH_INFO_CRASH_TYPE = 'Heap-use-after-free READ 8' + + def test_get_rule_index(self): + """Tests that get_rule_index finds the right rule index.""" + index = sarif_utils.get_rule_index(self.CRASH_INFO_CRASH_TYPE) + self.assertEqual(sarif_utils.SARIF_RULES[index]['id'], + 'heap-use-after-free') + self.assertEqual(sarif_utils.get_rule_index('no-crashes'), 0) diff --git a/local-test-tika-delta-02/fuzz-tooling/infra/cifuzz/test_data/external-project/.clusterfuzzlite/Dockerfile b/local-test-tika-delta-02/fuzz-tooling/infra/cifuzz/test_data/external-project/.clusterfuzzlite/Dockerfile new file mode 100644 index 0000000000000000000000000000000000000000..30add4d3a8d3cd9d8efc9d8b97983e161a62205b --- /dev/null +++ b/local-test-tika-delta-02/fuzz-tooling/infra/cifuzz/test_data/external-project/.clusterfuzzlite/Dockerfile @@ -0,0 +1,22 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +################################################################################ + +FROM ghcr.io/aixcc-finals/base-builder +RUN apt-get update && apt-get install -y make + +COPY . 
$SRC/external-project +WORKDIR $SRC/external-project +COPY .clusterfuzzlite/build.sh $SRC/ diff --git a/local-test-tika-delta-02/fuzz-tooling/infra/cifuzz/test_helpers.py b/local-test-tika-delta-02/fuzz-tooling/infra/cifuzz/test_helpers.py new file mode 100644 index 0000000000000000000000000000000000000000..846a8a888ecda9c48f66d5486cb118387fbf1511 --- /dev/null +++ b/local-test-tika-delta-02/fuzz-tooling/infra/cifuzz/test_helpers.py @@ -0,0 +1,117 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Contains convenient helpers for writing tests.""" + +import contextlib +import os +import sys +import shutil +import tempfile +from unittest import mock + +import config_utils +import docker +import workspace_utils + +INFRA_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) +# pylint: disable=wrong-import-position,import-error +sys.path.append(INFRA_DIR) + +import helper + + +# TODO(metzman): Get rid of these decorators. +@mock.patch('config_utils._is_dry_run', return_value=True) +@mock.patch('platform_config.BasePlatformConfig.project_src_path', + return_value=None) +@mock.patch('os.path.basename', return_value=None) +def _create_config(config_cls, _, __, ___, **kwargs): + """Creates a config object from |config_cls| and then sets every attribute + that is a key in |kwargs| to the corresponding value. 
Asserts that each key in + |kwargs| is an attribute of config.""" + with mock.patch('config_utils.BaseConfig.validate', return_value=True): + config = config_cls() + for key, value in kwargs.items(): + assert hasattr(config, key), 'Config doesn\'t have attribute: ' + key + setattr(config, key, value) + + return config + + +def create_build_config(**kwargs): + """Wrapper around _create_config for build configs.""" + return _create_config(config_utils.BuildFuzzersConfig, **kwargs) + + +def create_run_config(**kwargs): + """Wrapper around _create_config for run configs.""" + return _create_config(config_utils.RunFuzzersConfig, **kwargs) + + +def create_workspace(workspace_path='/workspace'): + """Returns a workspace located at |workspace_path| ('/workspace' by + default).""" + config = create_run_config(workspace=workspace_path) + return workspace_utils.Workspace(config) + + +def patch_environ(testcase_obj, env=None, empty=False, runner=False): + """Patch environment. |testcase_obj| is the unittest.TestCase that contains + tests. |env|, if specified, is a dictionary of environment variables to start + from. If |empty| is True then the new patched environment will be empty. If + |runner| is True then the necessary environment variables will be set to run + the scripts from base-runner.""" + if env is None: + env = {} + + patcher = mock.patch.dict(os.environ, env) + testcase_obj.addCleanup(patcher.stop) + patcher.start() + if empty: + for key in os.environ.copy(): + del os.environ[key] + + if runner: + # Add the scripts for base-runner to the path since the wont be in + # /usr/local/bin on host machines during testing. + base_runner_dir = os.path.join(INFRA_DIR, 'base-images', 'base-runner') + os.environ['PATH'] = (os.environ.get('PATH', '') + os.pathsep + + base_runner_dir) + if 'GOPATH' not in os.environ: + # A GOPATH must be set or else the coverage script fails, even for getting + # the coverage of non-Go programs. 
+ os.environ['GOPATH'] = '/root/go' + + +@contextlib.contextmanager +def temp_dir_copy(directory): + """Context manager that yields a temporary copy of |directory|.""" + with tempfile.TemporaryDirectory() as temp_dir: + temp_copy_path = os.path.join(temp_dir, os.path.basename(directory)) + shutil.copytree(directory, temp_copy_path) + yield temp_copy_path + + +@contextlib.contextmanager +def docker_temp_dir(): + """Returns a temporary a directory that is useful for use with docker. On + cleanup this contextmanager uses docker to delete the directory's contents so + that if anything is owned by root it can be deleted (which + tempfile.TemporaryDirectory() cannot do) by non-root users.""" + with tempfile.TemporaryDirectory() as temp_dir: + yield temp_dir + helper.docker_run([ + '-v', f'{temp_dir}:/temp_dir', '-t', docker.BASE_BUILDER_TAG, + '/bin/bash', '-c', 'rm -rf /temp_dir/*' + ]) diff --git a/local-test-tika-delta-02/fuzz-tooling/infra/cifuzz/workspace_utils.py b/local-test-tika-delta-02/fuzz-tooling/infra/cifuzz/workspace_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..ba24456bea54ed0bc61190489cade8ae6e83f4da --- /dev/null +++ b/local-test-tika-delta-02/fuzz-tooling/infra/cifuzz/workspace_utils.py @@ -0,0 +1,85 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Module for representing the workspace directory which CIFuzz uses.""" + +import os +import shutil + + +class Workspace: + """Class representing the workspace directory.""" + + def __init__(self, config): + self.workspace = config.workspace + + def initialize_dir(self, directory): # pylint: disable=no-self-use + """Creates directory if it doesn't already exist, otherwise does nothing.""" + os.makedirs(directory, exist_ok=True) + + @property + def repo_storage(self): + """The parent directory for repo storage.""" + return os.path.join(self.workspace, 'storage') + + @property + def out(self): + """The out directory used for storing the fuzzer build built by + build_fuzzers.""" + # Don't use 'out' because it needs to be used by artifacts. + return os.path.join(self.workspace, 'build-out') + + @property + def work(self): + """The directory used as the work directory for the fuzzer build/run.""" + return os.path.join(self.workspace, 'work') + + @property + def artifacts(self): + """The directory used to store artifacts for download by CI-system users.""" + # This is hardcoded by a lot of clients, so we need to use this. 
+ return os.path.join(self.workspace, 'out', 'artifacts') + + @property + def clusterfuzz_build(self): + """The directory where builds from ClusterFuzz are stored.""" + return os.path.join(self.workspace, 'cifuzz-prev-build') + + @property + def clusterfuzz_coverage(self): + """The directory where builds from ClusterFuzz are stored.""" + return os.path.join(self.workspace, 'cifuzz-prev-coverage') + + @property + def coverage_report(self): + """The directory where coverage reports generated by cifuzz are put.""" + return os.path.join(self.workspace, 'cifuzz-coverage') + + @property + def corpora(self): + """The directory where corpora from ClusterFuzz are stored.""" + return os.path.join(self.workspace, 'cifuzz-corpus') + + @property + def pruned_corpora(self): + """The directory where pruned corpora are stored.""" + return os.path.join(self.workspace, 'cifuzz-pruned-corpus') + + @property + def sarif(self): + """The directory where sarif files are stored.""" + return os.path.join(self.workspace, 'cifuzz-sarif') + + def make_repo_for_sarif(self, repo_manager): + """Copies the repo over for the sarif upload GitHub action.""" + return shutil.copytree(repo_manager.repo_dir, self.sarif, symlinks=True) diff --git a/local-test-tika-delta-02/fuzz-tooling/tools/vscode-extension/src/commands/cmdSetupCIFuzz.ts b/local-test-tika-delta-02/fuzz-tooling/tools/vscode-extension/src/commands/cmdSetupCIFuzz.ts new file mode 100644 index 0000000000000000000000000000000000000000..e11b97bb9a6099d0bac292e8f8a679463d0c242e --- /dev/null +++ b/local-test-tika-delta-02/fuzz-tooling/tools/vscode-extension/src/commands/cmdSetupCIFuzz.ts @@ -0,0 +1,87 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +//////////////////////////////////////////////////////////////////////////////// + +import * as vscode from 'vscode'; +import {println} from '../logger'; +import {determineWorkspaceLanguage} from '../utils'; +import {cifuzzGenerator} from '../cifuzz'; + +export async function setupCIFuzzHandler() { + const workspaceFolder = vscode.workspace.workspaceFolders; + if (!workspaceFolder) { + return false; + } + + const wsPath = workspaceFolder[0].uri.fsPath; // gets the path of the first workspace folder + + /** + * Go through GitHub workflows to find potential traces of CIFuzz + */ + const githubWorkflowsPath = vscode.Uri.file(wsPath + '/.github/workflows'); + try { + await vscode.workspace.fs.readDirectory(githubWorkflowsPath); + } catch { + println('Did not find a workflows path.'); + return false; + } + + for (const [name, type] of await vscode.workspace.fs.readDirectory( + githubWorkflowsPath + )) { + // Skip directories. + if (type === 2) { + continue; + } + + // Read the files. 
+ println('Is a file'); + const workflowFile = vscode.Uri.file(wsPath + '/.github/workflows/' + name); + const doc = await vscode.workspace.openTextDocument(workflowFile); + if (doc.getText().includes('cifuzz')) { + println('Found existing CIFuzz, will not continue.'); + return false; + } + } + + println('Did not find CIFuzz, creating one.'); + const projectName = await vscode.window.showInputBox({ + value: '', + placeHolder: 'OSS-Fuzz project name', + }); + if (!projectName) { + println('Failed to get project name'); + return false; + } + + /* + * There is no CIFuzz found, so we create one. + */ + // Determine the language of the workspace. + const targetLanguage = await determineWorkspaceLanguage(); + println('Target language: ' + targetLanguage); + + // Generate a CIFuzz workflow text. + const cifuzzWorkflowText = cifuzzGenerator(targetLanguage, projectName, 30); + + // Create the CIFuzz .yml file and write the contents to it to path + // .github/workflows/cifuzz.yml + const cifuzzYml = vscode.Uri.file(wsPath + '/.github/workflows/cifuzz.yml'); + const wsedit = new vscode.WorkspaceEdit(); + wsedit.createFile(cifuzzYml, {ignoreIfExists: true}); + wsedit.insert(cifuzzYml, new vscode.Position(0, 0), cifuzzWorkflowText); + vscode.workspace.applyEdit(wsedit); + return true; +} diff --git a/local-test-tika-delta-02/fuzz-tooling/tools/vscode-extension/src/commands/cmdTemplate.ts b/local-test-tika-delta-02/fuzz-tooling/tools/vscode-extension/src/commands/cmdTemplate.ts new file mode 100644 index 0000000000000000000000000000000000000000..964cc6788ba18faec29f4a475ad37b696b6ad375 --- /dev/null +++ b/local-test-tika-delta-02/fuzz-tooling/tools/vscode-extension/src/commands/cmdTemplate.ts @@ -0,0 +1,357 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +//////////////////////////////////////////////////////////////////////////////// + +/** + * Command for generating template fuzzers. This is a short-cut for rapid + * prototyping as well as an archive for inspiration. + */ +import * as vscode from 'vscode'; +import {println} from '../logger'; + +export const cLangSimpleStringFuzzer = `#include +#include +#include + +int +LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) +{ + char *new_str = (char *)malloc(size+1); + if (new_str == NULL){ + return 0; + } + memcpy(new_str, data, size); + new_str[size] = '\\0'; + + // Insert fuzzer contents here + // fuzz data in new_str + + // end of fuzzer contents + + free(new_str); + return 0; +}`; + +const cLangFileInputFuzzer = `int +LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) +{ + char filename[256]; + sprintf(filename, "/tmp/libfuzzer.%d", getpid()); + + // Create a file on the filesystem with fuzzer data in it + FILE *fp = fopen(filename, "wb"); + if (!fp) { + return 0; + } + fwrite(data, size, 1, fp); + fclose(fp); + + // Fuzzer logic here. Use the file as a source of data. + + // Fuzzer logic end + + // Clean up the file. 
+ unlink(filename); + + return 0; +}`; + +const cLangBareTemplateFuzzer = `int +LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) +{ + return 0; +}`; + +const cppLangBareTemplateFuzzer = `extern "C" int +LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) +{ + return 0; +}`; + +const cppLangStdStringTemplateFuzzer = `extern "C" int +LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) +{ + std::string input(reinterpret_cast(data), size); + + return 0; +}`; + +export const cppLangFDPTemplateFuzzer = `#include + +#include + +extern "C" int +LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) +{ + FuzzedDataProvider fdp(data, size); + + // Extract higher level data types used for fuzzing, e.g. + // int ran_int = fdp.ConsumeIntegralInRange(1, 1024); + // std::string s = fdp.ConsumeRandomLengthString(); + + return 0; +}`; + +const cppLangFileInputFuzzer = `extern "C" int +LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) +{ + char filename[256]; + sprintf(filename, "/tmp/libfuzzer.%d", getpid()); + + FILE *fp = fopen(filename, "wb"); + if (!fp) { + return 0; + } + fwrite(data, size, 1, fp); + fclose(fp); + + // Fuzzer logic here + + // Fuzzer logic end + + unlink(filename); +}`; + +const pythonLangBareTemplate = `import sys +import atheris + + +def TestOneInput(fuzz_bytes): + return + + +def main(): + atheris.Setup(sys.argv, TestOneInput) + atheris.Fuzz() + + +if __name__ == "__main__": + main()`; + +export const pythonLangFileInputFuzzer = `import sys +import atheris + +@atheris.instrument_func +def TestOneInput(data): + # Write fuzz data to a file + with open('/tmp/fuzz_input.b') as f: + f.write(data) + + # Use '/tmp/fuzz_input.b' as input to file handling logic. 
+ + +def main(): + atheris.instrument_all() + atheris.Setup(sys.argv, TestOneInput) + atheris.Fuzz() + + +if __name__ == "__main__": + main()`; + +const pythonLongFdpTemplate = `import sys +import atheris + +def TestOneInput(fuzz_bytes): + fdp = atheris.FuzzedDataProvider(fuzz_bytes) + return + +def main(): + atheris.Setup(sys.argv, TestOneInput) + atheris.Fuzz() + +if __name__ == "__main__": + main()`; + +export const javaLangBareTemplate = `import com.code_intelligence.jazzer.api.FuzzedDataProvider; +public class SampleFuzzer { + public static void fuzzerTestOneInput(FuzzedDataProvider fdp) { + // Use fdp to create arbitrary types seeded with fuzz data + } +} +`; + +/** + * C templates + */ +async function cTemplates() { + let template = ''; + const result = await vscode.window.showQuickPick( + ['Bare template', 'Null-terminated string input', 'File input'], + { + placeHolder: 'Pick which template', + } + ); + vscode.window.showInformationMessage(`Got: ${result}`); + + if (result === 'Null-terminated string input') { + template = cLangSimpleStringFuzzer; + } else if (result === 'File input') { + template = cLangFileInputFuzzer; + } else if (result === 'Bare template') { + template = cLangBareTemplateFuzzer; + } else { + template = 'empty'; + } + const workspaceFolder = vscode.workspace.workspaceFolders; + if (!workspaceFolder) { + return; + } + + const wsPath = workspaceFolder[0].uri.fsPath; // gets the path of the first workspace folder + + const cifuzzYml = vscode.Uri.file(wsPath + '/oss-fuzz-template.c'); + const wsedit = new vscode.WorkspaceEdit(); + wsedit.createFile(cifuzzYml, {ignoreIfExists: true}); + wsedit.insert(cifuzzYml, new vscode.Position(0, 0), template); + vscode.workspace.applyEdit(wsedit); + return; +} + +/** + * CPP templates + */ +async function cppTemplates() { + let template = ''; + const result = await vscode.window.showQuickPick( + [ + 'Bare template', + 'Simple CPP string', + 'File input fuzzer', + 'Fuzzed data provider', + ], + { + 
placeHolder: 'Pick which template', + } + ); + vscode.window.showInformationMessage(`Got: ${result}`); + + if (result === 'Bare template') { + template = cppLangBareTemplateFuzzer; + } else if (result === 'Simple CPP string') { + template = cppLangStdStringTemplateFuzzer; + } else if (result === 'File input fuzzer') { + template = cppLangFileInputFuzzer; + } else if (result === 'Fuzzed data provider') { + template = cppLangFDPTemplateFuzzer; + } else { + template = 'empty'; + } + const workspaceFolder = vscode.workspace.workspaceFolders; + if (!workspaceFolder) { + return; + } + + const wsPath = workspaceFolder[0].uri.fsPath; // gets the path of the first workspace folder + + const cifuzzYml = vscode.Uri.file(wsPath + '/oss-fuzz-template.cpp'); + const wsedit = new vscode.WorkspaceEdit(); + wsedit.createFile(cifuzzYml, {ignoreIfExists: true}); + wsedit.insert(cifuzzYml, new vscode.Position(0, 0), template); + vscode.workspace.applyEdit(wsedit); + return; +} + +/** + * Python templates + */ +async function pythonTepmlates() { + let template = ''; + const result = await vscode.window.showQuickPick( + ['Bare template', 'Fuzzed Data Provider', 'File input fuzzer'], + { + placeHolder: 'Pick which template', + } + ); + vscode.window.showInformationMessage(`Got: ${result}`); + + if (result === 'Fuzzed Data Provider') { + template = pythonLongFdpTemplate; + } else if (result === 'Bare template') { + template = pythonLangBareTemplate; + } else if (result === 'File input fuzzer') { + template = pythonLangFileInputFuzzer; + } else { + template = 'empty'; + } + const workspaceFolder = vscode.workspace.workspaceFolders; + if (!workspaceFolder) { + return; + } + + const wsPath = workspaceFolder[0].uri.fsPath; // gets the path of the first workspace folder + + const cifuzzYml = vscode.Uri.file(wsPath + '/oss-fuzz-template.py'); + const wsedit = new vscode.WorkspaceEdit(); + wsedit.createFile(cifuzzYml, {ignoreIfExists: true}); + wsedit.insert(cifuzzYml, new vscode.Position(0, 0), 
template); + vscode.workspace.applyEdit(wsedit); + return; +} + +/** + * Java templates + */ +async function javaTemplates() { + let template = ''; + const result = await vscode.window.showQuickPick(['Bare template'], { + placeHolder: 'Pick which template', + }); + vscode.window.showInformationMessage(`Got: ${result}`); + + if (result === 'Bare template') { + template = javaLangBareTemplate; + } else { + template = 'empty'; + } + const workspaceFolder = vscode.workspace.workspaceFolders; + if (!workspaceFolder) { + return; + } + + const wsPath = workspaceFolder[0].uri.fsPath; // gets the path of the first workspace folder + + const cifuzzYml = vscode.Uri.file(wsPath + '/oss-fuzz-template.java'); + const wsedit = new vscode.WorkspaceEdit(); + wsedit.createFile(cifuzzYml, {ignoreIfExists: true}); + wsedit.insert(cifuzzYml, new vscode.Position(0, 0), template); + vscode.workspace.applyEdit(wsedit); + return; +} + +export async function cmdDispatcherTemplate(context: vscode.ExtensionContext) { + println('Creating template'); + const options: { + [key: string]: (context: vscode.ExtensionContext) => Promise; + } = { + C: cTemplates, + CPP: cppTemplates, + Python: pythonTepmlates, + Java: javaTemplates, + }; + + const quickPick = vscode.window.createQuickPick(); + quickPick.items = Object.keys(options).map(label => ({label})); + quickPick.onDidChangeSelection(selection => { + if (selection[0]) { + options[selection[0].label](context).catch(console.error); + } + }); + quickPick.onDidHide(() => quickPick.dispose()); + quickPick.placeholder = 'Pick language'; + quickPick.show(); + + return; +}