|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
"""Checks code for common issues before submitting.""" |
|
|
|
|
|
import argparse |
|
|
import os |
|
|
import re |
|
|
import subprocess |
|
|
import sys |
|
|
import unittest |
|
|
import yaml |
|
|
|
|
|
import constants |
|
|
|
|
|
# Repository root: the parent of the directory containing this script.
_SRC_ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

# Project directory names may only contain lowercase alphanumerics,
# underscores, and hyphens.
VALID_PROJECT_REGEX_STR = '^[a-z0-9_-]+$'
VALID_PROJECT_REGEX = re.compile(VALID_PROJECT_REGEX_STR)
|
|
|
|
|
|
|
|
def _is_project_file(actual_path, expected_filename): |
|
|
"""Returns True if actual_path's name is |expected_filename| and is a file |
|
|
that exists and is in in projects/.""" |
|
|
if os.path.basename(actual_path) != expected_filename: |
|
|
return False |
|
|
|
|
|
if os.path.basename(os.path.dirname( |
|
|
os.path.dirname(actual_path))) != 'projects': |
|
|
return False |
|
|
|
|
|
return os.path.exists(actual_path) |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def _check_one_lib_fuzzing_engine(build_sh_file):
  """Returns False if |build_sh_file| contains -lFuzzingEngine.

  This is deprecated behavior. $LIB_FUZZING_ENGINE should be used instead
  so that -fsanitize=fuzzer is used."""
  if not _is_project_file(build_sh_file, 'build.sh'):
    # Not a projects/<name>/build.sh file; nothing to check.
    return True

  with open(build_sh_file) as build_sh:
    build_sh_lines = build_sh.readlines()
  # Start at 1 so the reported number matches editor line numbers
  # (enumerate is 0-based by default, which was off by one).
  for line_num, line in enumerate(build_sh_lines, start=1):
    # Strip everything after '#' so commented-out flags don't trigger.
    uncommented_code = line.split('#')[0]
    if '-lFuzzingEngine' in uncommented_code:
      print('Error: build.sh contains deprecated "-lFuzzingEngine" on line: '
            f'{line_num}. Please use "$LIB_FUZZING_ENGINE" instead.')
      return False
  return True
|
|
|
|
|
|
|
|
def check_lib_fuzzing_engine(paths):
  """Calls _check_one_lib_fuzzing_engine on each path in |paths|. Returns True
  if the result of every call is True."""
  # Materialize the list (instead of short-circuiting on a generator) so
  # every file is checked and every error is printed, consistent with the
  # other check_* wrappers in this file.
  return all([_check_one_lib_fuzzing_engine(path) for path in paths])
|
|
|
|
|
|
|
|
class ProjectYamlChecker:
  """Runs validity checks on a single project.yaml file.

  Construct with the path to a project.yaml, then call do_checks(). Each
  failing check prints an error and flips |self.success| to False;
  do_checks() returns the final value of |self.success|.
  """

  # Sections whose values must come from the corresponding allowed-constant
  # lists defined in the constants module.
  SECTIONS_AND_CONSTANTS = {
      'sanitizers': constants.SANITIZERS,
      'architectures': constants.ARCHITECTURES,
      'fuzzing_engines': constants.ENGINES,
  }

  # Every top-level key (section) that may appear in a project.yaml.
  VALID_SECTION_NAMES = [
      'architectures',
      'auto_ccs',
      'blackbox',
      'builds_per_day',
      'coverage_extra_args',
      'disabled',
      'fuzzing_engines',
      'help_url',
      'homepage',
      'language',
      'labels',
      'main_repo',
      'primary_contact',
      'run_tests',
      'sanitizers',
      'selective_unpack',
      'vendor_ccs',
      'view_restrictions',
      'file_github_issue',
  ]

  # Sections every project.yaml is required to define.
  REQUIRED_SECTIONS = ['main_repo']

  def __init__(self, filename):
    """Loads the YAML data from |filename| (a project.yaml path)."""
    self.filename = filename
    with open(filename) as file_handle:
      self.data = yaml.safe_load(file_handle)

    # Set to False by error(); returned by do_checks().
    self.success = True

  def do_checks(self):
    """Does all project.yaml checks. Returns True if they pass."""
    if self.is_disabled():
      # Disabled projects are exempt from validation.
      return True

    checks = [
        self.check_project_yaml_constants,
        self.check_required_sections,
        self.check_valid_section_names,
        self.check_valid_emails,
        self.check_valid_language,
        self.check_valid_project_name,
    ]
    # Run every check (no short-circuiting) so all errors get reported.
    for check_function in checks:
      check_function()
    return self.success

  def is_disabled(self):
    """Returns True if this project is disabled."""
    return self.data.get('disabled', False)

  def error(self, message):
    """Prints an error message and sets self.success to False."""
    self.success = False
    print(f'Error in {self.filename}: {message}')

  def check_valid_project_name(self):
    """Checks that the project has a valid name. Records an error if not."""
    banned_names = ['google', 'g00gle']
    # The project name is the name of the directory containing project.yaml.
    project_name = os.path.basename(os.path.dirname(self.filename))
    for banned_name in banned_names:
      if banned_name in project_name:
        self.error('Projects can\'t have \'google\' in the name.')
    if not VALID_PROJECT_REGEX.match(project_name):
      self.error(f'Projects must conform to regex {VALID_PROJECT_REGEX_STR}')

  def check_project_yaml_constants(self):
    """Checks that constrained sections only contain allowed values.

    Records an error for every offending value."""
    for section, allowed_constants in self.SECTIONS_AND_CONSTANTS.items():
      if section not in self.data:
        continue
      actual_constants = self.data[section]
      allowed_constants_str = ', '.join(allowed_constants)
      for constant in actual_constants:
        if isinstance(constant, str):
          if constant not in allowed_constants:
            self.error(f'{constant} (in {section} section) is not a valid '
                       f'constant ({allowed_constants_str}).')
        elif isinstance(constant, dict):
          # A dict entry maps a single allowed constant to extra options.
          # Use len(constant) != 1 (rather than indexing the first key) so
          # an empty dict is reported as an error instead of raising
          # IndexError.
          if (len(constant) != 1 or
              next(iter(constant)) not in allowed_constants):
            self.error(f'Not allowed value in the project.yaml: {constant}')
        else:
          self.error(f'Not allowed value in the project.yaml: {constant}')

  def check_valid_section_names(self):
    """Checks that all section names are valid. Records an error if not."""
    for name in self.data:
      if name not in self.VALID_SECTION_NAMES:
        self.error(
            f'{name} is not a valid section name ({self.VALID_SECTION_NAMES})')

  def check_required_sections(self):
    """Checks that all required sections are in |self.data|. Records an
    error for each missing one."""
    for section in self.REQUIRED_SECTIONS:
      if section not in self.data:
        self.error(f'{section} section is missing.')

  def check_valid_emails(self):
    """Checks that primary_contact and auto_ccs look like email addresses.

    This is a sanity check only: an address just needs an '@' and a '.'."""
    email_addresses = []
    primary_contact = self.data.get('primary_contact')
    if primary_contact:
      email_addresses.append(primary_contact)
    auto_ccs = self.data.get('auto_ccs')
    if auto_ccs:
      email_addresses.extend(auto_ccs)

    for email_address in email_addresses:
      if '@' not in email_address or '.' not in email_address:
        self.error(f'{email_address} is an invalid email address.')

  def check_valid_language(self):
    """Checks that the language is specified and supported. Records an
    error if not."""
    language = self.data.get('language')
    if not language:
      self.error('Missing "language" attribute in project.yaml.')
    elif language not in constants.LANGUAGES:
      self.error(
          f'"language: {language}" is not supported ({constants.LANGUAGES}).')
|
|
|
|
|
|
|
|
def _check_one_project_yaml(project_yaml_filename):
  """Does checks on the project.yaml file. Returns True on success."""
  # A common mistake: the file must use the .yaml extension, not .yml.
  if _is_project_file(project_yaml_filename, 'project.yml'):
    print(project_yaml_filename, 'must be named project.yaml.')
    return False

  if not _is_project_file(project_yaml_filename, 'project.yaml'):
    # Not a project.yaml file; nothing to validate.
    return True

  return ProjectYamlChecker(project_yaml_filename).do_checks()
|
|
|
|
|
|
|
|
def check_project_yaml(paths):
  """Calls _check_one_project_yaml on each path in |paths|. Returns True if the
  result of every call is True."""
  # Collect all results first so every offending file is reported.
  results = [_check_one_project_yaml(path) for path in paths]
  return all(results)
|
|
|
|
|
|
|
|
def _check_one_seed_corpus(path): |
|
|
"""Returns False and prints error if |path| is a seed corpus.""" |
|
|
if os.path.basename(os.path.dirname(os.path.dirname(path))) != 'projects': |
|
|
return True |
|
|
|
|
|
if os.path.splitext(path)[1] == '.zip': |
|
|
print('Don\'t commit seed corpora into the ClusterFuzz repo,' |
|
|
'they bloat it forever.') |
|
|
return False |
|
|
|
|
|
return True |
|
|
|
|
|
|
|
|
def check_seed_corpus(paths):
  """Calls _check_one_seed_corpus on each path in |paths|. Returns True if the
  result of every call is True."""
  # Collect all results first so every offending file is reported.
  results = [_check_one_seed_corpus(path) for path in paths]
  return all(results)
|
|
|
|
|
|
|
|
def _check_one_apt_update(path): |
|
|
"""Checks that a Dockerfile uses apt-update before apt-install""" |
|
|
if os.path.basename(os.path.dirname(os.path.dirname(path))) != 'projects': |
|
|
return True |
|
|
|
|
|
if os.path.basename(path) != 'Dockerfile': |
|
|
return True |
|
|
|
|
|
with open(path, 'r') as file: |
|
|
dockerfile = file.read() |
|
|
if 'RUN apt install' in dockerfile or 'RUN apt-get install' in dockerfile: |
|
|
print('Please add an "apt-get update" before "apt-get install". ' |
|
|
'Otherwise, a cached and outdated RUN layer may lead to install ' |
|
|
'failures in file %s.' % str(path)) |
|
|
return False |
|
|
|
|
|
return True |
|
|
|
|
|
|
|
|
def check_apt_update(paths):
  """Checks that all Dockerfiles in |paths| use apt-get update before
  apt-get install. Returns True if every file passes."""
  # Collect all results first so every offending file is reported.
  results = [_check_one_apt_update(path) for path in paths]
  return all(results)
|
|
|
|
|
|
|
|
def do_checks(changed_files):
  """Runs all presubmit checks on |changed_files|. Returns False if any
  check fails."""
  all_checks = (
      check_license,
      yapf,
      check_project_yaml,
      check_lib_fuzzing_engine,
      check_seed_corpus,
      check_apt_update,
  )
  # Materialize the results so every check runs (and prints its errors)
  # even after an earlier one has failed.
  results = [check(changed_files) for check in all_checks]
  return all(results)
|
|
|
|
|
|
|
|
# Extensionless filenames that must carry a license header.
_CHECK_LICENSE_FILENAMES = ['Dockerfile']

# File extensions that must carry a license header.
_CHECK_LICENSE_EXTENSIONS = [
    '.bash',
    '.c',
    '.cc',
    '.cpp',
    '.css',
    '.Dockerfile',
    '.go',
    '.h',
    '.htm',
    '.html',
    '.java',
    '.js',
    '.proto',
    '.py',
    '.rs',
    '.sh',
    '.ts',
]
# Files under any directory with this name are exempt from the license check.
THIRD_PARTY_DIR_NAME = 'third_party'

# Presence of this URL in a file is treated as evidence of a license header.
_LICENSE_STRING = 'http://www.apache.org/licenses/LICENSE-2.0'
|
|
|
|
|
|
|
|
def check_license(paths):
  """Validates that each relevant file in |paths| contains the license
  header. Returns True if all of them do."""
  if not paths:
    return True

  success = True
  for path in paths:
    # Vendored code under third_party/ keeps its own license.
    if THIRD_PARTY_DIR_NAME in str(path).split(os.sep):
      continue
    filename = os.path.basename(path)
    extension = os.path.splitext(path)[1]
    needs_license = (filename in _CHECK_LICENSE_FILENAMES or
                     extension in _CHECK_LICENSE_EXTENSIONS)
    if not needs_license:
      continue

    with open(path) as file_handle:
      contents = file_handle.read()
    if _LICENSE_STRING not in contents:
      print('Missing license header in file %s.' % str(path))
      success = False

  return success
|
|
|
|
|
|
|
|
def bool_to_returncode(success):
  """Maps |success| to a process exit code: 0 for True, 1 for False."""
  print('Success.' if success else 'Failed.')
  return 0 if success else 1
|
|
|
|
|
|
|
|
def is_nonfuzzer_python(path):
  """Returns True if |path| is a Python file that is not under projects/."""
  _, extension = os.path.splitext(path)
  return extension == '.py' and '/projects/' not in path
|
|
|
|
|
|
|
|
def lint(_=None):
  """Runs pylint on the infra directory. Returns False if it fails linting.

  The unused argument keeps the signature compatible with the other
  per-command entry points that receive a file list."""
  command = ['python3', '-m', 'pylint', '--score', 'no', '-j', '0', 'infra']
  result = subprocess.run(command, check=False)
  return result.returncode == 0
|
|
|
|
|
|
|
|
def yapf(paths, validate=True):
  """Runs yapf over the Python files in |paths|. Only validates formatting
  when |validate| is True; otherwise rewrites the files in place. Returns
  False if validation or formatting fails."""
  python_paths = [path for path in paths if is_nonfuzzer_python(path)]
  if not python_paths:
    return True

  # '-d' prints a diff (validation); '-i' edits files in place.
  mode_flag = '-d' if validate else '-i'
  command = ['yapf', mode_flag, '-p', *python_paths]

  result = subprocess.run(command, check=False)
  return result.returncode == 0
|
|
|
|
|
|
|
|
def get_changed_files():
  """Returns a list of absolute paths of files changed in this git branch."""
  # Diff against the point where this branch diverged from origin/HEAD.
  branch_commit_hash = subprocess.check_output(
      ['git', 'merge-base', 'HEAD', 'origin/HEAD']).strip().decode()

  diff_commands = [
      # Committed changes on this branch.
      ['git', 'diff', '--name-only', branch_commit_hash + '..'],
      # Uncommitted changes in the working tree.
      ['git', 'diff', '--name-only']
  ]

  changed_files = set()
  for command in diff_commands:
    output = subprocess.check_output(command).decode()
    # Skip deleted/renamed-away paths that no longer exist on disk.
    changed_files.update(file_path for file_path in output.splitlines()
                         if os.path.isfile(file_path))
  print(f'Changed files: {" ".join(changed_files)}')
  return [os.path.abspath(f) for f in changed_files]
|
|
|
|
|
|
|
|
def run_build_tests():
  """Runs build tests because they can't be run in parallel."""
  build_dir = os.path.join(_SRC_ROOT, 'infra', 'build')
  discovered = unittest.TestLoader().discover(build_dir, pattern='*_test.py')
  suite = unittest.TestSuite([discovered])
  print('Running build tests.')
  result = unittest.TextTestRunner().run(suite)
  # Success means no test failures and no test errors.
  return not (result.failures or result.errors)
|
|
|
|
|
|
|
|
def run_nonbuild_tests(parallel):
  """Runs all tests but build tests. Does them in parallel if |parallel|. The
  reason why we exclude build tests is because they use an emulator that
  prevents them from being used in parallel."""
  # Point pytest at every directory that contains a tracked file.
  relevant_dirs = {os.path.dirname(file_path) for file_path in get_all_files()}

  command = [
      'pytest',
      # Build tests run separately (see run_build_tests).
      '--ignore-glob=infra/build/*',
      '--ignore-glob=projects/*',
  ]
  if parallel:
    command += ['-n', 'auto']
  command.extend(relevant_dirs)
  print('Running non-build tests.')

  # Run the tests with CIFUZZ_TEST=1 in their environment.
  # NOTE(review): presumably tests read this to detect a test run — confirm.
  env = dict(os.environ, CIFUZZ_TEST='1')

  return subprocess.run(command, check=False, env=env).returncode == 0
|
|
|
|
|
|
|
|
def run_tests(_=None, parallel=False, build_tests=True, nonbuild_tests=True):
  """Runs all unit tests: the non-build tests (optionally in parallel) and
  then the sequential build tests. Either group can be skipped via the
  |nonbuild_tests| / |build_tests| flags. Returns True if all run tests
  pass."""
  if nonbuild_tests:
    nonbuild_success = run_nonbuild_tests(parallel)
  else:
    print('Skipping nonbuild tests as specified.')
    nonbuild_success = True

  if build_tests:
    build_success = run_build_tests()
  else:
    print('Skipping build tests as specified.')
    build_success = True

  return nonbuild_success and build_success
|
|
|
|
|
|
|
|
def run_systemsan_tests(_=None):
  """Runs SystemSan unit tests via its Makefile. The unused argument keeps
  the signature compatible with the other per-command entry points."""
  result = subprocess.run(['make', 'test'],
                          cwd='infra/experimental/SystemSan',
                          check=False)
  return result.returncode == 0
|
|
|
|
|
|
|
|
def get_all_files():
  """Returns a list of absolute paths of git-tracked files in this repo."""
  output = subprocess.check_output(['git', 'ls-files']).decode()
  # Filter out tracked paths that no longer exist on disk.
  return [
      os.path.abspath(path)
      for path in output.splitlines()
      if os.path.isfile(path)
  ]
|
|
|
|
|
|
|
|
def main():
  """Check changes on a branch for common issues before submitting."""

  parser = argparse.ArgumentParser(description='Presubmit script for oss-fuzz.')
  # Optional positional command; when omitted, all presubmit checks
  # (do_checks) run.
  parser.add_argument(
      'command',
      choices=['format', 'lint', 'license', 'infra-tests', 'systemsan-tests'],
      nargs='?')
  parser.add_argument('-a',
                      '--all-files',
                      action='store_true',
                      help='Run presubmit check(s) on all files',
                      default=False)
  parser.add_argument('-p',
                      '--parallel',
                      action='store_true',
                      help='Run tests in parallel.',
                      default=False)
  parser.add_argument('-s',
                      '--skip-build-tests',
                      action='store_true',
                      help='Skip build tests which are slow and must run '
                      'sequentially.',
                      default=False)
  parser.add_argument('-n',
                      '--skip-nonbuild-tests',
                      action='store_true',
                      help='Only do build tests.',
                      default=False)
  args = parser.parse_args()

  # Determine the set of files the checks operate on: everything tracked,
  # or just the files changed on this branch.
  if args.all_files:
    relevant_files = get_all_files()
  else:
    relevant_files = get_changed_files()

  # The checks and test commands assume the repository root as cwd.
  # (The file lists above are absolute paths, so the chdir is safe.)
  os.chdir(_SRC_ROOT)

  # Single-purpose commands: run one action and exit with 0 or 1.
  if args.command == 'format':
    success = yapf(relevant_files, False)
    return bool_to_returncode(success)

  if args.command == 'lint':
    success = lint()
    return bool_to_returncode(success)

  if args.command == 'license':
    success = check_license(relevant_files)
    return bool_to_returncode(success)

  if args.command == 'infra-tests':
    success = run_tests(relevant_files,
                        parallel=args.parallel,
                        build_tests=(not args.skip_build_tests),
                        nonbuild_tests=(not args.skip_nonbuild_tests))
    return bool_to_returncode(success)

  if args.command == 'systemsan-tests':
    success = run_systemsan_tests(relevant_files)
    return bool_to_returncode(success)

  # No command given: run the standard presubmit checks.
  success = do_checks(relevant_files)

  return bool_to_returncode(success)
|
|
|
|
|
|
|
|
if __name__ == '__main__':
  # Exit with 0 on success, 1 on failure (see bool_to_returncode).
  sys.exit(main())
|
|
|