repo_name
stringlengths
5
100
path
stringlengths
4
294
copies
stringclasses
990 values
size
stringlengths
4
7
content
stringlengths
666
1M
license
stringclasses
15 values
FederatedAI/FATE
python/federatedml/feature/feature_selection/test/manually_filter_test.py
1
3518
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright 2019 The FATE Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import unittest import uuid import numpy as np from fate_arch.session import computing_session as session from federatedml.feature.feature_selection.filter_factory import get_filter from federatedml.feature.feature_selection.selection_properties import SelectionProperties from federatedml.param.feature_selection_param import FeatureSelectionParam from federatedml.util import consts class TestManuallyFilter(unittest.TestCase): def setUp(self): self.job_id = str(uuid.uuid1()) session.init(self.job_id) def gen_data(self, data_num, feature_num, partition): data = [] header = [str(i) for i in range(feature_num)] col_data = [] for _ in range(feature_num): col_1 = np.random.randn(data_num) col_data.append(col_1) for key in range(data_num): data.append((key, np.array([col[key] for col in col_data]))) result = session.parallelize(data, include_key=True, partition=partition) result.schema = {'header': header} self.header = header return result def test_filter_logic(self): data_table = self.gen_data(1000, 10, 48) select_param = FeatureSelectionParam() select_param.manually_param.filter_out_indexes = [9, 8, 7] select_param.manually_param.filter_out_names = ['6', '5', '4'] filter_obj = get_filter(consts.MANUALLY_FILTER, select_param) select_properties = SelectionProperties() select_properties.set_header(self.header) 
select_properties.set_last_left_col_indexes([x for x in range(len(self.header))]) select_properties.set_select_all_cols() filter_obj.set_selection_properties(select_properties) res_select_properties = filter_obj.fit(data_table, suffix='').selection_properties result = ['0', '1', '2', '3'] self.assertEqual(res_select_properties.all_left_col_names, result) def test_left_logic(self): data_table = self.gen_data(1000, 10, 48) select_param = FeatureSelectionParam() select_param.manually_param.left_col_indexes = [0, 1] select_param.manually_param.left_col_names = ['3', '2'] filter_obj = get_filter(consts.MANUALLY_FILTER, select_param) select_properties = SelectionProperties() select_properties.set_header(self.header) select_properties.set_last_left_col_indexes([x for x in range(len(self.header))]) select_properties.set_select_all_cols() filter_obj.set_selection_properties(select_properties) res_select_properties = filter_obj.fit(data_table, suffix='').selection_properties result = ['0', '1', '2', '3'] self.assertEqual(res_select_properties.all_left_col_names, result) def tearDown(self): session.stop() if __name__ == '__main__': unittest.main()
apache-2.0
damdam-s/hr
__unported__/hr_worked_days_hourly_rate/hr_payslip_worked_days.py
27
2555
# -*- coding:utf-8 -*- ############################################################################## # # Copyright (C) 2014 Savoir-faire Linux. All Rights Reserved. # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published # by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## from openerp.osv import fields, orm from datetime import datetime from openerp.tools import DEFAULT_SERVER_DATE_FORMAT class hr_payslip_worked_days(orm.Model): _inherit = 'hr.payslip.worked_days' def _get_total( self, cr, uid, ids, field_name, arg=None, context=None ): res = {} for wd in self.browse(cr, uid, ids, context=context): res[wd.id] = wd.number_of_hours \ * wd.hourly_rate * wd.rate / 100 return res _columns = { 'hourly_rate': fields.float( 'Hourly Rate', help="""\ The employee's standard hourly rate for one hour of work. Example, 25 Euros per hour.""" ), 'rate': fields.float( 'Rate (%)', help="""\ The rate by which to multiply the standard hourly rate. Example, an overtime hour could be paid the standard rate multiplied by 150%. """ ), # When a worked day has a number of hours and an hourly rate, # it is necessary to have a date interval, # because hourly rates are likely to change over the time. 
'date_from': fields.date('Date From'), 'date_to': fields.date('Date To'), 'total': fields.function( _get_total, method=True, type="float", string="Total", ), } _defaults = { 'hourly_rate': 0, 'rate': 100, 'date_from': lambda *a: datetime.now().strftime( DEFAULT_SERVER_DATE_FORMAT), 'date_to': lambda *a: datetime.now().strftime( DEFAULT_SERVER_DATE_FORMAT) }
agpl-3.0
Nicop06/ansible
test/units/mock/procenv.py
141
2636
# (c) 2016, Matt Davis <mdavis@ansible.com> # (c) 2016, Toshio Kuratomi <tkuratomi@ansible.com> # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. # Make coding more python3-ish from __future__ import (absolute_import, division, print_function) __metaclass__ = type import sys import json from contextlib import contextmanager from io import BytesIO, StringIO from ansible.compat.tests import unittest from ansible.module_utils.six import PY3 from ansible.module_utils._text import to_bytes @contextmanager def swap_stdin_and_argv(stdin_data='', argv_data=tuple()): """ context manager that temporarily masks the test runner's values for stdin and argv """ real_stdin = sys.stdin real_argv = sys.argv if PY3: fake_stream = StringIO(stdin_data) fake_stream.buffer = BytesIO(to_bytes(stdin_data)) else: fake_stream = BytesIO(to_bytes(stdin_data)) try: sys.stdin = fake_stream sys.argv = argv_data yield finally: sys.stdin = real_stdin sys.argv = real_argv @contextmanager def swap_stdout(): """ context manager that temporarily replaces stdout for tests that need to verify output """ old_stdout = sys.stdout if PY3: fake_stream = StringIO() else: fake_stream = BytesIO() try: sys.stdout = fake_stream yield fake_stream finally: sys.stdout = old_stdout class ModuleTestCase(unittest.TestCase): def setUp(self, module_args=None): if module_args is None: module_args = {} args = 
json.dumps(dict(ANSIBLE_MODULE_ARGS=module_args)) # unittest doesn't have a clean place to use a context manager, so we have to enter/exit manually self.stdin_swap = swap_stdin_and_argv(stdin_data=args) self.stdin_swap.__enter__() def tearDown(self): # unittest doesn't have a clean place to use a context manager, so we have to enter/exit manually self.stdin_swap.__exit__(None, None, None)
gpl-3.0
johngian/mozillians
mozillians/groups/tests/__init__.py
4
1081
import factory from mozillians.groups.models import Group, GroupAlias, Invite, Skill, SkillAlias class GroupFactory(factory.DjangoModelFactory): name = factory.Sequence(lambda n: 'Group {0}'.format(n)) class Meta: model = Group @factory.post_generation def curators(self, create, extracted, **kwargs): if not create: return if extracted: for curator in extracted: self.curators.add(curator) class SkillFactory(factory.DjangoModelFactory): name = factory.Sequence(lambda n: 'Skill {0}'.format(n)) class Meta: model = Skill class GroupAliasFactory(factory.DjangoModelFactory): url = factory.Sequence(lambda n: 'alias-{0}'.format(n)) class Meta: model = GroupAlias class SkillAliasFactory(factory.DjangoModelFactory): url = factory.Sequence(lambda n: 'alias-{0}'.format(n)) class Meta: model = SkillAlias class InviteFactory(factory.DjangoModelFactory): group = factory.SubFactory(GroupFactory) class Meta: model = Invite
bsd-3-clause
Linux-Box/lbgui
lib/python/Components/Converter/ServiceOrbitalPosition.py
102
1768
# -*- coding: utf-8 -*- from Components.Converter.Converter import Converter from enigma import iServiceInformation, iPlayableService, iPlayableServicePtr, eServiceCenter from ServiceReference import resolveAlternate from Components.Element import cached class ServiceOrbitalPosition(Converter, object): FULL = 0 SHORT = 1 def __init__(self, type): Converter.__init__(self, type) if type == "Short": self.type = self.SHORT else: self.type = self.FULL @cached def getText(self): service = self.source.service if isinstance(service, iPlayableServicePtr): info = service and service.info() ref = None else: # reference info = service and self.source.info ref = service if not info: return "" if ref: nref = resolveAlternate(ref) if nref: ref = nref info = eServiceCenter.getInstance().info(ref) transponder_info = info.getInfoObject(ref, iServiceInformation.sTransponderData) else: transponder_info = info.getInfoObject(iServiceInformation.sTransponderData) if transponder_info: tunerType = transponder_info["tuner_type"] if tunerType == "DVB-S": pos = int(transponder_info["orbital_position"]) direction = 'E' if pos > 1800: pos = 3600 - pos direction = 'W' if self.type == self.SHORT: return "%d.%d%s" % (pos/10, pos%10, direction) else: return "%d.%d° %s" % (pos/10, pos%10, direction) return tunerType if ref: refString = ref.toString().lower() if "%3a//" in refString: return _("Stream") if refString.startswith("1:134:"): return _("Alternative") return "" text = property(getText) def changed(self, what): if what[0] != self.CHANGED_SPECIFIC or what[1] in [iPlayableService.evStart]: Converter.changed(self, what)
gpl-2.0
mensler/ansible
contrib/inventory/openshift.py
196
3274
#!/usr/bin/env python # (c) 2013, Michael Scherer <misc@zarb.org> # # This file is part of Ansible, # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. DOCUMENTATION = ''' --- inventory: openshift short_description: Openshift gears external inventory script description: - Generates inventory of Openshift gears using the REST interface - this permit to reuse playbook to setup an Openshift gear version_added: None author: Michael Scherer ''' try: import json except ImportError: import simplejson as json import os import os.path import sys import ConfigParser import StringIO from ansible.module_utils.urls import open_url configparser = None def get_from_rhc_config(variable): global configparser CONF_FILE = os.path.expanduser('~/.openshift/express.conf') if os.path.exists(CONF_FILE): if not configparser: ini_str = '[root]\n' + open(CONF_FILE, 'r').read() configparser = ConfigParser.SafeConfigParser() configparser.readfp(StringIO.StringIO(ini_str)) try: return configparser.get('root', variable) except ConfigParser.NoOptionError: return None def get_config(env_var, config_var): result = os.getenv(env_var) if not result: result = get_from_rhc_config(config_var) if not result: sys.exit("failed=True msg='missing %s'" % env_var) return result def get_json_from_api(url, username, password): headers = {'Accept': 'application/json; version=1.5'} response = open_url(url, headers=headers, url_username=username, 
url_password=password) return json.loads(response.read())['data'] username = get_config('ANSIBLE_OPENSHIFT_USERNAME', 'default_rhlogin') password = get_config('ANSIBLE_OPENSHIFT_PASSWORD', 'password') broker_url = 'https://%s/broker/rest/' % get_config('ANSIBLE_OPENSHIFT_BROKER', 'libra_server') response = get_json_from_api(broker_url + '/domains', username, password) response = get_json_from_api("%s/domains/%s/applications" % (broker_url, response[0]['id']), username, password) result = {} for app in response: # ssh://520311404832ce3e570000ff@blog-johndoe.example.org (user, host) = app['ssh_url'][6:].split('@') app_name = host.split('-')[0] result[app_name] = {} result[app_name]['hosts'] = [] result[app_name]['hosts'].append(host) result[app_name]['vars'] = {} result[app_name]['vars']['ansible_ssh_user'] = user if len(sys.argv) == 2 and sys.argv[1] == '--list': print(json.dumps(result)) elif len(sys.argv) == 3 and sys.argv[1] == '--host': print(json.dumps({})) else: print("Need an argument, either --list or --host <host>")
gpl-3.0
ganyuling/nvm
npm/node_modules/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja.py
1284
100329
# Copyright (c) 2013 Google Inc. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import collections import copy import hashlib import json import multiprocessing import os.path import re import signal import subprocess import sys import gyp import gyp.common from gyp.common import OrderedSet import gyp.msvs_emulation import gyp.MSVSUtil as MSVSUtil import gyp.xcode_emulation from cStringIO import StringIO from gyp.common import GetEnvironFallback import gyp.ninja_syntax as ninja_syntax generator_default_variables = { 'EXECUTABLE_PREFIX': '', 'EXECUTABLE_SUFFIX': '', 'STATIC_LIB_PREFIX': 'lib', 'STATIC_LIB_SUFFIX': '.a', 'SHARED_LIB_PREFIX': 'lib', # Gyp expects the following variables to be expandable by the build # system to the appropriate locations. Ninja prefers paths to be # known at gyp time. To resolve this, introduce special # variables starting with $! and $| (which begin with a $ so gyp knows it # should be treated specially, but is otherwise an invalid # ninja/shell variable) that are passed to gyp here but expanded # before writing out into the target .ninja files; see # ExpandSpecial. # $! is used for variables that represent a path and that can only appear at # the start of a string, while $| is used for variables that can appear # anywhere in a string. 'INTERMEDIATE_DIR': '$!INTERMEDIATE_DIR', 'SHARED_INTERMEDIATE_DIR': '$!PRODUCT_DIR/gen', 'PRODUCT_DIR': '$!PRODUCT_DIR', 'CONFIGURATION_NAME': '$|CONFIGURATION_NAME', # Special variables that may be used by gyp 'rule' targets. # We generate definitions for these variables on the fly when processing a # rule. 'RULE_INPUT_ROOT': '${root}', 'RULE_INPUT_DIRNAME': '${dirname}', 'RULE_INPUT_PATH': '${source}', 'RULE_INPUT_EXT': '${ext}', 'RULE_INPUT_NAME': '${name}', } # Placates pylint. 
generator_additional_non_configuration_keys = [] generator_additional_path_sections = [] generator_extra_sources_for_rules = [] generator_filelist_paths = None generator_supports_multiple_toolsets = gyp.common.CrossCompileRequested() def StripPrefix(arg, prefix): if arg.startswith(prefix): return arg[len(prefix):] return arg def QuoteShellArgument(arg, flavor): """Quote a string such that it will be interpreted as a single argument by the shell.""" # Rather than attempting to enumerate the bad shell characters, just # whitelist common OK ones and quote anything else. if re.match(r'^[a-zA-Z0-9_=.\\/-]+$', arg): return arg # No quoting necessary. if flavor == 'win': return gyp.msvs_emulation.QuoteForRspFile(arg) return "'" + arg.replace("'", "'" + '"\'"' + "'") + "'" def Define(d, flavor): """Takes a preprocessor define and returns a -D parameter that's ninja- and shell-escaped.""" if flavor == 'win': # cl.exe replaces literal # characters with = in preprocesor definitions for # some reason. Octal-encode to work around that. d = d.replace('#', '\\%03o' % ord('#')) return QuoteShellArgument(ninja_syntax.escape('-D' + d), flavor) def AddArch(output, arch): """Adds an arch string to an output path.""" output, extension = os.path.splitext(output) return '%s.%s%s' % (output, arch, extension) class Target(object): """Target represents the paths used within a single gyp target. Conceptually, building a single target A is a series of steps: 1) actions/rules/copies generates source/resources/etc. 2) compiles generates .o files 3) link generates a binary (library/executable) 4) bundle merges the above in a mac bundle (Any of these steps can be optional.) From a build ordering perspective, a dependent target B could just depend on the last output of this series of steps. But some dependent commands sometimes need to reach inside the box. For example, when linking B it needs to get the path to the static library generated by A. This object stores those paths. 
To keep things simple, member variables only store concrete paths to single files, while methods compute derived values like "the last output of the target". """ def __init__(self, type): # Gyp type ("static_library", etc.) of this target. self.type = type # File representing whether any input dependencies necessary for # dependent actions have completed. self.preaction_stamp = None # File representing whether any input dependencies necessary for # dependent compiles have completed. self.precompile_stamp = None # File representing the completion of actions/rules/copies, if any. self.actions_stamp = None # Path to the output of the link step, if any. self.binary = None # Path to the file representing the completion of building the bundle, # if any. self.bundle = None # On Windows, incremental linking requires linking against all the .objs # that compose a .lib (rather than the .lib itself). That list is stored # here. In this case, we also need to save the compile_deps for the target, # so that the the target that directly depends on the .objs can also depend # on those. self.component_objs = None self.compile_deps = None # Windows only. The import .lib is the output of a build step, but # because dependents only link against the lib (not both the lib and the # dll) we keep track of the import library here. self.import_lib = None def Linkable(self): """Return true if this is a target that can be linked against.""" return self.type in ('static_library', 'shared_library') def UsesToc(self, flavor): """Return true if the target should produce a restat rule based on a TOC file.""" # For bundles, the .TOC should be produced for the binary, not for # FinalOutput(). But the naive approach would put the TOC file into the # bundle, so don't do this for bundles for now. 
if flavor == 'win' or self.bundle: return False return self.type in ('shared_library', 'loadable_module') def PreActionInput(self, flavor): """Return the path, if any, that should be used as a dependency of any dependent action step.""" if self.UsesToc(flavor): return self.FinalOutput() + '.TOC' return self.FinalOutput() or self.preaction_stamp def PreCompileInput(self): """Return the path, if any, that should be used as a dependency of any dependent compile step.""" return self.actions_stamp or self.precompile_stamp def FinalOutput(self): """Return the last output of the target, which depends on all prior steps.""" return self.bundle or self.binary or self.actions_stamp # A small discourse on paths as used within the Ninja build: # All files we produce (both at gyp and at build time) appear in the # build directory (e.g. out/Debug). # # Paths within a given .gyp file are always relative to the directory # containing the .gyp file. Call these "gyp paths". This includes # sources as well as the starting directory a given gyp rule/action # expects to be run from. We call the path from the source root to # the gyp file the "base directory" within the per-.gyp-file # NinjaWriter code. # # All paths as written into the .ninja files are relative to the build # directory. Call these paths "ninja paths". # # We translate between these two notions of paths with two helper # functions: # # - GypPathToNinja translates a gyp path (i.e. relative to the .gyp file) # into the equivalent ninja path. # # - GypPathToUniqueOutput translates a gyp path into a ninja path to write # an output file; the result can be namespaced such that it is unique # to the input file name as well as the output target name. 
class NinjaWriter(object): def __init__(self, hash_for_rules, target_outputs, base_dir, build_dir, output_file, toplevel_build, output_file_name, flavor, toplevel_dir=None): """ base_dir: path from source root to directory containing this gyp file, by gyp semantics, all input paths are relative to this build_dir: path from source root to build output toplevel_dir: path to the toplevel directory """ self.hash_for_rules = hash_for_rules self.target_outputs = target_outputs self.base_dir = base_dir self.build_dir = build_dir self.ninja = ninja_syntax.Writer(output_file) self.toplevel_build = toplevel_build self.output_file_name = output_file_name self.flavor = flavor self.abs_build_dir = None if toplevel_dir is not None: self.abs_build_dir = os.path.abspath(os.path.join(toplevel_dir, build_dir)) self.obj_ext = '.obj' if flavor == 'win' else '.o' if flavor == 'win': # See docstring of msvs_emulation.GenerateEnvironmentFiles(). self.win_env = {} for arch in ('x86', 'x64'): self.win_env[arch] = 'environment.' + arch # Relative path from build output dir to base dir. build_to_top = gyp.common.InvertRelativePath(build_dir, toplevel_dir) self.build_to_base = os.path.join(build_to_top, base_dir) # Relative path from base dir to build dir. base_to_top = gyp.common.InvertRelativePath(base_dir, toplevel_dir) self.base_to_build = os.path.join(base_to_top, build_dir) def ExpandSpecial(self, path, product_dir=None): """Expand specials like $!PRODUCT_DIR in |path|. If |product_dir| is None, assumes the cwd is already the product dir. Otherwise, |product_dir| is the relative path to the product dir. 
""" PRODUCT_DIR = '$!PRODUCT_DIR' if PRODUCT_DIR in path: if product_dir: path = path.replace(PRODUCT_DIR, product_dir) else: path = path.replace(PRODUCT_DIR + '/', '') path = path.replace(PRODUCT_DIR + '\\', '') path = path.replace(PRODUCT_DIR, '.') INTERMEDIATE_DIR = '$!INTERMEDIATE_DIR' if INTERMEDIATE_DIR in path: int_dir = self.GypPathToUniqueOutput('gen') # GypPathToUniqueOutput generates a path relative to the product dir, # so insert product_dir in front if it is provided. path = path.replace(INTERMEDIATE_DIR, os.path.join(product_dir or '', int_dir)) CONFIGURATION_NAME = '$|CONFIGURATION_NAME' path = path.replace(CONFIGURATION_NAME, self.config_name) return path def ExpandRuleVariables(self, path, root, dirname, source, ext, name): if self.flavor == 'win': path = self.msvs_settings.ConvertVSMacros( path, config=self.config_name) path = path.replace(generator_default_variables['RULE_INPUT_ROOT'], root) path = path.replace(generator_default_variables['RULE_INPUT_DIRNAME'], dirname) path = path.replace(generator_default_variables['RULE_INPUT_PATH'], source) path = path.replace(generator_default_variables['RULE_INPUT_EXT'], ext) path = path.replace(generator_default_variables['RULE_INPUT_NAME'], name) return path def GypPathToNinja(self, path, env=None): """Translate a gyp path to a ninja path, optionally expanding environment variable references in |path| with |env|. 
See the above discourse on path conversions.""" if env: if self.flavor == 'mac': path = gyp.xcode_emulation.ExpandEnvVars(path, env) elif self.flavor == 'win': path = gyp.msvs_emulation.ExpandMacros(path, env) if path.startswith('$!'): expanded = self.ExpandSpecial(path) if self.flavor == 'win': expanded = os.path.normpath(expanded) return expanded if '$|' in path: path = self.ExpandSpecial(path) assert '$' not in path, path return os.path.normpath(os.path.join(self.build_to_base, path)) def GypPathToUniqueOutput(self, path, qualified=True): """Translate a gyp path to a ninja path for writing output. If qualified is True, qualify the resulting filename with the name of the target. This is necessary when e.g. compiling the same path twice for two separate output targets. See the above discourse on path conversions.""" path = self.ExpandSpecial(path) assert not path.startswith('$'), path # Translate the path following this scheme: # Input: foo/bar.gyp, target targ, references baz/out.o # Output: obj/foo/baz/targ.out.o (if qualified) # obj/foo/baz/out.o (otherwise) # (and obj.host instead of obj for cross-compiles) # # Why this scheme and not some other one? # 1) for a given input, you can compute all derived outputs by matching # its path, even if the input is brought via a gyp file with '..'. # 2) simple files like libraries and stamps have a simple filename. obj = 'obj' if self.toolset != 'target': obj += '.' + self.toolset path_dir, path_basename = os.path.split(path) assert not os.path.isabs(path_dir), ( "'%s' can not be absolute path (see crbug.com/462153)." % path_dir) if qualified: path_basename = self.name + '.' + path_basename return os.path.normpath(os.path.join(obj, self.base_dir, path_dir, path_basename)) def WriteCollapsedDependencies(self, name, targets, order_only=None): """Given a list of targets, return a path for a single file representing the result of building all the targets or None. 
Uses a stamp file if necessary.""" assert targets == filter(None, targets), targets if len(targets) == 0: assert not order_only return None if len(targets) > 1 or order_only: stamp = self.GypPathToUniqueOutput(name + '.stamp') targets = self.ninja.build(stamp, 'stamp', targets, order_only=order_only) self.ninja.newline() return targets[0] def _SubninjaNameForArch(self, arch): output_file_base = os.path.splitext(self.output_file_name)[0] return '%s.%s.ninja' % (output_file_base, arch) def WriteSpec(self, spec, config_name, generator_flags): """The main entry point for NinjaWriter: write the build rules for a spec. Returns a Target object, which represents the output paths for this spec. Returns None if there are no outputs (e.g. a settings-only 'none' type target).""" self.config_name = config_name self.name = spec['target_name'] self.toolset = spec['toolset'] config = spec['configurations'][config_name] self.target = Target(spec['type']) self.is_standalone_static_library = bool( spec.get('standalone_static_library', 0)) # Track if this target contains any C++ files, to decide if gcc or g++ # should be used for linking. 
self.uses_cpp = False self.is_mac_bundle = gyp.xcode_emulation.IsMacBundle(self.flavor, spec) self.xcode_settings = self.msvs_settings = None if self.flavor == 'mac': self.xcode_settings = gyp.xcode_emulation.XcodeSettings(spec) if self.flavor == 'win': self.msvs_settings = gyp.msvs_emulation.MsvsSettings(spec, generator_flags) arch = self.msvs_settings.GetArch(config_name) self.ninja.variable('arch', self.win_env[arch]) self.ninja.variable('cc', '$cl_' + arch) self.ninja.variable('cxx', '$cl_' + arch) self.ninja.variable('cc_host', '$cl_' + arch) self.ninja.variable('cxx_host', '$cl_' + arch) self.ninja.variable('asm', '$ml_' + arch) if self.flavor == 'mac': self.archs = self.xcode_settings.GetActiveArchs(config_name) if len(self.archs) > 1: self.arch_subninjas = dict( (arch, ninja_syntax.Writer( OpenOutput(os.path.join(self.toplevel_build, self._SubninjaNameForArch(arch)), 'w'))) for arch in self.archs) # Compute predepends for all rules. # actions_depends is the dependencies this target depends on before running # any of its action/rule/copy steps. # compile_depends is the dependencies this target depends on before running # any of its compile steps. actions_depends = [] compile_depends = [] # TODO(evan): it is rather confusing which things are lists and which # are strings. Fix these. if 'dependencies' in spec: for dep in spec['dependencies']: if dep in self.target_outputs: target = self.target_outputs[dep] actions_depends.append(target.PreActionInput(self.flavor)) compile_depends.append(target.PreCompileInput()) actions_depends = filter(None, actions_depends) compile_depends = filter(None, compile_depends) actions_depends = self.WriteCollapsedDependencies('actions_depends', actions_depends) compile_depends = self.WriteCollapsedDependencies('compile_depends', compile_depends) self.target.preaction_stamp = actions_depends self.target.precompile_stamp = compile_depends # Write out actions, rules, and copies. 
These must happen before we # compile any sources, so compute a list of predependencies for sources # while we do it. extra_sources = [] mac_bundle_depends = [] self.target.actions_stamp = self.WriteActionsRulesCopies( spec, extra_sources, actions_depends, mac_bundle_depends) # If we have actions/rules/copies, we depend directly on those, but # otherwise we depend on dependent target's actions/rules/copies etc. # We never need to explicitly depend on previous target's link steps, # because no compile ever depends on them. compile_depends_stamp = (self.target.actions_stamp or compile_depends) # Write out the compilation steps, if any. link_deps = [] sources = extra_sources + spec.get('sources', []) if sources: if self.flavor == 'mac' and len(self.archs) > 1: # Write subninja file containing compile and link commands scoped to # a single arch if a fat binary is being built. for arch in self.archs: self.ninja.subninja(self._SubninjaNameForArch(arch)) pch = None if self.flavor == 'win': gyp.msvs_emulation.VerifyMissingSources( sources, self.abs_build_dir, generator_flags, self.GypPathToNinja) pch = gyp.msvs_emulation.PrecompiledHeader( self.msvs_settings, config_name, self.GypPathToNinja, self.GypPathToUniqueOutput, self.obj_ext) else: pch = gyp.xcode_emulation.MacPrefixHeader( self.xcode_settings, self.GypPathToNinja, lambda path, lang: self.GypPathToUniqueOutput(path + '-' + lang)) link_deps = self.WriteSources( self.ninja, config_name, config, sources, compile_depends_stamp, pch, spec) # Some actions/rules output 'sources' that are already object files. obj_outputs = [f for f in sources if f.endswith(self.obj_ext)] if obj_outputs: if self.flavor != 'mac' or len(self.archs) == 1: link_deps += [self.GypPathToNinja(o) for o in obj_outputs] else: print "Warning: Actions/rules writing object files don't work with " \ "multiarch targets, dropping. 
(target %s)" % spec['target_name'] elif self.flavor == 'mac' and len(self.archs) > 1: link_deps = collections.defaultdict(list) compile_deps = self.target.actions_stamp or actions_depends if self.flavor == 'win' and self.target.type == 'static_library': self.target.component_objs = link_deps self.target.compile_deps = compile_deps # Write out a link step, if needed. output = None is_empty_bundle = not link_deps and not mac_bundle_depends if link_deps or self.target.actions_stamp or actions_depends: output = self.WriteTarget(spec, config_name, config, link_deps, compile_deps) if self.is_mac_bundle: mac_bundle_depends.append(output) # Bundle all of the above together, if needed. if self.is_mac_bundle: output = self.WriteMacBundle(spec, mac_bundle_depends, is_empty_bundle) if not output: return None assert self.target.FinalOutput(), output return self.target def _WinIdlRule(self, source, prebuild, outputs): """Handle the implicit VS .idl rule for one source file. Fills |outputs| with files that are generated.""" outdir, output, vars, flags = self.msvs_settings.GetIdlBuildData( source, self.config_name) outdir = self.GypPathToNinja(outdir) def fix_path(path, rel=None): path = os.path.join(outdir, path) dirname, basename = os.path.split(source) root, ext = os.path.splitext(basename) path = self.ExpandRuleVariables( path, root, dirname, source, ext, basename) if rel: path = os.path.relpath(path, rel) return path vars = [(name, fix_path(value, outdir)) for name, value in vars] output = [fix_path(p) for p in output] vars.append(('outdir', outdir)) vars.append(('idlflags', flags)) input = self.GypPathToNinja(source) self.ninja.build(output, 'idl', input, variables=vars, order_only=prebuild) outputs.extend(output) def WriteWinIdlFiles(self, spec, prebuild): """Writes rules to match MSVS's implicit idl handling.""" assert self.flavor == 'win' if self.msvs_settings.HasExplicitIdlRulesOrActions(spec): return [] outputs = [] for source in filter(lambda x: x.endswith('.idl'), 
                         spec['sources']):
      self._WinIdlRule(source, prebuild, outputs)
    return outputs

  def WriteActionsRulesCopies(self, spec, extra_sources, prebuild,
                              mac_bundle_depends):
    """Write out the Actions, Rules, and Copies steps.  Return a path
    representing the outputs of these steps."""
    outputs = []
    if self.is_mac_bundle:
      # Copy so the rule/action expansion below can append without mutating
      # the caller's spec.
      mac_bundle_resources = spec.get('mac_bundle_resources', [])[:]
    else:
      mac_bundle_resources = []
    extra_mac_bundle_resources = []

    if 'actions' in spec:
      outputs += self.WriteActions(spec['actions'], extra_sources, prebuild,
                                   extra_mac_bundle_resources)
    if 'rules' in spec:
      outputs += self.WriteRules(spec['rules'], extra_sources, prebuild,
                                 mac_bundle_resources,
                                 extra_mac_bundle_resources)
    if 'copies' in spec:
      outputs += self.WriteCopies(spec['copies'], prebuild,
                                  mac_bundle_depends)

    if 'sources' in spec and self.flavor == 'win':
      # MSVS has implicit handling for .idl sources; emulate it here.
      outputs += self.WriteWinIdlFiles(spec, prebuild)

    # Collapse all of the above into a single stamp so dependents need only
    # one order-only input.
    stamp = self.WriteCollapsedDependencies('actions_rules_copies', outputs)

    if self.is_mac_bundle:
      xcassets = self.WriteMacBundleResources(
          extra_mac_bundle_resources + mac_bundle_resources,
          mac_bundle_depends)
      partial_info_plist = self.WriteMacXCassets(xcassets, mac_bundle_depends)
      self.WriteMacInfoPlist(partial_info_plist, mac_bundle_depends)

    return stamp

  def GenerateDescription(self, verb, message, fallback):
    """Generate and return a description of a build step.

    |verb| is the short summary, e.g. ACTION or RULE.
    |message| is a hand-written description, or None if not available.
    |fallback| is the gyp-level name of the step, usable as a fallback.
    """
    if self.toolset != 'target':
      verb += '(%s)' % self.toolset
    if message:
      return '%s %s' % (verb, self.ExpandSpecial(message))
    else:
      return '%s %s: %s' % (verb, self.name, fallback)

  def WriteActions(self, actions, extra_sources, prebuild,
                   extra_mac_bundle_resources):
    """Write one ninja rule + build edge per gyp 'action'; returns outputs."""
    # Actions cd into the base directory.
    env = self.GetToolchainEnv()
    all_outputs = []
    for action in actions:
      # First write out a rule for the action.
name = '%s_%s' % (action['action_name'], self.hash_for_rules) description = self.GenerateDescription('ACTION', action.get('message', None), name) is_cygwin = (self.msvs_settings.IsRuleRunUnderCygwin(action) if self.flavor == 'win' else False) args = action['action'] depfile = action.get('depfile', None) if depfile: depfile = self.ExpandSpecial(depfile, self.base_to_build) pool = 'console' if int(action.get('ninja_use_console', 0)) else None rule_name, _ = self.WriteNewNinjaRule(name, args, description, is_cygwin, env, pool, depfile=depfile) inputs = [self.GypPathToNinja(i, env) for i in action['inputs']] if int(action.get('process_outputs_as_sources', False)): extra_sources += action['outputs'] if int(action.get('process_outputs_as_mac_bundle_resources', False)): extra_mac_bundle_resources += action['outputs'] outputs = [self.GypPathToNinja(o, env) for o in action['outputs']] # Then write out an edge using the rule. self.ninja.build(outputs, rule_name, inputs, order_only=prebuild) all_outputs += outputs self.ninja.newline() return all_outputs def WriteRules(self, rules, extra_sources, prebuild, mac_bundle_resources, extra_mac_bundle_resources): env = self.GetToolchainEnv() all_outputs = [] for rule in rules: # Skip a rule with no action and no inputs. if 'action' not in rule and not rule.get('rule_sources', []): continue # First write out a rule for the rule action. name = '%s_%s' % (rule['rule_name'], self.hash_for_rules) args = rule['action'] description = self.GenerateDescription( 'RULE', rule.get('message', None), ('%s ' + generator_default_variables['RULE_INPUT_PATH']) % name) is_cygwin = (self.msvs_settings.IsRuleRunUnderCygwin(rule) if self.flavor == 'win' else False) pool = 'console' if int(rule.get('ninja_use_console', 0)) else None rule_name, args = self.WriteNewNinjaRule( name, args, description, is_cygwin, env, pool) # TODO: if the command references the outputs directly, we should # simplify it to just use $out. 
# Rules can potentially make use of some special variables which # must vary per source file. # Compute the list of variables we'll need to provide. special_locals = ('source', 'root', 'dirname', 'ext', 'name') needed_variables = set(['source']) for argument in args: for var in special_locals: if '${%s}' % var in argument: needed_variables.add(var) def cygwin_munge(path): # pylint: disable=cell-var-from-loop if is_cygwin: return path.replace('\\', '/') return path inputs = [self.GypPathToNinja(i, env) for i in rule.get('inputs', [])] # If there are n source files matching the rule, and m additional rule # inputs, then adding 'inputs' to each build edge written below will # write m * n inputs. Collapsing reduces this to m + n. sources = rule.get('rule_sources', []) num_inputs = len(inputs) if prebuild: num_inputs += 1 if num_inputs > 2 and len(sources) > 2: inputs = [self.WriteCollapsedDependencies( rule['rule_name'], inputs, order_only=prebuild)] prebuild = [] # For each source file, write an edge that generates all the outputs. for source in sources: source = os.path.normpath(source) dirname, basename = os.path.split(source) root, ext = os.path.splitext(basename) # Gather the list of inputs and outputs, expanding $vars if possible. outputs = [self.ExpandRuleVariables(o, root, dirname, source, ext, basename) for o in rule['outputs']] if int(rule.get('process_outputs_as_sources', False)): extra_sources += outputs was_mac_bundle_resource = source in mac_bundle_resources if was_mac_bundle_resource or \ int(rule.get('process_outputs_as_mac_bundle_resources', False)): extra_mac_bundle_resources += outputs # Note: This is n_resources * n_outputs_in_rule. Put to-be-removed # items in a set and remove them all in a single pass if this becomes # a performance issue. 
if was_mac_bundle_resource: mac_bundle_resources.remove(source) extra_bindings = [] for var in needed_variables: if var == 'root': extra_bindings.append(('root', cygwin_munge(root))) elif var == 'dirname': # '$dirname' is a parameter to the rule action, which means # it shouldn't be converted to a Ninja path. But we don't # want $!PRODUCT_DIR in there either. dirname_expanded = self.ExpandSpecial(dirname, self.base_to_build) extra_bindings.append(('dirname', cygwin_munge(dirname_expanded))) elif var == 'source': # '$source' is a parameter to the rule action, which means # it shouldn't be converted to a Ninja path. But we don't # want $!PRODUCT_DIR in there either. source_expanded = self.ExpandSpecial(source, self.base_to_build) extra_bindings.append(('source', cygwin_munge(source_expanded))) elif var == 'ext': extra_bindings.append(('ext', ext)) elif var == 'name': extra_bindings.append(('name', cygwin_munge(basename))) else: assert var == None, repr(var) outputs = [self.GypPathToNinja(o, env) for o in outputs] if self.flavor == 'win': # WriteNewNinjaRule uses unique_name for creating an rsp file on win. extra_bindings.append(('unique_name', hashlib.md5(outputs[0]).hexdigest())) self.ninja.build(outputs, rule_name, self.GypPathToNinja(source), implicit=inputs, order_only=prebuild, variables=extra_bindings) all_outputs.extend(outputs) return all_outputs def WriteCopies(self, copies, prebuild, mac_bundle_depends): outputs = [] env = self.GetToolchainEnv() for copy in copies: for path in copy['files']: # Normalize the path so trailing slashes don't confuse us. 
path = os.path.normpath(path) basename = os.path.split(path)[1] src = self.GypPathToNinja(path, env) dst = self.GypPathToNinja(os.path.join(copy['destination'], basename), env) outputs += self.ninja.build(dst, 'copy', src, order_only=prebuild) if self.is_mac_bundle: # gyp has mac_bundle_resources to copy things into a bundle's # Resources folder, but there's no built-in way to copy files to other # places in the bundle. Hence, some targets use copies for this. Check # if this file is copied into the current bundle, and if so add it to # the bundle depends so that dependent targets get rebuilt if the copy # input changes. if dst.startswith(self.xcode_settings.GetBundleContentsFolderPath()): mac_bundle_depends.append(dst) return outputs def WriteMacBundleResources(self, resources, bundle_depends): """Writes ninja edges for 'mac_bundle_resources'.""" xcassets = [] for output, res in gyp.xcode_emulation.GetMacBundleResources( generator_default_variables['PRODUCT_DIR'], self.xcode_settings, map(self.GypPathToNinja, resources)): output = self.ExpandSpecial(output) if os.path.splitext(output)[-1] != '.xcassets': isBinary = self.xcode_settings.IsBinaryOutputFormat(self.config_name) self.ninja.build(output, 'mac_tool', res, variables=[('mactool_cmd', 'copy-bundle-resource'), \ ('binary', isBinary)]) bundle_depends.append(output) else: xcassets.append(res) return xcassets def WriteMacXCassets(self, xcassets, bundle_depends): """Writes ninja edges for 'mac_bundle_resources' .xcassets files. This add an invocation of 'actool' via the 'mac_tool.py' helper script. It assumes that the assets catalogs define at least one imageset and thus an Assets.car file will be generated in the application resources directory. 
If this is not the case, then the build will probably be done at each invocation of ninja.""" if not xcassets: return extra_arguments = {} settings_to_arg = { 'XCASSETS_APP_ICON': 'app-icon', 'XCASSETS_LAUNCH_IMAGE': 'launch-image', } settings = self.xcode_settings.xcode_settings[self.config_name] for settings_key, arg_name in settings_to_arg.iteritems(): value = settings.get(settings_key) if value: extra_arguments[arg_name] = value partial_info_plist = None if extra_arguments: partial_info_plist = self.GypPathToUniqueOutput( 'assetcatalog_generated_info.plist') extra_arguments['output-partial-info-plist'] = partial_info_plist outputs = [] outputs.append( os.path.join( self.xcode_settings.GetBundleResourceFolder(), 'Assets.car')) if partial_info_plist: outputs.append(partial_info_plist) keys = QuoteShellArgument(json.dumps(extra_arguments), self.flavor) extra_env = self.xcode_settings.GetPerTargetSettings() env = self.GetSortedXcodeEnv(additional_settings=extra_env) env = self.ComputeExportEnvString(env) bundle_depends.extend(self.ninja.build( outputs, 'compile_xcassets', xcassets, variables=[('env', env), ('keys', keys)])) return partial_info_plist def WriteMacInfoPlist(self, partial_info_plist, bundle_depends): """Write build rules for bundle Info.plist files.""" info_plist, out, defines, extra_env = gyp.xcode_emulation.GetMacInfoPlist( generator_default_variables['PRODUCT_DIR'], self.xcode_settings, self.GypPathToNinja) if not info_plist: return out = self.ExpandSpecial(out) if defines: # Create an intermediate file to store preprocessed results. 
intermediate_plist = self.GypPathToUniqueOutput( os.path.basename(info_plist)) defines = ' '.join([Define(d, self.flavor) for d in defines]) info_plist = self.ninja.build( intermediate_plist, 'preprocess_infoplist', info_plist, variables=[('defines',defines)]) env = self.GetSortedXcodeEnv(additional_settings=extra_env) env = self.ComputeExportEnvString(env) if partial_info_plist: intermediate_plist = self.GypPathToUniqueOutput('merged_info.plist') info_plist = self.ninja.build( intermediate_plist, 'merge_infoplist', [partial_info_plist, info_plist]) keys = self.xcode_settings.GetExtraPlistItems(self.config_name) keys = QuoteShellArgument(json.dumps(keys), self.flavor) isBinary = self.xcode_settings.IsBinaryOutputFormat(self.config_name) self.ninja.build(out, 'copy_infoplist', info_plist, variables=[('env', env), ('keys', keys), ('binary', isBinary)]) bundle_depends.append(out) def WriteSources(self, ninja_file, config_name, config, sources, predepends, precompiled_header, spec): """Write build rules to compile all of |sources|.""" if self.toolset == 'host': self.ninja.variable('ar', '$ar_host') self.ninja.variable('cc', '$cc_host') self.ninja.variable('cxx', '$cxx_host') self.ninja.variable('ld', '$ld_host') self.ninja.variable('ldxx', '$ldxx_host') self.ninja.variable('nm', '$nm_host') self.ninja.variable('readelf', '$readelf_host') if self.flavor != 'mac' or len(self.archs) == 1: return self.WriteSourcesForArch( self.ninja, config_name, config, sources, predepends, precompiled_header, spec) else: return dict((arch, self.WriteSourcesForArch( self.arch_subninjas[arch], config_name, config, sources, predepends, precompiled_header, spec, arch=arch)) for arch in self.archs) def WriteSourcesForArch(self, ninja_file, config_name, config, sources, predepends, precompiled_header, spec, arch=None): """Write build rules to compile all of |sources|.""" extra_defines = [] if self.flavor == 'mac': cflags = self.xcode_settings.GetCflags(config_name, arch=arch) cflags_c = 
self.xcode_settings.GetCflagsC(config_name) cflags_cc = self.xcode_settings.GetCflagsCC(config_name) cflags_objc = ['$cflags_c'] + \ self.xcode_settings.GetCflagsObjC(config_name) cflags_objcc = ['$cflags_cc'] + \ self.xcode_settings.GetCflagsObjCC(config_name) elif self.flavor == 'win': asmflags = self.msvs_settings.GetAsmflags(config_name) cflags = self.msvs_settings.GetCflags(config_name) cflags_c = self.msvs_settings.GetCflagsC(config_name) cflags_cc = self.msvs_settings.GetCflagsCC(config_name) extra_defines = self.msvs_settings.GetComputedDefines(config_name) # See comment at cc_command for why there's two .pdb files. pdbpath_c = pdbpath_cc = self.msvs_settings.GetCompilerPdbName( config_name, self.ExpandSpecial) if not pdbpath_c: obj = 'obj' if self.toolset != 'target': obj += '.' + self.toolset pdbpath = os.path.normpath(os.path.join(obj, self.base_dir, self.name)) pdbpath_c = pdbpath + '.c.pdb' pdbpath_cc = pdbpath + '.cc.pdb' self.WriteVariableList(ninja_file, 'pdbname_c', [pdbpath_c]) self.WriteVariableList(ninja_file, 'pdbname_cc', [pdbpath_cc]) self.WriteVariableList(ninja_file, 'pchprefix', [self.name]) else: cflags = config.get('cflags', []) cflags_c = config.get('cflags_c', []) cflags_cc = config.get('cflags_cc', []) # Respect environment variables related to build, but target-specific # flags can still override them. 
if self.toolset == 'target': cflags_c = (os.environ.get('CPPFLAGS', '').split() + os.environ.get('CFLAGS', '').split() + cflags_c) cflags_cc = (os.environ.get('CPPFLAGS', '').split() + os.environ.get('CXXFLAGS', '').split() + cflags_cc) elif self.toolset == 'host': cflags_c = (os.environ.get('CPPFLAGS_host', '').split() + os.environ.get('CFLAGS_host', '').split() + cflags_c) cflags_cc = (os.environ.get('CPPFLAGS_host', '').split() + os.environ.get('CXXFLAGS_host', '').split() + cflags_cc) defines = config.get('defines', []) + extra_defines self.WriteVariableList(ninja_file, 'defines', [Define(d, self.flavor) for d in defines]) if self.flavor == 'win': self.WriteVariableList(ninja_file, 'asmflags', map(self.ExpandSpecial, asmflags)) self.WriteVariableList(ninja_file, 'rcflags', [QuoteShellArgument(self.ExpandSpecial(f), self.flavor) for f in self.msvs_settings.GetRcflags(config_name, self.GypPathToNinja)]) include_dirs = config.get('include_dirs', []) env = self.GetToolchainEnv() if self.flavor == 'win': include_dirs = self.msvs_settings.AdjustIncludeDirs(include_dirs, config_name) self.WriteVariableList(ninja_file, 'includes', [QuoteShellArgument('-I' + self.GypPathToNinja(i, env), self.flavor) for i in include_dirs]) if self.flavor == 'win': midl_include_dirs = config.get('midl_include_dirs', []) midl_include_dirs = self.msvs_settings.AdjustMidlIncludeDirs( midl_include_dirs, config_name) self.WriteVariableList(ninja_file, 'midl_includes', [QuoteShellArgument('-I' + self.GypPathToNinja(i, env), self.flavor) for i in midl_include_dirs]) pch_commands = precompiled_header.GetPchBuildCommands(arch) if self.flavor == 'mac': # Most targets use no precompiled headers, so only write these if needed. 
for ext, var in [('c', 'cflags_pch_c'), ('cc', 'cflags_pch_cc'), ('m', 'cflags_pch_objc'), ('mm', 'cflags_pch_objcc')]: include = precompiled_header.GetInclude(ext, arch) if include: ninja_file.variable(var, include) arflags = config.get('arflags', []) self.WriteVariableList(ninja_file, 'cflags', map(self.ExpandSpecial, cflags)) self.WriteVariableList(ninja_file, 'cflags_c', map(self.ExpandSpecial, cflags_c)) self.WriteVariableList(ninja_file, 'cflags_cc', map(self.ExpandSpecial, cflags_cc)) if self.flavor == 'mac': self.WriteVariableList(ninja_file, 'cflags_objc', map(self.ExpandSpecial, cflags_objc)) self.WriteVariableList(ninja_file, 'cflags_objcc', map(self.ExpandSpecial, cflags_objcc)) self.WriteVariableList(ninja_file, 'arflags', map(self.ExpandSpecial, arflags)) ninja_file.newline() outputs = [] has_rc_source = False for source in sources: filename, ext = os.path.splitext(source) ext = ext[1:] obj_ext = self.obj_ext if ext in ('cc', 'cpp', 'cxx'): command = 'cxx' self.uses_cpp = True elif ext == 'c' or (ext == 'S' and self.flavor != 'win'): command = 'cc' elif ext == 's' and self.flavor != 'win': # Doesn't generate .o.d files. command = 'cc_s' elif (self.flavor == 'win' and ext == 'asm' and not self.msvs_settings.HasExplicitAsmRules(spec)): command = 'asm' # Add the _asm suffix as msvs is capable of handling .cc and # .asm files of the same name without collision. obj_ext = '_asm.obj' elif self.flavor == 'mac' and ext == 'm': command = 'objc' elif self.flavor == 'mac' and ext == 'mm': command = 'objcxx' self.uses_cpp = True elif self.flavor == 'win' and ext == 'rc': command = 'rc' obj_ext = '.res' has_rc_source = True else: # Ignore unhandled extensions. 
        # Ignore unhandled extensions.
        continue
      input = self.GypPathToNinja(source)
      output = self.GypPathToUniqueOutput(filename + obj_ext)
      if arch is not None:
        output = AddArch(output, arch)
      # Object files depend on any precompiled header they use.
      implicit = precompiled_header.GetObjDependencies([input], [output], arch)
      variables = []
      if self.flavor == 'win':
        variables, output, implicit = precompiled_header.GetFlagsModifications(
            input, output, implicit, command, cflags_c, cflags_cc,
            self.ExpandSpecial)
      ninja_file.build(output, command, input,
                       implicit=[gch for _, _, gch in implicit],
                       order_only=predepends, variables=variables)
      outputs.append(output)

    if has_rc_source:
      # .rc files get their own include-search variable (resource_includes),
      # defaulting to the regular include_dirs.
      resource_include_dirs = config.get('resource_include_dirs', include_dirs)
      self.WriteVariableList(ninja_file, 'resource_includes',
          [QuoteShellArgument('-I' + self.GypPathToNinja(i, env), self.flavor)
           for i in resource_include_dirs])

    self.WritePchTargets(ninja_file, pch_commands)

    ninja_file.newline()
    return outputs

  def WritePchTargets(self, ninja_file, pch_commands):
    """Writes ninja rules to compile prefix headers."""
    if not pch_commands:
      return

    for gch, lang_flag, lang, input in pch_commands:
      # Per-language variable carrying the pch-specific cflags.
      var_name = {
        'c': 'cflags_pch_c',
        'cc': 'cflags_pch_cc',
        'm': 'cflags_pch_objc',
        'mm': 'cflags_pch_objcc',
      }[lang]

      # NOTE(review): 'map' shadows the builtin; kept as-is for fidelity.
      # Maps source language to the compile rule name.
      map = { 'c': 'cc', 'cc': 'cxx', 'm': 'objc', 'mm': 'objcxx', }
      cmd = map.get(lang)
      ninja_file.build(gch, cmd, input, variables=[(var_name, lang_flag)])

  def WriteLink(self, spec, config_name, config, link_deps):
    """Write out a link step. Fills out target.binary.
""" if self.flavor != 'mac' or len(self.archs) == 1: return self.WriteLinkForArch( self.ninja, spec, config_name, config, link_deps) else: output = self.ComputeOutput(spec) inputs = [self.WriteLinkForArch(self.arch_subninjas[arch], spec, config_name, config, link_deps[arch], arch=arch) for arch in self.archs] extra_bindings = [] build_output = output if not self.is_mac_bundle: self.AppendPostbuildVariable(extra_bindings, spec, output, output) # TODO(yyanagisawa): more work needed to fix: # https://code.google.com/p/gyp/issues/detail?id=411 if (spec['type'] in ('shared_library', 'loadable_module') and not self.is_mac_bundle): extra_bindings.append(('lib', output)) self.ninja.build([output, output + '.TOC'], 'solipo', inputs, variables=extra_bindings) else: self.ninja.build(build_output, 'lipo', inputs, variables=extra_bindings) return output def WriteLinkForArch(self, ninja_file, spec, config_name, config, link_deps, arch=None): """Write out a link step. Fills out target.binary. """ command = { 'executable': 'link', 'loadable_module': 'solink_module', 'shared_library': 'solink', }[spec['type']] command_suffix = '' implicit_deps = set() solibs = set() order_deps = set() if 'dependencies' in spec: # Two kinds of dependencies: # - Linkable dependencies (like a .a or a .so): add them to the link line. 
# - Non-linkable dependencies (like a rule that generates a file # and writes a stamp file): add them to implicit_deps extra_link_deps = set() for dep in spec['dependencies']: target = self.target_outputs.get(dep) if not target: continue linkable = target.Linkable() if linkable: new_deps = [] if (self.flavor == 'win' and target.component_objs and self.msvs_settings.IsUseLibraryDependencyInputs(config_name)): new_deps = target.component_objs if target.compile_deps: order_deps.add(target.compile_deps) elif self.flavor == 'win' and target.import_lib: new_deps = [target.import_lib] elif target.UsesToc(self.flavor): solibs.add(target.binary) implicit_deps.add(target.binary + '.TOC') else: new_deps = [target.binary] for new_dep in new_deps: if new_dep not in extra_link_deps: extra_link_deps.add(new_dep) link_deps.append(new_dep) final_output = target.FinalOutput() if not linkable or final_output != target.binary: implicit_deps.add(final_output) extra_bindings = [] if self.uses_cpp and self.flavor != 'win': extra_bindings.append(('ld', '$ldxx')) output = self.ComputeOutput(spec, arch) if arch is None and not self.is_mac_bundle: self.AppendPostbuildVariable(extra_bindings, spec, output, output) is_executable = spec['type'] == 'executable' # The ldflags config key is not used on mac or win. On those platforms # linker flags are set via xcode_settings and msvs_settings, respectively. 
env_ldflags = os.environ.get('LDFLAGS', '').split() if self.flavor == 'mac': ldflags = self.xcode_settings.GetLdflags(config_name, self.ExpandSpecial(generator_default_variables['PRODUCT_DIR']), self.GypPathToNinja, arch) ldflags = env_ldflags + ldflags elif self.flavor == 'win': manifest_base_name = self.GypPathToUniqueOutput( self.ComputeOutputFileName(spec)) ldflags, intermediate_manifest, manifest_files = \ self.msvs_settings.GetLdflags(config_name, self.GypPathToNinja, self.ExpandSpecial, manifest_base_name, output, is_executable, self.toplevel_build) ldflags = env_ldflags + ldflags self.WriteVariableList(ninja_file, 'manifests', manifest_files) implicit_deps = implicit_deps.union(manifest_files) if intermediate_manifest: self.WriteVariableList( ninja_file, 'intermediatemanifest', [intermediate_manifest]) command_suffix = _GetWinLinkRuleNameSuffix( self.msvs_settings.IsEmbedManifest(config_name)) def_file = self.msvs_settings.GetDefFile(self.GypPathToNinja) if def_file: implicit_deps.add(def_file) else: # Respect environment variables related to build, but target-specific # flags can still override them. 
ldflags = env_ldflags + config.get('ldflags', []) if is_executable and len(solibs): rpath = 'lib/' if self.toolset != 'target': rpath += self.toolset ldflags.append(r'-Wl,-rpath=\$$ORIGIN/%s' % rpath) ldflags.append('-Wl,-rpath-link=%s' % rpath) self.WriteVariableList(ninja_file, 'ldflags', map(self.ExpandSpecial, ldflags)) library_dirs = config.get('library_dirs', []) if self.flavor == 'win': library_dirs = [self.msvs_settings.ConvertVSMacros(l, config_name) for l in library_dirs] library_dirs = ['/LIBPATH:' + QuoteShellArgument(self.GypPathToNinja(l), self.flavor) for l in library_dirs] else: library_dirs = [QuoteShellArgument('-L' + self.GypPathToNinja(l), self.flavor) for l in library_dirs] libraries = gyp.common.uniquer(map(self.ExpandSpecial, spec.get('libraries', []))) if self.flavor == 'mac': libraries = self.xcode_settings.AdjustLibraries(libraries, config_name) elif self.flavor == 'win': libraries = self.msvs_settings.AdjustLibraries(libraries) self.WriteVariableList(ninja_file, 'libs', library_dirs + libraries) linked_binary = output if command in ('solink', 'solink_module'): extra_bindings.append(('soname', os.path.split(output)[1])) extra_bindings.append(('lib', gyp.common.EncodePOSIXShellArgument(output))) if self.flavor != 'win': link_file_list = output if self.is_mac_bundle: # 'Dependency Framework.framework/Versions/A/Dependency Framework' -> # 'Dependency Framework.framework.rsp' link_file_list = self.xcode_settings.GetWrapperName() if arch: link_file_list += '.' 
+ arch link_file_list += '.rsp' # If an rspfile contains spaces, ninja surrounds the filename with # quotes around it and then passes it to open(), creating a file with # quotes in its name (and when looking for the rsp file, the name # makes it through bash which strips the quotes) :-/ link_file_list = link_file_list.replace(' ', '_') extra_bindings.append( ('link_file_list', gyp.common.EncodePOSIXShellArgument(link_file_list))) if self.flavor == 'win': extra_bindings.append(('binary', output)) if ('/NOENTRY' not in ldflags and not self.msvs_settings.GetNoImportLibrary(config_name)): self.target.import_lib = output + '.lib' extra_bindings.append(('implibflag', '/IMPLIB:%s' % self.target.import_lib)) pdbname = self.msvs_settings.GetPDBName( config_name, self.ExpandSpecial, output + '.pdb') output = [output, self.target.import_lib] if pdbname: output.append(pdbname) elif not self.is_mac_bundle: output = [output, output + '.TOC'] else: command = command + '_notoc' elif self.flavor == 'win': extra_bindings.append(('binary', output)) pdbname = self.msvs_settings.GetPDBName( config_name, self.ExpandSpecial, output + '.pdb') if pdbname: output = [output, pdbname] if len(solibs): extra_bindings.append(('solibs', gyp.common.EncodePOSIXShellList(solibs))) ninja_file.build(output, command + command_suffix, link_deps, implicit=list(implicit_deps), order_only=list(order_deps), variables=extra_bindings) return linked_binary def WriteTarget(self, spec, config_name, config, link_deps, compile_deps): extra_link_deps = any(self.target_outputs.get(dep).Linkable() for dep in spec.get('dependencies', []) if dep in self.target_outputs) if spec['type'] == 'none' or (not link_deps and not extra_link_deps): # TODO(evan): don't call this function for 'none' target types, as # it doesn't do anything, and we fake out a 'binary' with a stamp file. 
self.target.binary = compile_deps self.target.type = 'none' elif spec['type'] == 'static_library': self.target.binary = self.ComputeOutput(spec) if (self.flavor not in ('mac', 'openbsd', 'netbsd', 'win') and not self.is_standalone_static_library): self.ninja.build(self.target.binary, 'alink_thin', link_deps, order_only=compile_deps) else: variables = [] if self.xcode_settings: libtool_flags = self.xcode_settings.GetLibtoolflags(config_name) if libtool_flags: variables.append(('libtool_flags', libtool_flags)) if self.msvs_settings: libflags = self.msvs_settings.GetLibFlags(config_name, self.GypPathToNinja) variables.append(('libflags', libflags)) if self.flavor != 'mac' or len(self.archs) == 1: self.AppendPostbuildVariable(variables, spec, self.target.binary, self.target.binary) self.ninja.build(self.target.binary, 'alink', link_deps, order_only=compile_deps, variables=variables) else: inputs = [] for arch in self.archs: output = self.ComputeOutput(spec, arch) self.arch_subninjas[arch].build(output, 'alink', link_deps[arch], order_only=compile_deps, variables=variables) inputs.append(output) # TODO: It's not clear if libtool_flags should be passed to the alink # call that combines single-arch .a files into a fat .a file. self.AppendPostbuildVariable(variables, spec, self.target.binary, self.target.binary) self.ninja.build(self.target.binary, 'alink', inputs, # FIXME: test proving order_only=compile_deps isn't # needed. 
                      variables=variables)
    else:
      self.target.binary = self.WriteLink(spec, config_name, config, link_deps)
    return self.target.binary

  def WriteMacBundle(self, spec, mac_bundle_depends, is_empty):
    """Write the final bundling step for a mac bundle; returns its output."""
    assert self.is_mac_bundle
    package_framework = spec['type'] in ('shared_library', 'loadable_module')
    output = self.ComputeMacBundleOutput()
    if is_empty:
      # Empty bundles get a stamp instead of a real bundle directory.
      output += '.stamp'
    variables = []
    self.AppendPostbuildVariable(variables, spec, output, self.target.binary,
                                 is_command_start=not package_framework)
    if package_framework and not is_empty:
      variables.append(('version', self.xcode_settings.GetFrameworkVersion()))
      self.ninja.build(output, 'package_framework', mac_bundle_depends,
                       variables=variables)
    else:
      self.ninja.build(output, 'stamp', mac_bundle_depends,
                       variables=variables)
    self.target.bundle = output
    return output

  def GetToolchainEnv(self, additional_settings=None):
    """Returns the variables toolchain would set for build steps."""
    env = self.GetSortedXcodeEnv(additional_settings=additional_settings)
    if self.flavor == 'win':
      env = self.GetMsvsToolchainEnv(
          additional_settings=additional_settings)
    return env

  def GetMsvsToolchainEnv(self, additional_settings=None):
    """Returns the variables Visual Studio would set for build steps."""
    return self.msvs_settings.GetVSMacroEnv('$!PRODUCT_DIR',
                                             config=self.config_name)

  def GetSortedXcodeEnv(self, additional_settings=None):
    """Returns the variables Xcode would set for build steps."""
    assert self.abs_build_dir
    abs_build_dir = self.abs_build_dir
    return gyp.xcode_emulation.GetSortedXcodeEnv(
        self.xcode_settings, abs_build_dir,
        os.path.join(abs_build_dir, self.build_to_base), self.config_name,
        additional_settings)

  def GetSortedXcodePostbuildEnv(self):
    """Returns the variables Xcode would set for postbuild steps."""
    postbuild_settings = {}
    # CHROMIUM_STRIP_SAVE_FILE is a chromium-specific hack.
    # TODO(thakis): It would be nice to have some general mechanism instead.
strip_save_file = self.xcode_settings.GetPerTargetSetting( 'CHROMIUM_STRIP_SAVE_FILE') if strip_save_file: postbuild_settings['CHROMIUM_STRIP_SAVE_FILE'] = strip_save_file return self.GetSortedXcodeEnv(additional_settings=postbuild_settings) def AppendPostbuildVariable(self, variables, spec, output, binary, is_command_start=False): """Adds a 'postbuild' variable if there is a postbuild for |output|.""" postbuild = self.GetPostbuildCommand(spec, output, binary, is_command_start) if postbuild: variables.append(('postbuilds', postbuild)) def GetPostbuildCommand(self, spec, output, output_binary, is_command_start): """Returns a shell command that runs all the postbuilds, and removes |output| if any of them fails. If |is_command_start| is False, then the returned string will start with ' && '.""" if not self.xcode_settings or spec['type'] == 'none' or not output: return '' output = QuoteShellArgument(output, self.flavor) postbuilds = gyp.xcode_emulation.GetSpecPostbuildCommands(spec, quiet=True) if output_binary is not None: postbuilds = self.xcode_settings.AddImplicitPostbuilds( self.config_name, os.path.normpath(os.path.join(self.base_to_build, output)), QuoteShellArgument( os.path.normpath(os.path.join(self.base_to_build, output_binary)), self.flavor), postbuilds, quiet=True) if not postbuilds: return '' # Postbuilds expect to be run in the gyp file's directory, so insert an # implicit postbuild to cd to there. postbuilds.insert(0, gyp.common.EncodePOSIXShellList( ['cd', self.build_to_base])) env = self.ComputeExportEnvString(self.GetSortedXcodePostbuildEnv()) # G will be non-null if any postbuild fails. Run all postbuilds in a # subshell. commands = env + ' (' + \ ' && '.join([ninja_syntax.escape(command) for command in postbuilds]) command_string = (commands + '); G=$$?; ' # Remove the final output if any postbuild failed. 
                      # Remove the final output if any postbuild failed.
                      '((exit $$G) || rm -rf %s) ' % output + '&& exit $$G)')
    if is_command_start:
      return '(' + command_string + ' && '
    else:
      return '$ && (' + command_string

  def ComputeExportEnvString(self, env):
    """Given an environment, returns a string looking like
    'export FOO=foo; export BAR="${FOO} bar;' that exports |env| to the
    shell."""
    export_str = []
    for k, v in env:
      export_str.append('export %s=%s;' %
          (k, ninja_syntax.escape(gyp.common.EncodePOSIXShellArgument(v))))
    return ' '.join(export_str)

  def ComputeMacBundleOutput(self):
    """Return the 'output' (full output path) to a bundle output directory."""
    assert self.is_mac_bundle
    path = generator_default_variables['PRODUCT_DIR']
    return self.ExpandSpecial(
        os.path.join(path, self.xcode_settings.GetWrapperName()))

  def ComputeOutputFileName(self, spec, type=None):
    """Compute the filename of the final output for the current target."""
    if not type:
      type = spec['type']

    default_variables = copy.copy(generator_default_variables)
    CalculateVariables(default_variables, {'flavor': self.flavor})

    # Compute filename prefix: the product prefix, or a default for
    # the product type.
    DEFAULT_PREFIX = {
      'loadable_module': default_variables['SHARED_LIB_PREFIX'],
      'shared_library': default_variables['SHARED_LIB_PREFIX'],
      'static_library': default_variables['STATIC_LIB_PREFIX'],
      'executable': default_variables['EXECUTABLE_PREFIX'],
      }
    prefix = spec.get('product_prefix', DEFAULT_PREFIX.get(type, ''))

    # Compute filename extension: the product extension, or a default
    # for the product type.
    DEFAULT_EXTENSION = {
        'loadable_module': default_variables['SHARED_LIB_SUFFIX'],
        'shared_library': default_variables['SHARED_LIB_SUFFIX'],
        'static_library': default_variables['STATIC_LIB_SUFFIX'],
        'executable': default_variables['EXECUTABLE_SUFFIX'],
      }
    extension = spec.get('product_extension')
    if extension:
      extension = '.' + extension
    else:
      extension = DEFAULT_EXTENSION.get(type, '')

    if 'product_name' in spec:
      # If we were given an explicit name, use that.
target = spec['product_name'] else: # Otherwise, derive a name from the target name. target = spec['target_name'] if prefix == 'lib': # Snip out an extra 'lib' from libs if appropriate. target = StripPrefix(target, 'lib') if type in ('static_library', 'loadable_module', 'shared_library', 'executable'): return '%s%s%s' % (prefix, target, extension) elif type == 'none': return '%s.stamp' % target else: raise Exception('Unhandled output type %s' % type) def ComputeOutput(self, spec, arch=None): """Compute the path for the final output of the spec.""" type = spec['type'] if self.flavor == 'win': override = self.msvs_settings.GetOutputName(self.config_name, self.ExpandSpecial) if override: return override if arch is None and self.flavor == 'mac' and type in ( 'static_library', 'executable', 'shared_library', 'loadable_module'): filename = self.xcode_settings.GetExecutablePath() else: filename = self.ComputeOutputFileName(spec, type) if arch is None and 'product_dir' in spec: path = os.path.join(spec['product_dir'], filename) return self.ExpandSpecial(path) # Some products go into the output root, libraries go into shared library # dir, and everything else goes into the normal place. type_in_output_root = ['executable', 'loadable_module'] if self.flavor == 'mac' and self.toolset == 'target': type_in_output_root += ['shared_library', 'static_library'] elif self.flavor == 'win' and self.toolset == 'target': type_in_output_root += ['shared_library'] if arch is not None: # Make sure partial executables don't end up in a bundle or the regular # output directory. 
archdir = 'arch' if self.toolset != 'target': archdir = os.path.join('arch', '%s' % self.toolset) return os.path.join(archdir, AddArch(filename, arch)) elif type in type_in_output_root or self.is_standalone_static_library: return filename elif type == 'shared_library': libdir = 'lib' if self.toolset != 'target': libdir = os.path.join('lib', '%s' % self.toolset) return os.path.join(libdir, filename) else: return self.GypPathToUniqueOutput(filename, qualified=False) def WriteVariableList(self, ninja_file, var, values): assert not isinstance(values, str) if values is None: values = [] ninja_file.variable(var, ' '.join(values)) def WriteNewNinjaRule(self, name, args, description, is_cygwin, env, pool, depfile=None): """Write out a new ninja "rule" statement for a given command. Returns the name of the new rule, and a copy of |args| with variables expanded.""" if self.flavor == 'win': args = [self.msvs_settings.ConvertVSMacros( arg, self.base_to_build, config=self.config_name) for arg in args] description = self.msvs_settings.ConvertVSMacros( description, config=self.config_name) elif self.flavor == 'mac': # |env| is an empty list on non-mac. args = [gyp.xcode_emulation.ExpandEnvVars(arg, env) for arg in args] description = gyp.xcode_emulation.ExpandEnvVars(description, env) # TODO: we shouldn't need to qualify names; we do it because # currently the ninja rule namespace is global, but it really # should be scoped to the subninja. rule_name = self.name if self.toolset == 'target': rule_name += '.' + self.toolset rule_name += '.' + name rule_name = re.sub('[^a-zA-Z0-9_]', '_', rule_name) # Remove variable references, but not if they refer to the magic rule # variables. This is not quite right, as it also protects these for # actions, not just for rules where they are valid. Good enough. protect = [ '${root}', '${dirname}', '${source}', '${ext}', '${name}' ] protect = '(?!' 
+ '|'.join(map(re.escape, protect)) + ')' description = re.sub(protect + r'\$', '_', description) # gyp dictates that commands are run from the base directory. # cd into the directory before running, and adjust paths in # the arguments to point to the proper locations. rspfile = None rspfile_content = None args = [self.ExpandSpecial(arg, self.base_to_build) for arg in args] if self.flavor == 'win': rspfile = rule_name + '.$unique_name.rsp' # The cygwin case handles this inside the bash sub-shell. run_in = '' if is_cygwin else ' ' + self.build_to_base if is_cygwin: rspfile_content = self.msvs_settings.BuildCygwinBashCommandLine( args, self.build_to_base) else: rspfile_content = gyp.msvs_emulation.EncodeRspFileList(args) command = ('%s gyp-win-tool action-wrapper $arch ' % sys.executable + rspfile + run_in) else: env = self.ComputeExportEnvString(env) command = gyp.common.EncodePOSIXShellList(args) command = 'cd %s; ' % self.build_to_base + env + command # GYP rules/actions express being no-ops by not touching their outputs. # Avoid executing downstream dependencies in this case by specifying # restat=1 to ninja. self.ninja.rule(rule_name, command, description, depfile=depfile, restat=True, pool=pool, rspfile=rspfile, rspfile_content=rspfile_content) self.ninja.newline() return rule_name, args def CalculateVariables(default_variables, params): """Calculate additional variables for use in the build (called by gyp).""" global generator_additional_non_configuration_keys global generator_additional_path_sections flavor = gyp.common.GetFlavor(params) if flavor == 'mac': default_variables.setdefault('OS', 'mac') default_variables.setdefault('SHARED_LIB_SUFFIX', '.dylib') default_variables.setdefault('SHARED_LIB_DIR', generator_default_variables['PRODUCT_DIR']) default_variables.setdefault('LIB_DIR', generator_default_variables['PRODUCT_DIR']) # Copy additional generator configuration data from Xcode, which is shared # by the Mac Ninja generator. 
import gyp.generator.xcode as xcode_generator generator_additional_non_configuration_keys = getattr(xcode_generator, 'generator_additional_non_configuration_keys', []) generator_additional_path_sections = getattr(xcode_generator, 'generator_additional_path_sections', []) global generator_extra_sources_for_rules generator_extra_sources_for_rules = getattr(xcode_generator, 'generator_extra_sources_for_rules', []) elif flavor == 'win': exts = gyp.MSVSUtil.TARGET_TYPE_EXT default_variables.setdefault('OS', 'win') default_variables['EXECUTABLE_SUFFIX'] = '.' + exts['executable'] default_variables['STATIC_LIB_PREFIX'] = '' default_variables['STATIC_LIB_SUFFIX'] = '.' + exts['static_library'] default_variables['SHARED_LIB_PREFIX'] = '' default_variables['SHARED_LIB_SUFFIX'] = '.' + exts['shared_library'] # Copy additional generator configuration data from VS, which is shared # by the Windows Ninja generator. import gyp.generator.msvs as msvs_generator generator_additional_non_configuration_keys = getattr(msvs_generator, 'generator_additional_non_configuration_keys', []) generator_additional_path_sections = getattr(msvs_generator, 'generator_additional_path_sections', []) gyp.msvs_emulation.CalculateCommonVariables(default_variables, params) else: operating_system = flavor if flavor == 'android': operating_system = 'linux' # Keep this legacy behavior for now. default_variables.setdefault('OS', operating_system) default_variables.setdefault('SHARED_LIB_SUFFIX', '.so') default_variables.setdefault('SHARED_LIB_DIR', os.path.join('$!PRODUCT_DIR', 'lib')) default_variables.setdefault('LIB_DIR', os.path.join('$!PRODUCT_DIR', 'obj')) def ComputeOutputDir(params): """Returns the path from the toplevel_dir to the build output directory.""" # generator_dir: relative path from pwd to where make puts build files. # Makes migrating from make to ninja easier, ninja doesn't put anything here. 
generator_dir = os.path.relpath(params['options'].generator_output or '.') # output_dir: relative path from generator_dir to the build directory. output_dir = params.get('generator_flags', {}).get('output_dir', 'out') # Relative path from source root to our output files. e.g. "out" return os.path.normpath(os.path.join(generator_dir, output_dir)) def CalculateGeneratorInputInfo(params): """Called by __init__ to initialize generator values based on params.""" # E.g. "out/gypfiles" toplevel = params['options'].toplevel_dir qualified_out_dir = os.path.normpath(os.path.join( toplevel, ComputeOutputDir(params), 'gypfiles')) global generator_filelist_paths generator_filelist_paths = { 'toplevel': toplevel, 'qualified_out_dir': qualified_out_dir, } def OpenOutput(path, mode='w'): """Open |path| for writing, creating directories if necessary.""" gyp.common.EnsureDirExists(path) return open(path, mode) def CommandWithWrapper(cmd, wrappers, prog): wrapper = wrappers.get(cmd, '') if wrapper: return wrapper + ' ' + prog return prog def GetDefaultConcurrentLinks(): """Returns a best-guess for a number of concurrent links.""" pool_size = int(os.environ.get('GYP_LINK_CONCURRENCY', 0)) if pool_size: return pool_size if sys.platform in ('win32', 'cygwin'): import ctypes class MEMORYSTATUSEX(ctypes.Structure): _fields_ = [ ("dwLength", ctypes.c_ulong), ("dwMemoryLoad", ctypes.c_ulong), ("ullTotalPhys", ctypes.c_ulonglong), ("ullAvailPhys", ctypes.c_ulonglong), ("ullTotalPageFile", ctypes.c_ulonglong), ("ullAvailPageFile", ctypes.c_ulonglong), ("ullTotalVirtual", ctypes.c_ulonglong), ("ullAvailVirtual", ctypes.c_ulonglong), ("sullAvailExtendedVirtual", ctypes.c_ulonglong), ] stat = MEMORYSTATUSEX() stat.dwLength = ctypes.sizeof(stat) ctypes.windll.kernel32.GlobalMemoryStatusEx(ctypes.byref(stat)) # VS 2015 uses 20% more working set than VS 2013 and can consume all RAM # on a 64 GB machine. 
mem_limit = max(1, stat.ullTotalPhys / (5 * (2 ** 30))) # total / 5GB hard_cap = max(1, int(os.environ.get('GYP_LINK_CONCURRENCY_MAX', 2**32))) return min(mem_limit, hard_cap) elif sys.platform.startswith('linux'): if os.path.exists("/proc/meminfo"): with open("/proc/meminfo") as meminfo: memtotal_re = re.compile(r'^MemTotal:\s*(\d*)\s*kB') for line in meminfo: match = memtotal_re.match(line) if not match: continue # Allow 8Gb per link on Linux because Gold is quite memory hungry return max(1, int(match.group(1)) / (8 * (2 ** 20))) return 1 elif sys.platform == 'darwin': try: avail_bytes = int(subprocess.check_output(['sysctl', '-n', 'hw.memsize'])) # A static library debug build of Chromium's unit_tests takes ~2.7GB, so # 4GB per ld process allows for some more bloat. return max(1, avail_bytes / (4 * (2 ** 30))) # total / 4GB except: return 1 else: # TODO(scottmg): Implement this for other platforms. return 1 def _GetWinLinkRuleNameSuffix(embed_manifest): """Returns the suffix used to select an appropriate linking rule depending on whether the manifest embedding is enabled.""" return '_embed' if embed_manifest else '' def _AddWinLinkRules(master_ninja, embed_manifest): """Adds link rules for Windows platform to |master_ninja|.""" def FullLinkCommand(ldcmd, out, binary_type): resource_name = { 'exe': '1', 'dll': '2', }[binary_type] return '%(python)s gyp-win-tool link-with-manifests $arch %(embed)s ' \ '%(out)s "%(ldcmd)s" %(resname)s $mt $rc "$intermediatemanifest" ' \ '$manifests' % { 'python': sys.executable, 'out': out, 'ldcmd': ldcmd, 'resname': resource_name, 'embed': embed_manifest } rule_name_suffix = _GetWinLinkRuleNameSuffix(embed_manifest) use_separate_mspdbsrv = ( int(os.environ.get('GYP_USE_SEPARATE_MSPDBSRV', '0')) != 0) dlldesc = 'LINK%s(DLL) $binary' % rule_name_suffix.upper() dllcmd = ('%s gyp-win-tool link-wrapper $arch %s ' '$ld /nologo $implibflag /DLL /OUT:$binary ' '@$binary.rsp' % (sys.executable, use_separate_mspdbsrv)) dllcmd = 
FullLinkCommand(dllcmd, '$binary', 'dll') master_ninja.rule('solink' + rule_name_suffix, description=dlldesc, command=dllcmd, rspfile='$binary.rsp', rspfile_content='$libs $in_newline $ldflags', restat=True, pool='link_pool') master_ninja.rule('solink_module' + rule_name_suffix, description=dlldesc, command=dllcmd, rspfile='$binary.rsp', rspfile_content='$libs $in_newline $ldflags', restat=True, pool='link_pool') # Note that ldflags goes at the end so that it has the option of # overriding default settings earlier in the command line. exe_cmd = ('%s gyp-win-tool link-wrapper $arch %s ' '$ld /nologo /OUT:$binary @$binary.rsp' % (sys.executable, use_separate_mspdbsrv)) exe_cmd = FullLinkCommand(exe_cmd, '$binary', 'exe') master_ninja.rule('link' + rule_name_suffix, description='LINK%s $binary' % rule_name_suffix.upper(), command=exe_cmd, rspfile='$binary.rsp', rspfile_content='$in_newline $libs $ldflags', pool='link_pool') def GenerateOutputForConfig(target_list, target_dicts, data, params, config_name): options = params['options'] flavor = gyp.common.GetFlavor(params) generator_flags = params.get('generator_flags', {}) # build_dir: relative path from source root to our output files. # e.g. "out/Debug" build_dir = os.path.normpath( os.path.join(ComputeOutputDir(params), config_name)) toplevel_build = os.path.join(options.toplevel_dir, build_dir) master_ninja_file = OpenOutput(os.path.join(toplevel_build, 'build.ninja')) master_ninja = ninja_syntax.Writer(master_ninja_file, width=120) # Put build-time support tools in out/{config_name}. gyp.common.CopyTool(flavor, toplevel_build) # Grab make settings for CC/CXX. # The rules are # - The priority from low to high is gcc/g++, the 'make_global_settings' in # gyp, the environment variable. # - If there is no 'make_global_settings' for CC.host/CXX.host or # 'CC_host'/'CXX_host' enviroment variable, cc_host/cxx_host should be set # to cc/cxx. 
if flavor == 'win': ar = 'lib.exe' # cc and cxx must be set to the correct architecture by overriding with one # of cl_x86 or cl_x64 below. cc = 'UNSET' cxx = 'UNSET' ld = 'link.exe' ld_host = '$ld' else: ar = 'ar' cc = 'cc' cxx = 'c++' ld = '$cc' ldxx = '$cxx' ld_host = '$cc_host' ldxx_host = '$cxx_host' ar_host = 'ar' cc_host = None cxx_host = None cc_host_global_setting = None cxx_host_global_setting = None clang_cl = None nm = 'nm' nm_host = 'nm' readelf = 'readelf' readelf_host = 'readelf' build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0]) make_global_settings = data[build_file].get('make_global_settings', []) build_to_root = gyp.common.InvertRelativePath(build_dir, options.toplevel_dir) wrappers = {} for key, value in make_global_settings: if key == 'AR': ar = os.path.join(build_to_root, value) if key == 'AR.host': ar_host = os.path.join(build_to_root, value) if key == 'CC': cc = os.path.join(build_to_root, value) if cc.endswith('clang-cl'): clang_cl = cc if key == 'CXX': cxx = os.path.join(build_to_root, value) if key == 'CC.host': cc_host = os.path.join(build_to_root, value) cc_host_global_setting = value if key == 'CXX.host': cxx_host = os.path.join(build_to_root, value) cxx_host_global_setting = value if key == 'LD': ld = os.path.join(build_to_root, value) if key == 'LD.host': ld_host = os.path.join(build_to_root, value) if key == 'NM': nm = os.path.join(build_to_root, value) if key == 'NM.host': nm_host = os.path.join(build_to_root, value) if key == 'READELF': readelf = os.path.join(build_to_root, value) if key == 'READELF.host': readelf_host = os.path.join(build_to_root, value) if key.endswith('_wrapper'): wrappers[key[:-len('_wrapper')]] = os.path.join(build_to_root, value) # Support wrappers from environment variables too. 
for key, value in os.environ.iteritems(): if key.lower().endswith('_wrapper'): key_prefix = key[:-len('_wrapper')] key_prefix = re.sub(r'\.HOST$', '.host', key_prefix) wrappers[key_prefix] = os.path.join(build_to_root, value) if flavor == 'win': configs = [target_dicts[qualified_target]['configurations'][config_name] for qualified_target in target_list] shared_system_includes = None if not generator_flags.get('ninja_use_custom_environment_files', 0): shared_system_includes = \ gyp.msvs_emulation.ExtractSharedMSVSSystemIncludes( configs, generator_flags) cl_paths = gyp.msvs_emulation.GenerateEnvironmentFiles( toplevel_build, generator_flags, shared_system_includes, OpenOutput) for arch, path in cl_paths.iteritems(): if clang_cl: # If we have selected clang-cl, use that instead. path = clang_cl command = CommandWithWrapper('CC', wrappers, QuoteShellArgument(path, 'win')) if clang_cl: # Use clang-cl to cross-compile for x86 or x86_64. command += (' -m32' if arch == 'x86' else ' -m64') master_ninja.variable('cl_' + arch, command) cc = GetEnvironFallback(['CC_target', 'CC'], cc) master_ninja.variable('cc', CommandWithWrapper('CC', wrappers, cc)) cxx = GetEnvironFallback(['CXX_target', 'CXX'], cxx) master_ninja.variable('cxx', CommandWithWrapper('CXX', wrappers, cxx)) if flavor == 'win': master_ninja.variable('ld', ld) master_ninja.variable('idl', 'midl.exe') master_ninja.variable('ar', ar) master_ninja.variable('rc', 'rc.exe') master_ninja.variable('ml_x86', 'ml.exe') master_ninja.variable('ml_x64', 'ml64.exe') master_ninja.variable('mt', 'mt.exe') else: master_ninja.variable('ld', CommandWithWrapper('LINK', wrappers, ld)) master_ninja.variable('ldxx', CommandWithWrapper('LINK', wrappers, ldxx)) master_ninja.variable('ar', GetEnvironFallback(['AR_target', 'AR'], ar)) if flavor != 'mac': # Mac does not use readelf/nm for .TOC generation, so avoiding polluting # the master ninja with extra unused variables. 
master_ninja.variable( 'nm', GetEnvironFallback(['NM_target', 'NM'], nm)) master_ninja.variable( 'readelf', GetEnvironFallback(['READELF_target', 'READELF'], readelf)) if generator_supports_multiple_toolsets: if not cc_host: cc_host = cc if not cxx_host: cxx_host = cxx master_ninja.variable('ar_host', GetEnvironFallback(['AR_host'], ar_host)) master_ninja.variable('nm_host', GetEnvironFallback(['NM_host'], nm_host)) master_ninja.variable('readelf_host', GetEnvironFallback(['READELF_host'], readelf_host)) cc_host = GetEnvironFallback(['CC_host'], cc_host) cxx_host = GetEnvironFallback(['CXX_host'], cxx_host) # The environment variable could be used in 'make_global_settings', like # ['CC.host', '$(CC)'] or ['CXX.host', '$(CXX)'], transform them here. if '$(CC)' in cc_host and cc_host_global_setting: cc_host = cc_host_global_setting.replace('$(CC)', cc) if '$(CXX)' in cxx_host and cxx_host_global_setting: cxx_host = cxx_host_global_setting.replace('$(CXX)', cxx) master_ninja.variable('cc_host', CommandWithWrapper('CC.host', wrappers, cc_host)) master_ninja.variable('cxx_host', CommandWithWrapper('CXX.host', wrappers, cxx_host)) if flavor == 'win': master_ninja.variable('ld_host', ld_host) else: master_ninja.variable('ld_host', CommandWithWrapper( 'LINK', wrappers, ld_host)) master_ninja.variable('ldxx_host', CommandWithWrapper( 'LINK', wrappers, ldxx_host)) master_ninja.newline() master_ninja.pool('link_pool', depth=GetDefaultConcurrentLinks()) master_ninja.newline() deps = 'msvc' if flavor == 'win' else 'gcc' if flavor != 'win': master_ninja.rule( 'cc', description='CC $out', command=('$cc -MMD -MF $out.d $defines $includes $cflags $cflags_c ' '$cflags_pch_c -c $in -o $out'), depfile='$out.d', deps=deps) master_ninja.rule( 'cc_s', description='CC $out', command=('$cc $defines $includes $cflags $cflags_c ' '$cflags_pch_c -c $in -o $out')) master_ninja.rule( 'cxx', description='CXX $out', command=('$cxx -MMD -MF $out.d $defines $includes $cflags $cflags_cc ' 
'$cflags_pch_cc -c $in -o $out'), depfile='$out.d', deps=deps) else: # TODO(scottmg) Separate pdb names is a test to see if it works around # http://crbug.com/142362. It seems there's a race between the creation of # the .pdb by the precompiled header step for .cc and the compilation of # .c files. This should be handled by mspdbsrv, but rarely errors out with # c1xx : fatal error C1033: cannot open program database # By making the rules target separate pdb files this might be avoided. cc_command = ('ninja -t msvc -e $arch ' + '-- ' '$cc /nologo /showIncludes /FC ' '@$out.rsp /c $in /Fo$out /Fd$pdbname_c ') cxx_command = ('ninja -t msvc -e $arch ' + '-- ' '$cxx /nologo /showIncludes /FC ' '@$out.rsp /c $in /Fo$out /Fd$pdbname_cc ') master_ninja.rule( 'cc', description='CC $out', command=cc_command, rspfile='$out.rsp', rspfile_content='$defines $includes $cflags $cflags_c', deps=deps) master_ninja.rule( 'cxx', description='CXX $out', command=cxx_command, rspfile='$out.rsp', rspfile_content='$defines $includes $cflags $cflags_cc', deps=deps) master_ninja.rule( 'idl', description='IDL $in', command=('%s gyp-win-tool midl-wrapper $arch $outdir ' '$tlb $h $dlldata $iid $proxy $in ' '$midl_includes $idlflags' % sys.executable)) master_ninja.rule( 'rc', description='RC $in', # Note: $in must be last otherwise rc.exe complains. 
command=('%s gyp-win-tool rc-wrapper ' '$arch $rc $defines $resource_includes $rcflags /fo$out $in' % sys.executable)) master_ninja.rule( 'asm', description='ASM $out', command=('%s gyp-win-tool asm-wrapper ' '$arch $asm $defines $includes $asmflags /c /Fo $out $in' % sys.executable)) if flavor != 'mac' and flavor != 'win': master_ninja.rule( 'alink', description='AR $out', command='rm -f $out && $ar rcs $arflags $out $in') master_ninja.rule( 'alink_thin', description='AR $out', command='rm -f $out && $ar rcsT $arflags $out $in') # This allows targets that only need to depend on $lib's API to declare an # order-only dependency on $lib.TOC and avoid relinking such downstream # dependencies when $lib changes only in non-public ways. # The resulting string leaves an uninterpolated %{suffix} which # is used in the final substitution below. mtime_preserving_solink_base = ( 'if [ ! -e $lib -o ! -e $lib.TOC ]; then ' '%(solink)s && %(extract_toc)s > $lib.TOC; else ' '%(solink)s && %(extract_toc)s > $lib.tmp && ' 'if ! 
cmp -s $lib.tmp $lib.TOC; then mv $lib.tmp $lib.TOC ; ' 'fi; fi' % { 'solink': '$ld -shared $ldflags -o $lib -Wl,-soname=$soname %(suffix)s', 'extract_toc': ('{ $readelf -d $lib | grep SONAME ; ' '$nm -gD -f p $lib | cut -f1-2 -d\' \'; }')}) master_ninja.rule( 'solink', description='SOLINK $lib', restat=True, command=mtime_preserving_solink_base % {'suffix': '@$link_file_list'}, rspfile='$link_file_list', rspfile_content= '-Wl,--whole-archive $in $solibs -Wl,--no-whole-archive $libs', pool='link_pool') master_ninja.rule( 'solink_module', description='SOLINK(module) $lib', restat=True, command=mtime_preserving_solink_base % {'suffix': '@$link_file_list'}, rspfile='$link_file_list', rspfile_content='-Wl,--start-group $in -Wl,--end-group $solibs $libs', pool='link_pool') master_ninja.rule( 'link', description='LINK $out', command=('$ld $ldflags -o $out ' '-Wl,--start-group $in -Wl,--end-group $solibs $libs'), pool='link_pool') elif flavor == 'win': master_ninja.rule( 'alink', description='LIB $out', command=('%s gyp-win-tool link-wrapper $arch False ' '$ar /nologo /ignore:4221 /OUT:$out @$out.rsp' % sys.executable), rspfile='$out.rsp', rspfile_content='$in_newline $libflags') _AddWinLinkRules(master_ninja, embed_manifest=True) _AddWinLinkRules(master_ninja, embed_manifest=False) else: master_ninja.rule( 'objc', description='OBJC $out', command=('$cc -MMD -MF $out.d $defines $includes $cflags $cflags_objc ' '$cflags_pch_objc -c $in -o $out'), depfile='$out.d', deps=deps) master_ninja.rule( 'objcxx', description='OBJCXX $out', command=('$cxx -MMD -MF $out.d $defines $includes $cflags $cflags_objcc ' '$cflags_pch_objcc -c $in -o $out'), depfile='$out.d', deps=deps) master_ninja.rule( 'alink', description='LIBTOOL-STATIC $out, POSTBUILDS', command='rm -f $out && ' './gyp-mac-tool filter-libtool libtool $libtool_flags ' '-static -o $out $in' '$postbuilds') master_ninja.rule( 'lipo', description='LIPO $out, POSTBUILDS', command='rm -f $out && lipo -create $in -output 
$out$postbuilds') master_ninja.rule( 'solipo', description='SOLIPO $out, POSTBUILDS', command=( 'rm -f $lib $lib.TOC && lipo -create $in -output $lib$postbuilds &&' '%(extract_toc)s > $lib.TOC' % { 'extract_toc': '{ otool -l $lib | grep LC_ID_DYLIB -A 5; ' 'nm -gP $lib | cut -f1-2 -d\' \' | grep -v U$$; true; }'})) # Record the public interface of $lib in $lib.TOC. See the corresponding # comment in the posix section above for details. solink_base = '$ld %(type)s $ldflags -o $lib %(suffix)s' mtime_preserving_solink_base = ( 'if [ ! -e $lib -o ! -e $lib.TOC ] || ' # Always force dependent targets to relink if this library # reexports something. Handling this correctly would require # recursive TOC dumping but this is rare in practice, so punt. 'otool -l $lib | grep -q LC_REEXPORT_DYLIB ; then ' '%(solink)s && %(extract_toc)s > $lib.TOC; ' 'else ' '%(solink)s && %(extract_toc)s > $lib.tmp && ' 'if ! cmp -s $lib.tmp $lib.TOC; then ' 'mv $lib.tmp $lib.TOC ; ' 'fi; ' 'fi' % { 'solink': solink_base, 'extract_toc': '{ otool -l $lib | grep LC_ID_DYLIB -A 5; ' 'nm -gP $lib | cut -f1-2 -d\' \' | grep -v U$$; true; }'}) solink_suffix = '@$link_file_list$postbuilds' master_ninja.rule( 'solink', description='SOLINK $lib, POSTBUILDS', restat=True, command=mtime_preserving_solink_base % {'suffix': solink_suffix, 'type': '-shared'}, rspfile='$link_file_list', rspfile_content='$in $solibs $libs', pool='link_pool') master_ninja.rule( 'solink_notoc', description='SOLINK $lib, POSTBUILDS', restat=True, command=solink_base % {'suffix':solink_suffix, 'type': '-shared'}, rspfile='$link_file_list', rspfile_content='$in $solibs $libs', pool='link_pool') master_ninja.rule( 'solink_module', description='SOLINK(module) $lib, POSTBUILDS', restat=True, command=mtime_preserving_solink_base % {'suffix': solink_suffix, 'type': '-bundle'}, rspfile='$link_file_list', rspfile_content='$in $solibs $libs', pool='link_pool') master_ninja.rule( 'solink_module_notoc', description='SOLINK(module) $lib, 
POSTBUILDS', restat=True, command=solink_base % {'suffix': solink_suffix, 'type': '-bundle'}, rspfile='$link_file_list', rspfile_content='$in $solibs $libs', pool='link_pool') master_ninja.rule( 'link', description='LINK $out, POSTBUILDS', command=('$ld $ldflags -o $out ' '$in $solibs $libs$postbuilds'), pool='link_pool') master_ninja.rule( 'preprocess_infoplist', description='PREPROCESS INFOPLIST $out', command=('$cc -E -P -Wno-trigraphs -x c $defines $in -o $out && ' 'plutil -convert xml1 $out $out')) master_ninja.rule( 'copy_infoplist', description='COPY INFOPLIST $in', command='$env ./gyp-mac-tool copy-info-plist $in $out $binary $keys') master_ninja.rule( 'merge_infoplist', description='MERGE INFOPLISTS $in', command='$env ./gyp-mac-tool merge-info-plist $out $in') master_ninja.rule( 'compile_xcassets', description='COMPILE XCASSETS $in', command='$env ./gyp-mac-tool compile-xcassets $keys $in') master_ninja.rule( 'mac_tool', description='MACTOOL $mactool_cmd $in', command='$env ./gyp-mac-tool $mactool_cmd $in $out $binary') master_ninja.rule( 'package_framework', description='PACKAGE FRAMEWORK $out, POSTBUILDS', command='./gyp-mac-tool package-framework $out $version$postbuilds ' '&& touch $out') if flavor == 'win': master_ninja.rule( 'stamp', description='STAMP $out', command='%s gyp-win-tool stamp $out' % sys.executable) master_ninja.rule( 'copy', description='COPY $in $out', command='%s gyp-win-tool recursive-mirror $in $out' % sys.executable) else: master_ninja.rule( 'stamp', description='STAMP $out', command='${postbuilds}touch $out') master_ninja.rule( 'copy', description='COPY $in $out', command='rm -rf $out && cp -af $in $out') master_ninja.newline() all_targets = set() for build_file in params['build_files']: for target in gyp.common.AllTargets(target_list, target_dicts, os.path.normpath(build_file)): all_targets.add(target) all_outputs = set() # target_outputs is a map from qualified target name to a Target object. 
target_outputs = {} # target_short_names is a map from target short name to a list of Target # objects. target_short_names = {} # short name of targets that were skipped because they didn't contain anything # interesting. # NOTE: there may be overlap between this an non_empty_target_names. empty_target_names = set() # Set of non-empty short target names. # NOTE: there may be overlap between this an empty_target_names. non_empty_target_names = set() for qualified_target in target_list: # qualified_target is like: third_party/icu/icu.gyp:icui18n#target build_file, name, toolset = \ gyp.common.ParseQualifiedTarget(qualified_target) this_make_global_settings = data[build_file].get('make_global_settings', []) assert make_global_settings == this_make_global_settings, ( "make_global_settings needs to be the same for all targets. %s vs. %s" % (this_make_global_settings, make_global_settings)) spec = target_dicts[qualified_target] if flavor == 'mac': gyp.xcode_emulation.MergeGlobalXcodeSettingsToSpec(data[build_file], spec) # If build_file is a symlink, we must not follow it because there's a chance # it could point to a path above toplevel_dir, and we cannot correctly deal # with that case at the moment. build_file = gyp.common.RelativePath(build_file, options.toplevel_dir, False) qualified_target_for_hash = gyp.common.QualifiedTarget(build_file, name, toolset) hash_for_rules = hashlib.md5(qualified_target_for_hash).hexdigest() base_path = os.path.dirname(build_file) obj = 'obj' if toolset != 'target': obj += '.' + toolset output_file = os.path.join(obj, base_path, name + '.ninja') ninja_output = StringIO() writer = NinjaWriter(hash_for_rules, target_outputs, base_path, build_dir, ninja_output, toplevel_build, output_file, flavor, toplevel_dir=options.toplevel_dir) target = writer.WriteSpec(spec, config_name, generator_flags) if ninja_output.tell() > 0: # Only create files for ninja files that actually have contents. 
with OpenOutput(os.path.join(toplevel_build, output_file)) as ninja_file: ninja_file.write(ninja_output.getvalue()) ninja_output.close() master_ninja.subninja(output_file) if target: if name != target.FinalOutput() and spec['toolset'] == 'target': target_short_names.setdefault(name, []).append(target) target_outputs[qualified_target] = target if qualified_target in all_targets: all_outputs.add(target.FinalOutput()) non_empty_target_names.add(name) else: empty_target_names.add(name) if target_short_names: # Write a short name to build this target. This benefits both the # "build chrome" case as well as the gyp tests, which expect to be # able to run actions and build libraries by their short name. master_ninja.newline() master_ninja.comment('Short names for targets.') for short_name in target_short_names: master_ninja.build(short_name, 'phony', [x.FinalOutput() for x in target_short_names[short_name]]) # Write phony targets for any empty targets that weren't written yet. As # short names are not necessarily unique only do this for short names that # haven't already been output for another target. empty_target_names = empty_target_names - non_empty_target_names if empty_target_names: master_ninja.newline() master_ninja.comment('Empty targets (output for completeness).') for name in sorted(empty_target_names): master_ninja.build(name, 'phony') if all_outputs: master_ninja.newline() master_ninja.build('all', 'phony', list(all_outputs)) master_ninja.default(generator_flags.get('default_target', 'all')) master_ninja_file.close() def PerformBuild(data, configurations, params): options = params['options'] for config in configurations: builddir = os.path.join(options.toplevel_dir, 'out', config) arguments = ['ninja', '-C', builddir] print 'Building [%s]: %s' % (config, arguments) subprocess.check_call(arguments) def CallGenerateOutputForConfig(arglist): # Ignore the interrupt signal so that the parent process catches it and # kills all multiprocessing children. 
signal.signal(signal.SIGINT, signal.SIG_IGN) (target_list, target_dicts, data, params, config_name) = arglist GenerateOutputForConfig(target_list, target_dicts, data, params, config_name) def GenerateOutput(target_list, target_dicts, data, params): # Update target_dicts for iOS device builds. target_dicts = gyp.xcode_emulation.CloneConfigurationForDeviceAndEmulator( target_dicts) user_config = params.get('generator_flags', {}).get('config', None) if gyp.common.GetFlavor(params) == 'win': target_list, target_dicts = MSVSUtil.ShardTargets(target_list, target_dicts) target_list, target_dicts = MSVSUtil.InsertLargePdbShims( target_list, target_dicts, generator_default_variables) if user_config: GenerateOutputForConfig(target_list, target_dicts, data, params, user_config) else: config_names = target_dicts[target_list[0]]['configurations'].keys() if params['parallel']: try: pool = multiprocessing.Pool(len(config_names)) arglists = [] for config_name in config_names: arglists.append( (target_list, target_dicts, data, params, config_name)) pool.map(CallGenerateOutputForConfig, arglists) except KeyboardInterrupt, e: pool.terminate() raise e else: for config_name in config_names: GenerateOutputForConfig(target_list, target_dicts, data, params, config_name)
mit
CarlFK/dvsource-v4l2-other
dvsource-v4l2-other.py
1
12401
#! /usr/bin/python # # -*- coding: utf-8 -*- # vim: set ts=4 sw=4 et sts=4 ai: # """ dvsource-v4l2-other - Video4Linux2 source for DVswitch which supports any device. """ import argparse import atexit import os import tempfile import time import subprocess import shutil subprocess.DEVNULL = file(os.devnull, "rw+") ############################################################################### # Argument parsing ############################################################################### parser = argparse.ArgumentParser(add_help=False) parser.add_argument( "-d", "--device", default="/dev/video0", help="Video4Linux device to read the input from.") parser.add_argument( "-c", "--caps", default="", help="gstreamer caps to force v4l2src device to use.") parser.add_argument( "-s", "--system", default="pal", choices=["ntsc", "pal"], help="Specify the video system to use.This must match the system used by DVswitch.") parser.add_argument( "-a", "--aspect", default="4:3", choices=["4:3", "16:9"], help="Choose DV output aspect ratio.") parser.add_argument( "-t", "--timeout", type=int, default=10, help="How long to wait when terminating subprocess before killing.") parser.add_argument( "-r", "--rate", default="48000", choices=["48000", "44100", "32000"], help="Specify the sample rate, in Hz. 
**Only 48000kHz is supported.**") fake_types = { None: 0, "": 0, "smpte": 0, "snow": 1, "black": 2, "white": 3, "red": 4, "green": 5, "blue": 6, "checkers-1": 7, "checkers-2": 8, "checkers-4": 9, "checkers-8": 10, "circular": 11, "blink": 12, "smpte": 13, "zone-plate": 14, "gamut": 15, "chroma-zone-plate": 16, "solid-color": 17, "ball": 18, "smpte": 19, "bar": 20, } parser.add_argument( "-n", "--fake", choices=fake_types.keys(), help="Use a fake source rather then a real V4L2 device.") parser.add_argument( "-v", "--verbose", action="store_true", help="Increase output verbosity") parser.add_argument( "-x", "--display", action="store_true", help="Display the incoming video locally.") ############################################################################### # dvswitch arguments and .dvswitchrc parsing ############################################################################### def parse_dvswitchrc(configfile): r = {} for line in file(configfile, "r").readlines(): cmt = line.find('#') if cmt > 0: line = line[cmt:] line = line.strip() if not line: continue key, value = line.split('=', 1) r[key] = value return r def get_dvswitchrc(): import shlex from os import path configs = [path.expanduser("~"), ".", "/etc"] actual_config = {} for dirname in configs: configfile = path.join(dirname, ".dvswitchrc") if path.exists(configfile): actual_config.update(parse_dvswitchrc(configfile)) if actual_config.get("MIXER_HOST", None) == "0.0.0.0": actual_config["MIXER_HOST"] = "127.0.0.1" return actual_config config = get_dvswitchrc() # dvswitch arguments parser.add_argument( "-h", "--host", help=("" "Specify the network address on which DVswitch is listening. The host" " address may be specified by name or as an IPv4 or IPv6 literal."), default = config.get("MIXER_HOST", None), required = config.get("MIXER_HOST", None) == None, ) parser.add_argument( "-p", "--port", help=("" "Specify the network address on which DVswitch is listening. 
The host" " address may be specified by name or as an IPv4 or IPv6 literal."), default = config.get("MIXER_PORT", None), required = config.get("MIXER_PORT", None) == None, ) ############################################################################### parser.add_argument( "--help", action='help', help="show this help message and exit") ############################################################################### # Code to check dependencies ############################################################################### def check_command(name, package=None): try: output = subprocess.check_output(["which", name]) if args.verbose: print "Using", name, "found at", output.strip() except subprocess.CalledProcessError, e: print "Unable to find required command:", name if package: print "Please try 'sudo apt-get install %s'" % package raise def check_gst_module(name, package=None, extra_help=None): try: subprocess.check_call(["gst-inspect-1.0", name], stdout=subprocess.DEVNULL) except subprocess.CalledProcessError, e: print "Unable to find required gstreamer module", name if package: print "Please try 'sudo apt-get install %s'" % package if extra_help: print extra_help raise ############################################################################### # Code to which actually does the work ############################################################################### def launch_gstreamer(): if args.caps: args.caps += " ! " cmd = ("gst-launch-1.0" + " " + {True: "-v", False: "" }[args.verbose] + " " + # Video Pipeline -------------------- # ----------------------------------- {True: # Read the v4l2 input and decode it if it's a mjpeg input "v4l2src device=%s ! " % args.device + args.caps + "decodebin ! ", False: "videotestsrc is-live=true pattern=%s !" % fake_types[args.fake], }[args.fake == None] + " " + # Allow at most 1ms of data to be in the buffer, 2=GST_QUEUE_LEAK_DOWNSTREAM "queue leaky=downstream max-size-buffers=1 ! 
" + " " + # Convert to 4:3 format by adding borders if needed {"4:3": # Convert to 4:3 format by adding borders if needed "videoscale add-borders=1 ! video/x-raw,width=1024,height=768,pixel-aspect-ratio=\(fraction\)1/1 !", "16:9": # Convert to 16:9 format by adding borders if needed "videoscale add-borders=1 ! video/x-raw,width=1280,height=720,pixel-aspect-ratio=\(fraction\)1/1 !", }[args.aspect] + " " + # Pixel aspect ratio conversion is a bit hairy, see the following links for more information. # http://forum.doom9.org/showthread.php?t=111102 # http://www.sciencemedianetwork.org/wiki/Tutorials/Video/Pixel_Aspect_Ratio {"ntsc-4:3": # Convert to 4:3 with non-square pixels (was 10.0/11 ~= 0.91, now 8.0/9 ~= 0.88) "videoscale ! video/x-raw,width=720,height=480,pixel-aspect-ratio=\(fraction\)10/11 !", "ntsc-16:9": # Convert to 4:3 with non-square pixels (was 40.0/33 ~= 1.21, now 32.0/27 ~= 1.19) "videoscale ! video/x-raw,width=720,height=480,pixel-aspect-ratio=\(fraction\)40/33 !", "pal-4:3": # Convert to 4:3 with non-square pixels (was 59.0/54 ~= 1.09 == ITU-PAR, now 16.0/15 ~= 1.07 == NLE-PAR used by Final Cut / Adobe) "videoscale ! video/x-raw,width=720,height=576,pixel-aspect-ratio=\(fraction\)16/15 !", "pal-4:3": # Convert to 4:3 with non-square pixels (was 118.0/81 ~= 1.46 == anamorphic 'ITU', now 64.0/45 ~= 1.42 == anamorphic 'NLE') "videoscale ! video/x-raw,width=720,height=576,pixel-aspect-ratio=\(fraction\)64/45 !", }["%s-%s" % (args.system, args.aspect)] + " " + {"ntsc": # Convert the framerate to 30fps "videorate ! video/x-raw,framerate=\(fraction\)30000/1001 !", "pal": # Convert the framerate to 25fps "videorate ! video/x-raw,framerate=\(fraction\)25/1 !", }[args.system] + " " + # FIXME: Check which color space is needed by PAL verse NSTC. # Convert to color space needed by dvswitch "videoconvert ! video/x-raw,format=\(string\)I420 !" + " " + ["", "tee name=t ! "][args.display] + " " + "queue leaky=downstream max-size-buffers=1 ! 
" + " " + # Convert to DV format "videoconvert ! avenc_dvvideo ! avmux_dv name=dvmux !" + " " + # Output to dvswitch "dvswitchsink host=%s port=%s" % (args.host, args.port) + " " + # ----------------------------------- # Audio Pipeline -------------------- # ----------------------------------- " " + # Generate a dummy audio signal # 2 channels, 16-bit Linear PCM at 48 kHz # 2 channels, 16-bit Linear PCM at 44.1 kHz # 4 channels, 12-bit nonlinear PCM channels at 32 kHz (Not supported - gstreamer doesn't support 12-bit nonlinear) "audiotestsrc is-live=true wave=sine freq=200 ! audio/x-raw,channels=2,rate=%s,depth=16 ! queue ! dvmux." % args.rate + " " + # ----------------------------------- # Local Display --------------------- # ----------------------------------- " " + ["", "t. ! queue max-size-buffers=1 leaky=downstream ! videoconvert ! xvimagesink"][args.display] ) cmdargs = {} if args.verbose: print "Running the gstreamer conversion command of" print " ", cmd else: cmdargs["stdout"] = subprocess.DEVNULL return subprocess.Popen(cmd, shell=True, **cmdargs) ############################################################################### # Main function ############################################################################### def main(): # Check that gstreamer cmd line tools are installed check_command("gst-inspect-1.0", "gstreamer1.0-tools") check_command("gst-launch-1.0", "gstreamer1.0-tools") # Check if the gstreamer modules are installed check_gst_module("v4l2src", "gstreamer1.0-plugins-good") check_gst_module("decodebin", "gstreamer1.0-plugins-base") check_gst_module("videotestsrc", "gstreamer1.0-plugins-base") check_gst_module("videoscale", "gstreamer1.0-plugins-base") check_gst_module("videorate", "gstreamer1.0-plugins-base") check_gst_module("queue", "libgstreamer1.0-0") check_gst_module("tee", "libgstreamer1.0-0") check_gst_module("videoconvert", "gstreamer1.0-libav") check_gst_module("avenc_dvvideo", "gstreamer1.0-libav") check_gst_module("avmux_dv", 
"gstreamer1.0-libav") check_gst_module("dvswitchsink", "gstreamer1.0-dvswitch", """ If your distro doesn't ship the gstreamer1.0-dvswitch package you can find out more information in the README.md file or at https://github.com/timvideos/dvsource-v4l2-other#installing-the-gstreamer-dvswitch-plugin """) check_gst_module("audiotestsrc", "gstreamer1.0-plugins-base") check_gst_module("xvimagesink", "gstreamer1.0-plugins-base") # Check the input arguments make sense. if args.rate != "48000": raise SystemError("Only a --rate of 48000 is supported.") # Launch the sub-commands gst = launch_gstreamer() try: while True: if gst.poll() != None: raise OSError("gst-launch command terminated!") # FIXME: Add some type of monitoring of CPU usage here... if args.verbose: print "gst-launch happily running!" time.sleep(1.0) except KeyboardInterrupt, e: pass finally: exitstart = time.time() try: if args.verbose: print "Terminating gst-launch" gst.terminate() except Exception, e: print "Error terminating gst-launch", e while True: if gst.poll() != None: break if args.verbose: print "Waiting for gst-launch to terminate" time.sleep(1) if time.time() - exitstart > args.timeout: print "Timeout waiting for gst-launch", print "to terminate, killing." try: gst.kill() except Exception, e: print "Error killing gst-launch", e args = None if __name__ == "__main__": args = parser.parse_args() main()
apache-2.0
paplorinc/intellij-community
python/helpers/py3only/docutils/parsers/rst/directives/__init__.py
44
13607
# $Id: __init__.py 7621 2013-03-04 13:20:49Z milde $
# Author: David Goodger <goodger@python.org>
# Copyright: This module has been placed in the public domain.

"""
This package contains directive implementation modules.
"""

__docformat__ = 'reStructuredText'

import codecs
import re
import sys

from docutils import nodes
from docutils.parsers.rst.languages import en as _fallback_language_module
if sys.version_info < (2,5):
    # Pre-2.5 Pythons lack the ``level`` argument to __import__; use the
    # docutils compatibility shim instead.
    from docutils._compat import __import__


_directive_registry = {
      'attention': ('admonitions', 'Attention'),
      'caution': ('admonitions', 'Caution'),
      'code': ('body', 'CodeBlock'),
      'danger': ('admonitions', 'Danger'),
      'error': ('admonitions', 'Error'),
      'important': ('admonitions', 'Important'),
      'note': ('admonitions', 'Note'),
      'tip': ('admonitions', 'Tip'),
      'hint': ('admonitions', 'Hint'),
      'warning': ('admonitions', 'Warning'),
      'admonition': ('admonitions', 'Admonition'),
      'sidebar': ('body', 'Sidebar'),
      'topic': ('body', 'Topic'),
      'line-block': ('body', 'LineBlock'),
      'parsed-literal': ('body', 'ParsedLiteral'),
      'math': ('body', 'MathBlock'),
      'rubric': ('body', 'Rubric'),
      'epigraph': ('body', 'Epigraph'),
      'highlights': ('body', 'Highlights'),
      'pull-quote': ('body', 'PullQuote'),
      'compound': ('body', 'Compound'),
      'container': ('body', 'Container'),
      #'questions': ('body', 'question_list'),
      'table': ('tables', 'RSTTable'),
      'csv-table': ('tables', 'CSVTable'),
      'list-table': ('tables', 'ListTable'),
      'image': ('images', 'Image'),
      'figure': ('images', 'Figure'),
      'contents': ('parts', 'Contents'),
      'sectnum': ('parts', 'Sectnum'),
      'header': ('parts', 'Header'),
      'footer': ('parts', 'Footer'),
      #'footnotes': ('parts', 'footnotes'),
      #'citations': ('parts', 'citations'),
      'target-notes': ('references', 'TargetNotes'),
      'meta': ('html', 'Meta'),
      #'imagemap': ('html', 'imagemap'),
      'raw': ('misc', 'Raw'),
      'include': ('misc', 'Include'),
      'replace': ('misc', 'Replace'),
      'unicode': ('misc', 'Unicode'),
      'class': ('misc', 'Class'),
      'role': ('misc', 'Role'),
      'default-role': ('misc', 'DefaultRole'),
      'title': ('misc', 'Title'),
      'date': ('misc', 'Date'),
      'restructuredtext-test-directive': ('misc', 'TestDirective'),}
"""Mapping of directive name to (module name, class name).  The
directive name is canonical & must be lowercase.  Language-dependent
names are defined in the ``language`` subpackage."""

_directives = {}
"""Cache of imported directives."""

def directive(directive_name, language_module, document):
    """
    Locate and return a directive function from its language-dependent name.
    If not found in the current language, check English.  Return None if the
    named directive cannot be found.

    Returns a ``(directive, system_messages)`` tuple; ``directive`` is None
    on failure.
    """
    normname = directive_name.lower()
    messages = []
    msg_text = []
    # Fast path: directive already resolved and cached under this name.
    if normname in _directives:
        return _directives[normname], messages
    canonicalname = None
    try:
        canonicalname = language_module.directives[normname]
    except AttributeError as error:
        msg_text.append('Problem retrieving directive entry from language '
                        'module %r: %s.' % (language_module, error))
    except KeyError:
        msg_text.append('No directive entry for "%s" in module "%s".'
                        % (directive_name, language_module.__name__))
    if not canonicalname:
        # Fall back to the English directive table before giving up.
        try:
            canonicalname = _fallback_language_module.directives[normname]
            msg_text.append('Using English fallback for directive "%s".'
                            % directive_name)
        except KeyError:
            msg_text.append('Trying "%s" as canonical directive name.'
                            % directive_name)
            # The canonical name should be an English name, but just in case:
            canonicalname = normname
    if msg_text:
        message = document.reporter.info(
            '\n'.join(msg_text), line=document.current_line)
        messages.append(message)
    try:
        modulename, classname = _directive_registry[canonicalname]
    except KeyError:
        # Error handling done by caller.
        return None, messages
    try:
        # level=1: import relative to this package (docutils...directives).
        module = __import__(modulename, globals(), locals(), level=1)
    except ImportError as detail:
        messages.append(document.reporter.error(
            'Error importing directive module "%s" (directive "%s"):\n%s'
            % (modulename, directive_name, detail),
            line=document.current_line))
        return None, messages
    try:
        directive = getattr(module, classname)
        _directives[normname] = directive
    except AttributeError:
        messages.append(document.reporter.error(
            'No directive class "%s" in module "%s" (directive "%s").'
            % (classname, modulename, directive_name),
            line=document.current_line))
        return None, messages
    return directive, messages

def register_directive(name, directive):
    """
    Register a nonstandard application-defined directive function.
    Language lookups are not needed for such functions.
    """
    _directives[name] = directive

def flag(argument):
    """
    Check for a valid flag option (no argument) and return ``None``.
    (Directive option conversion function.)

    Raise ``ValueError`` if an argument is found.
    """
    if argument and argument.strip():
        raise ValueError('no argument is allowed; "%s" supplied' % argument)
    else:
        return None

def unchanged_required(argument):
    """
    Return the argument text, unchanged.
    (Directive option conversion function.)

    Raise ``ValueError`` if no argument is found.
    """
    if argument is None:
        raise ValueError('argument required but none supplied')
    else:
        return argument  # unchanged!

def unchanged(argument):
    """
    Return the argument text, unchanged.
    (Directive option conversion function.)

    No argument implies empty string ("").
    """
    if argument is None:
        return ''
    else:
        return argument  # unchanged!

def path(argument):
    """
    Return the path argument unwrapped (with newlines removed).
    (Directive option conversion function.)

    Raise ``ValueError`` if no argument is found.
    """
    if argument is None:
        raise ValueError('argument required but none supplied')
    else:
        # Re-join a path that was wrapped over several source lines.
        path = ''.join([s.strip() for s in argument.splitlines()])
        return path

def uri(argument):
    """
    Return the URI argument with whitespace removed.
    (Directive option conversion function.)

    Raise ``ValueError`` if no argument is found.
    """
    if argument is None:
        raise ValueError('argument required but none supplied')
    else:
        uri = ''.join(argument.split())
        return uri

def nonnegative_int(argument):
    """
    Check for a nonnegative integer argument; raise ``ValueError`` if not.
    (Directive option conversion function.)
    """
    value = int(argument)
    if value < 0:
        raise ValueError('negative value; must be positive or zero')
    return value

def percentage(argument):
    """
    Check for an integer percentage value with optional percent sign.
    (Directive option conversion function.)
    """
    try:
        # rstrip takes a *set* of characters: strips trailing spaces and '%'.
        argument = argument.rstrip(' %')
    except AttributeError:
        pass
    return nonnegative_int(argument)

# CSS length units accepted by the measure-conversion functions below.
length_units = ['em', 'ex', 'px', 'in', 'cm', 'mm', 'pt', 'pc']

def get_measure(argument, units):
    """
    Check for a positive argument of one of the units and return a
    normalized string of the form "<value><unit>" (without space in
    between).

    To be called from directive option conversion functions.
    """
    match = re.match(r'^([0-9.]+) *(%s)$' % '|'.join(units), argument)
    try:
        # Also validates that group(1) is a parseable number; a failed match
        # surfaces here as AttributeError (match is None).
        float(match.group(1))
    except (AttributeError, ValueError):
        raise ValueError(
            'not a positive measure of one of the following units:\n%s'
            % ' '.join(['"%s"' % i for i in units]))
    return match.group(1) + match.group(2)

def length_or_unitless(argument):
    return get_measure(argument, length_units + [''])

def length_or_percentage_or_unitless(argument, default=''):
    """
    Return normalized string of a length or percentage unit.

    Add <default> if there is no unit. Raise ValueError if the argument is not
    a positive measure of one of the valid CSS units (or without unit).

    >>> length_or_percentage_or_unitless('3 pt')
    '3pt'
    >>> length_or_percentage_or_unitless('3%', 'em')
    '3%'
    >>> length_or_percentage_or_unitless('3')
    '3'
    >>> length_or_percentage_or_unitless('3', 'px')
    '3px'
    """
    try:
        return get_measure(argument, length_units + ['%'])
    except ValueError:
        try:
            return get_measure(argument, ['']) + default
        except ValueError:
            # raise ValueError with list of valid units:
            return get_measure(argument, length_units + ['%'])

def class_option(argument):
    """
    Convert the argument into a list of ID-compatible strings and return it.
    (Directive option conversion function.)

    Raise ``ValueError`` if no argument is found.
    """
    if argument is None:
        raise ValueError('argument required but none supplied')
    names = argument.split()
    class_names = []
    for name in names:
        class_name = nodes.make_id(name)
        if not class_name:
            raise ValueError('cannot make "%s" into a class name' % name)
        class_names.append(class_name)
    return class_names

# Matches hex character codes: "0x"/"x"/"\x"/"U+"/"u"/"\u" prefixes or
# XML-style numeric entities like "&#x262E;".
unicode_pattern = re.compile(
    r'(?:0x|x|\\x|U\+?|\\u)([0-9a-f]+)$|&#x([0-9a-f]+);$', re.IGNORECASE)

def unicode_code(code):
    r"""
    Convert a Unicode character code to a Unicode character.
    (Directive option conversion function.)

    Codes may be decimal numbers, hexadecimal numbers (prefixed by ``0x``,
    ``x``, ``\x``, ``U+``, ``u``, or ``\u``; e.g. ``U+262E``), or XML-style
    numeric character entities (e.g. ``&#x262E;``).  Other text remains as-is.

    Raise ValueError for illegal Unicode code values.
    """
    try:
        if code.isdigit():                  # decimal number
            return chr(int(code))
        else:
            match = unicode_pattern.match(code)
            if match:                       # hex number
                value = match.group(1) or match.group(2)
                return chr(int(value, 16))
            else:                           # other text
                return code
    except OverflowError as detail:
        raise ValueError('code too large (%s)' % detail)

def single_char_or_unicode(argument):
    """
    A single character is returned as-is.  Unicode characters codes are
    converted as in `unicode_code`.  (Directive option conversion function.)
    """
    char = unicode_code(argument)
    if len(char) > 1:
        raise ValueError('%r invalid; must be a single character or '
                         'a Unicode code' % char)
    return char

def single_char_or_whitespace_or_unicode(argument):
    """
    As with `single_char_or_unicode`, but "tab" and "space" are also supported.
    (Directive option conversion function.)
    """
    if argument == 'tab':
        char = '\t'
    elif argument == 'space':
        char = ' '
    else:
        char = single_char_or_unicode(argument)
    return char

def positive_int(argument):
    """
    Converts the argument into an integer.  Raises ValueError for negative,
    zero, or non-integer values.  (Directive option conversion function.)
    """
    value = int(argument)
    if value < 1:
        raise ValueError('negative or zero value; must be positive')
    return value

def positive_int_list(argument):
    """
    Converts a space- or comma-separated list of values into a Python list
    of integers.
    (Directive option conversion function.)

    Raises ValueError for non-positive-integer values.
    """
    if ',' in argument:
        entries = argument.split(',')
    else:
        entries = argument.split()
    return [positive_int(entry) for entry in entries]

def encoding(argument):
    """
    Verifies the encoding argument by lookup.
    (Directive option conversion function.)

    Raises ValueError for unknown encodings.
    """
    try:
        codecs.lookup(argument)
    except LookupError:
        raise ValueError('unknown encoding: "%s"' % argument)
    return argument

def choice(argument, values):
    """
    Directive option utility function, supplied to enable options whose
    argument must be a member of a finite set of possible values (must be
    lower case).  A custom conversion function must be written to use it.

    For example::

        from docutils.parsers.rst import directives

        def yesno(argument):
            return directives.choice(argument, ('yes', 'no'))

    Raise ``ValueError`` if no argument is found or if the argument's value is
    not valid (not an entry in the supplied list).
    """
    try:
        value = argument.lower().strip()
    except AttributeError:
        raise ValueError('must supply an argument; choose from %s'
                         % format_values(values))
    if value in values:
        return value
    else:
        raise ValueError('"%s" unknown; choose from %s'
                         % (argument, format_values(values)))

def format_values(values):
    # Render ('a', 'b', 'c') as: "a", "b", or "c"
    return '%s, or "%s"' % (', '.join(['"%s"' % s for s in values[:-1]]),
                            values[-1])
apache-2.0
kvar/ansible
test/units/modules/storage/netapp/test_na_ontap_igroup.py
43
9635
# (c) 2018, NetApp, Inc # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) ''' unit test template for ONTAP Ansible module ''' from __future__ import print_function import json import pytest from units.compat import unittest from units.compat.mock import patch, Mock from ansible.module_utils import basic from ansible.module_utils._text import to_bytes import ansible.module_utils.netapp as netapp_utils from ansible.modules.storage.netapp.na_ontap_igroup \ import NetAppOntapIgroup as igroup # module under test if not netapp_utils.has_netapp_lib(): pytestmark = pytest.mark.skip('skipping as missing required netapp_lib') def set_module_args(args): """prepare arguments so that they will be picked up during module creation""" args = json.dumps({'ANSIBLE_MODULE_ARGS': args}) basic._ANSIBLE_ARGS = to_bytes(args) # pylint: disable=protected-access class AnsibleExitJson(Exception): """Exception class to be raised by module.exit_json and caught by the test case""" pass class AnsibleFailJson(Exception): """Exception class to be raised by module.fail_json and caught by the test case""" pass def exit_json(*args, **kwargs): # pylint: disable=unused-argument """function to patch over exit_json; package return data into an exception""" if 'changed' not in kwargs: kwargs['changed'] = False raise AnsibleExitJson(kwargs) def fail_json(*args, **kwargs): # pylint: disable=unused-argument """function to patch over fail_json; package return data into an exception""" kwargs['failed'] = True raise AnsibleFailJson(kwargs) class MockONTAPConnection(object): ''' mock server connection to ONTAP host ''' def __init__(self, kind=None, data=None): ''' save arguments ''' self.kind = kind self.data = data self.xml_in = None self.xml_out = None def invoke_successfully(self, xml, enable_tunneling): # pylint: disable=unused-argument ''' mock invoke_successfully returning xml data ''' self.xml_in = xml if self.kind == 'igroup': xml = self.build_igroup() if 
self.kind == 'igroup_no_initiators': xml = self.build_igroup_no_initiators() self.xml_out = xml return xml @staticmethod def build_igroup(): ''' build xml data for initiator ''' xml = netapp_utils.zapi.NaElement('xml') attributes = { 'num-records': 1, 'attributes-list': { 'initiator-group-info': { 'initiators': [ { 'initiator-info': { 'initiator-name': 'init1' }}, { 'initiator-info': { 'initiator-name': 'init2' }} ] } } } xml.translate_struct(attributes) return xml @staticmethod def build_igroup_no_initiators(): ''' build xml data for igroup with no initiators ''' xml = netapp_utils.zapi.NaElement('xml') attributes = { 'num-records': 1, 'attributes-list': { 'initiator-group-info': { 'vserver': 'test' } } } xml.translate_struct(attributes) return xml class TestMyModule(unittest.TestCase): ''' a group of related Unit Tests ''' def setUp(self): self.mock_module_helper = patch.multiple(basic.AnsibleModule, exit_json=exit_json, fail_json=fail_json) self.mock_module_helper.start() self.addCleanup(self.mock_module_helper.stop) self.server = MockONTAPConnection() def mock_args(self): return { 'vserver': 'vserver', 'name': 'test', 'initiators': 'init1', 'ostype': 'linux', 'initiator_group_type': 'fcp', 'bind_portset': 'true', 'hostname': 'hostname', 'username': 'username', 'password': 'password' } def get_igroup_mock_object(self, kind=None): """ Helper method to return an na_ontap_igroup object :param kind: passes this param to MockONTAPConnection() :return: na_ontap_igroup object """ obj = igroup() obj.autosupport_log = Mock(return_value=None) if kind is None: obj.server = MockONTAPConnection() else: obj.server = MockONTAPConnection(kind=kind) return obj def test_module_fail_when_required_args_missing(self): ''' required arguments are reported as errors ''' with pytest.raises(AnsibleFailJson) as exc: set_module_args({}) igroup() def test_get_nonexistent_igroup(self): ''' Test if get_igroup returns None for non-existent igroup ''' data = self.mock_args() 
set_module_args(data) result = self.get_igroup_mock_object().get_igroup('dummy') assert result is None def test_get_existing_igroup_with_initiators(self): ''' Test if get_igroup returns list of existing initiators ''' data = self.mock_args() set_module_args(data) result = self.get_igroup_mock_object('igroup').get_igroup(data['name']) assert data['initiators'] in result['initiators'] assert result['initiators'] == ['init1', 'init2'] def test_get_existing_igroup_without_initiators(self): ''' Test if get_igroup returns empty list() ''' data = self.mock_args() set_module_args(data) result = self.get_igroup_mock_object('igroup_no_initiators').get_igroup(data['name']) assert result['initiators'] == [] @patch('ansible.modules.storage.netapp.na_ontap_igroup.NetAppOntapIgroup.add_initiators') @patch('ansible.modules.storage.netapp.na_ontap_igroup.NetAppOntapIgroup.remove_initiators') def test_modify_initiator_calls_add_and_remove(self, remove, add): '''Test remove_initiator() is called followed by add_initiator() on modify operation''' data = self.mock_args() data['initiators'] = 'replacewithme' set_module_args(data) obj = self.get_igroup_mock_object('igroup') with pytest.raises(AnsibleExitJson) as exc: current = obj.get_igroup(data['name']) obj.apply() remove.assert_called_with(current['initiators']) add.assert_called_with() @patch('ansible.modules.storage.netapp.na_ontap_igroup.NetAppOntapIgroup.modify_initiator') def test_modify_called_from_add(self, modify): '''Test remove_initiator() and add_initiator() calls modify''' data = self.mock_args() data['initiators'] = 'replacewithme' add, remove = 'igroup-add', 'igroup-remove' set_module_args(data) with pytest.raises(AnsibleExitJson) as exc: self.get_igroup_mock_object('igroup_no_initiators').apply() modify.assert_called_with('replacewithme', add) assert modify.call_count == 1 # remove nothing, add 1 new @patch('ansible.modules.storage.netapp.na_ontap_igroup.NetAppOntapIgroup.modify_initiator') def 
test_modify_called_from_remove(self, modify): '''Test remove_initiator() and add_initiator() calls modify''' data = self.mock_args() data['initiators'] = '' remove = 'igroup-remove' set_module_args(data) with pytest.raises(AnsibleExitJson) as exc: self.get_igroup_mock_object('igroup').apply() modify.assert_called_with('init2', remove) assert modify.call_count == 2 # remove existing 2, add nothing @patch('ansible.modules.storage.netapp.na_ontap_igroup.NetAppOntapIgroup.add_initiators') def test_successful_create(self, add): ''' Test successful create ''' set_module_args(self.mock_args()) with pytest.raises(AnsibleExitJson) as exc: self.get_igroup_mock_object().apply() assert exc.value.args[0]['changed'] add.assert_called_with() def test_successful_delete(self): ''' Test successful delete ''' data = self.mock_args() data['state'] = 'absent' set_module_args(self.mock_args()) with pytest.raises(AnsibleExitJson) as exc: self.get_igroup_mock_object('igroup').apply() assert exc.value.args[0]['changed'] def test_successful_modify(self): ''' Test successful modify ''' data = self.mock_args() data['initiators'] = 'new' set_module_args(self.mock_args()) with pytest.raises(AnsibleExitJson) as exc: self.get_igroup_mock_object('igroup').apply() assert exc.value.args[0]['changed'] @patch('ansible.modules.storage.netapp.na_ontap_igroup.NetAppOntapIgroup.get_igroup') def test_successful_rename(self, get_vserver): '''Test successful rename''' data = self.mock_args() data['from_name'] = 'test' data['name'] = 'test_new' set_module_args(data) current = { 'initiators': ['init1', 'init2'] } get_vserver.side_effect = [ None, current ] with pytest.raises(AnsibleExitJson) as exc: self.get_igroup_mock_object().apply() assert exc.value.args[0]['changed']
gpl-3.0
abhattad4/Digi-Menu
digimenu2/build/lib.linux-x86_64-2.7/django/template/loaders/filesystem.py
95
1761
""" Wrapper for loading templates from the filesystem. """ import io from django.core.exceptions import SuspiciousFileOperation from django.template.base import TemplateDoesNotExist from django.utils._os import safe_join from .base import Loader as BaseLoader class Loader(BaseLoader): is_usable = True def get_template_sources(self, template_name, template_dirs=None): """ Returns the absolute paths to "template_name", when appended to each directory in "template_dirs". Any paths that don't lie inside one of the template dirs are excluded from the result set, for security reasons. """ if not template_dirs: template_dirs = self.engine.dirs for template_dir in template_dirs: try: yield safe_join(template_dir, template_name) except SuspiciousFileOperation: # The joined path was located outside of this template_dir # (it might be inside another one, so this isn't fatal). pass def load_template_source(self, template_name, template_dirs=None): tried = [] for filepath in self.get_template_sources(template_name, template_dirs): try: with io.open(filepath, encoding=self.engine.file_charset) as fp: return fp.read(), filepath except IOError: tried.append(filepath) if tried: error_msg = "Tried %s" % tried else: error_msg = ("Your template directories configuration is empty. " "Change it to point to at least one template directory.") raise TemplateDoesNotExist(error_msg) load_template_source.is_usable = True
bsd-3-clause
gfyoung/pandas
pandas/tests/tslibs/test_conversion.py
3
3973
from datetime import datetime import numpy as np import pytest from pytz import UTC from pandas._libs.tslibs import ( OutOfBoundsTimedelta, conversion, iNaT, timezones, tzconversion, ) from pandas import Timestamp, date_range import pandas._testing as tm def _compare_utc_to_local(tz_didx): def f(x): return tzconversion.tz_convert_from_utc_single(x, tz_didx.tz) result = tzconversion.tz_convert_from_utc(tz_didx.asi8, tz_didx.tz) expected = np.vectorize(f)(tz_didx.asi8) tm.assert_numpy_array_equal(result, expected) def _compare_local_to_utc(tz_didx, naive_didx): # Check that tz_localize behaves the same vectorized and pointwise. err1 = err2 = None try: result = tzconversion.tz_localize_to_utc(naive_didx.asi8, tz_didx.tz) err1 = None except Exception as err: err1 = err try: expected = naive_didx.map(lambda x: x.tz_localize(tz_didx.tz)).asi8 except Exception as err: err2 = err if err1 is not None: assert type(err1) == type(err2) else: assert err2 is None tm.assert_numpy_array_equal(result, expected) def test_tz_convert_single_matches_tz_convert_hourly(tz_aware_fixture): tz = tz_aware_fixture tz_didx = date_range("2014-03-01", "2015-01-10", freq="H", tz=tz) naive_didx = date_range("2014-03-01", "2015-01-10", freq="H") _compare_utc_to_local(tz_didx) _compare_local_to_utc(tz_didx, naive_didx) @pytest.mark.parametrize("freq", ["D", "A"]) def test_tz_convert_single_matches_tz_convert(tz_aware_fixture, freq): tz = tz_aware_fixture tz_didx = date_range("2000-01-01", "2020-01-01", freq=freq, tz=tz) naive_didx = date_range("2000-01-01", "2020-01-01", freq=freq) _compare_utc_to_local(tz_didx) _compare_local_to_utc(tz_didx, naive_didx) @pytest.mark.parametrize( "arr", [ pytest.param(np.array([], dtype=np.int64), id="empty"), pytest.param(np.array([iNaT], dtype=np.int64), id="all_nat"), ], ) def test_tz_convert_corner(arr): result = tzconversion.tz_convert_from_utc(arr, timezones.maybe_get_tz("Asia/Tokyo")) tm.assert_numpy_array_equal(result, arr) def test_tz_convert_readonly(): # 
GH#35530 arr = np.array([0], dtype=np.int64) arr.setflags(write=False) result = tzconversion.tz_convert_from_utc(arr, UTC) tm.assert_numpy_array_equal(result, arr) @pytest.mark.parametrize("copy", [True, False]) @pytest.mark.parametrize("dtype", ["M8[ns]", "M8[s]"]) def test_length_zero_copy(dtype, copy): arr = np.array([], dtype=dtype) result = conversion.ensure_datetime64ns(arr, copy=copy) assert result.base is (None if copy else arr) def test_ensure_datetime64ns_bigendian(): # GH#29684 arr = np.array([np.datetime64(1, "ms")], dtype=">M8[ms]") result = conversion.ensure_datetime64ns(arr) expected = np.array([np.datetime64(1, "ms")], dtype="M8[ns]") tm.assert_numpy_array_equal(result, expected) def test_ensure_timedelta64ns_overflows(): arr = np.arange(10).astype("m8[Y]") * 100 msg = r"Out of bounds for nanosecond timedelta64\[Y\] 900" with pytest.raises(OutOfBoundsTimedelta, match=msg): conversion.ensure_timedelta64ns(arr) class SubDatetime(datetime): pass @pytest.mark.parametrize( "dt, expected", [ pytest.param( Timestamp("2000-01-01"), Timestamp("2000-01-01", tz=UTC), id="timestamp" ), pytest.param( datetime(2000, 1, 1), datetime(2000, 1, 1, tzinfo=UTC), id="datetime" ), pytest.param( SubDatetime(2000, 1, 1), SubDatetime(2000, 1, 1, tzinfo=UTC), id="subclassed_datetime", ), ], ) def test_localize_pydatetime_dt_types(dt, expected): # GH 25851 # ensure that subclassed datetime works with # localize_pydatetime result = conversion.localize_pydatetime(dt, UTC) assert result == expected
bsd-3-clause
tilde-lab/tilde
tilde/parsers/CRYSTAL/CRYSTAL.py
1
2021
""" An updated CRYSTAL logs parser wrapping a standalone parser called pycrystal Authors: Evgeny Blokhin and Andrey Sobolev """ import os.path from pycrystal import CRYSTOUT as _CRYSTOUT, CRYSTOUT_Error from tilde.parsers import Output class CRYSTOUT(Output): def __init__(self, filename): Output.__init__(self, filename) try: result = _CRYSTOUT(filename) except CRYSTOUT_Error as ex: raise RuntimeError(ex) for key in self.info: if result.info.get(key): self.info[key] = result.info[key] self.structures = result.info['structures'] self.convergence = result.info['convergence'] self.tresholds = result.info['optgeom'] self.ncycles = result.info['ncycles'] self.phonons = result.info['phonons'] self.electrons = result.info['electrons'] self.electrons['basis_set']['ps'] = self.electrons['basis_set']['ecp'] self.elastic = result.info['elastic'] self.info['framework'] = 0x3 self.info['ansatz'] = 0x3 self.related_files.append(filename) cur_folder = os.path.dirname(filename) check_files = [] if filename.endswith('.cryst.out'): check_files = [filename.replace('.cryst.out', '') + '.d12', filename.replace('.cryst.out', '') + '.gui'] elif filename.endswith('.out'): check_files = [filename.replace('.out', '') + '.d12', filename.replace('.out', '') + '.gui'] for check in check_files: if os.path.exists(os.path.join(cur_folder, check)): self.related_files.append(os.path.join(cur_folder, check)) err_file = os.path.join(cur_folder, 'fort.87') if os.path.exists(err_file): with open(err_file, 'r') as f: err_msg = f.readline() if err_msg: self.info['warns'].append(err_msg) @staticmethod def fingerprints(test_string): return _CRYSTOUT.detect(test_string)
mit
theguardian/CherryStrap
cherrypy/test/test_dynamicobjectmapping.py
22
12517
import cherrypy from cherrypy._cpcompat import sorted, unicodestr from cherrypy._cptree import Application from cherrypy.test import helper script_names = ["", "/foo", "/users/fred/blog", "/corp/blog"] def setup_server(): class SubSubRoot: def index(self): return "SubSubRoot index" index.exposed = True def default(self, *args): return "SubSubRoot default" default.exposed = True def handler(self): return "SubSubRoot handler" handler.exposed = True def dispatch(self): return "SubSubRoot dispatch" dispatch.exposed = True subsubnodes = { '1': SubSubRoot(), '2': SubSubRoot(), } class SubRoot: def index(self): return "SubRoot index" index.exposed = True def default(self, *args): return "SubRoot %s" % (args,) default.exposed = True def handler(self): return "SubRoot handler" handler.exposed = True def _cp_dispatch(self, vpath): return subsubnodes.get(vpath[0], None) subnodes = { '1': SubRoot(), '2': SubRoot(), } class Root: def index(self): return "index" index.exposed = True def default(self, *args): return "default %s" % (args,) default.exposed = True def handler(self): return "handler" handler.exposed = True def _cp_dispatch(self, vpath): return subnodes.get(vpath[0]) #-------------------------------------------------------------------------- # DynamicNodeAndMethodDispatcher example. 
# This example exposes a fairly naive HTTP api class User(object): def __init__(self, id, name): self.id = id self.name = name def __unicode__(self): return unicode(self.name) def __str__(self): return str(self.name) user_lookup = { 1: User(1, 'foo'), 2: User(2, 'bar'), } def make_user(name, id=None): if not id: id = max(*list(user_lookup.keys())) + 1 user_lookup[id] = User(id, name) return id class UserContainerNode(object): exposed = True def POST(self, name): """ Allow the creation of a new Object """ return "POST %d" % make_user(name) def GET(self): return unicodestr(sorted(user_lookup.keys())) def dynamic_dispatch(self, vpath): try: id = int(vpath[0]) except (ValueError, IndexError): return None return UserInstanceNode(id) class UserInstanceNode(object): exposed = True def __init__(self, id): self.id = id self.user = user_lookup.get(id, None) # For all but PUT methods there MUST be a valid user identified # by self.id if not self.user and cherrypy.request.method != 'PUT': raise cherrypy.HTTPError(404) def GET(self, *args, **kwargs): """ Return the appropriate representation of the instance. """ return unicodestr(self.user) def POST(self, name): """ Update the fields of the user instance. """ self.user.name = name return "POST %d" % self.user.id def PUT(self, name): """ Create a new user with the specified id, or edit it if it already exists """ if self.user: # Edit the current user self.user.name = name return "PUT %d" % self.user.id else: # Make a new user with said attributes. return "PUT %d" % make_user(name, self.id) def DELETE(self): """ Delete the user specified at the id. """ id = self.user.id del user_lookup[self.user.id] del self.user return "DELETE %d" % id class ABHandler: class CustomDispatch: def index(self, a, b): return "custom" index.exposed = True def _cp_dispatch(self, vpath): """Make sure that if we don't pop anything from vpath, processing still works. 
""" return self.CustomDispatch() def index(self, a, b=None): body = ['a:' + str(a)] if b is not None: body.append(',b:' + str(b)) return ''.join(body) index.exposed = True def delete(self, a, b): return 'deleting ' + str(a) + ' and ' + str(b) delete.exposed = True class IndexOnly: def _cp_dispatch(self, vpath): """Make sure that popping ALL of vpath still shows the index handler. """ while vpath: vpath.pop() return self def index(self): return "IndexOnly index" index.exposed = True class DecoratedPopArgs: """Test _cp_dispatch with @cherrypy.popargs.""" def index(self): return "no params" index.exposed = True def hi(self): return "hi was not interpreted as 'a' param" hi.exposed = True DecoratedPopArgs = cherrypy.popargs( 'a', 'b', handler=ABHandler())(DecoratedPopArgs) class NonDecoratedPopArgs: """Test _cp_dispatch = cherrypy.popargs()""" _cp_dispatch = cherrypy.popargs('a') def index(self, a): return "index: " + str(a) index.exposed = True class ParameterizedHandler: """Special handler created for each request""" def __init__(self, a): self.a = a def index(self): if 'a' in cherrypy.request.params: raise Exception( "Parameterized handler argument ended up in " "request.params") return self.a index.exposed = True class ParameterizedPopArgs: """Test cherrypy.popargs() with a function call handler""" ParameterizedPopArgs = cherrypy.popargs( 'a', handler=ParameterizedHandler)(ParameterizedPopArgs) Root.decorated = DecoratedPopArgs() Root.undecorated = NonDecoratedPopArgs() Root.index_only = IndexOnly() Root.parameter_test = ParameterizedPopArgs() Root.users = UserContainerNode() md = cherrypy.dispatch.MethodDispatcher('dynamic_dispatch') for url in script_names: conf = {'/': { 'user': (url or "/").split("/")[-2], }, '/users': { 'request.dispatch': md }, } cherrypy.tree.mount(Root(), url, conf) class DynamicObjectMappingTest(helper.CPWebCase): setup_server = staticmethod(setup_server) def testObjectMapping(self): for url in script_names: prefix = self.script_name = url 
self.getPage('/') self.assertBody('index') self.getPage('/handler') self.assertBody('handler') # Dynamic dispatch will succeed here for the subnodes # so the subroot gets called self.getPage('/1/') self.assertBody('SubRoot index') self.getPage('/2/') self.assertBody('SubRoot index') self.getPage('/1/handler') self.assertBody('SubRoot handler') self.getPage('/2/handler') self.assertBody('SubRoot handler') # Dynamic dispatch will fail here for the subnodes # so the default gets called self.getPage('/asdf/') self.assertBody("default ('asdf',)") self.getPage('/asdf/asdf') self.assertBody("default ('asdf', 'asdf')") self.getPage('/asdf/handler') self.assertBody("default ('asdf', 'handler')") # Dynamic dispatch will succeed here for the subsubnodes # so the subsubroot gets called self.getPage('/1/1/') self.assertBody('SubSubRoot index') self.getPage('/2/2/') self.assertBody('SubSubRoot index') self.getPage('/1/1/handler') self.assertBody('SubSubRoot handler') self.getPage('/2/2/handler') self.assertBody('SubSubRoot handler') self.getPage('/2/2/dispatch') self.assertBody('SubSubRoot dispatch') # The exposed dispatch will not be called as a dispatch # method. self.getPage('/2/2/foo/foo') self.assertBody("SubSubRoot default") # Dynamic dispatch will fail here for the subsubnodes # so the SubRoot gets called self.getPage('/1/asdf/') self.assertBody("SubRoot ('asdf',)") self.getPage('/1/asdf/asdf') self.assertBody("SubRoot ('asdf', 'asdf')") self.getPage('/1/asdf/handler') self.assertBody("SubRoot ('asdf', 'handler')") def testMethodDispatch(self): # GET acts like a container self.getPage("/users") self.assertBody("[1, 2]") self.assertHeader('Allow', 'GET, HEAD, POST') # POST to the container URI allows creation self.getPage("/users", method="POST", body="name=baz") self.assertBody("POST 3") self.assertHeader('Allow', 'GET, HEAD, POST') # POST to a specific instanct URI results in a 404 # as the resource does not exit. 
self.getPage("/users/5", method="POST", body="name=baz") self.assertStatus(404) # PUT to a specific instanct URI results in creation self.getPage("/users/5", method="PUT", body="name=boris") self.assertBody("PUT 5") self.assertHeader('Allow', 'DELETE, GET, HEAD, POST, PUT') # GET acts like a container self.getPage("/users") self.assertBody("[1, 2, 3, 5]") self.assertHeader('Allow', 'GET, HEAD, POST') test_cases = ( (1, 'foo', 'fooupdated', 'DELETE, GET, HEAD, POST, PUT'), (2, 'bar', 'barupdated', 'DELETE, GET, HEAD, POST, PUT'), (3, 'baz', 'bazupdated', 'DELETE, GET, HEAD, POST, PUT'), (5, 'boris', 'borisupdated', 'DELETE, GET, HEAD, POST, PUT'), ) for id, name, updatedname, headers in test_cases: self.getPage("/users/%d" % id) self.assertBody(name) self.assertHeader('Allow', headers) # Make sure POSTs update already existings resources self.getPage("/users/%d" % id, method='POST', body="name=%s" % updatedname) self.assertBody("POST %d" % id) self.assertHeader('Allow', headers) # Make sure PUTs Update already existing resources. self.getPage("/users/%d" % id, method='PUT', body="name=%s" % updatedname) self.assertBody("PUT %d" % id) self.assertHeader('Allow', headers) # Make sure DELETES Remove already existing resources. 
self.getPage("/users/%d" % id, method='DELETE') self.assertBody("DELETE %d" % id) self.assertHeader('Allow', headers) # GET acts like a container self.getPage("/users") self.assertBody("[]") self.assertHeader('Allow', 'GET, HEAD, POST') def testVpathDispatch(self): self.getPage("/decorated/") self.assertBody("no params") self.getPage("/decorated/hi") self.assertBody("hi was not interpreted as 'a' param") self.getPage("/decorated/yo/") self.assertBody("a:yo") self.getPage("/decorated/yo/there/") self.assertBody("a:yo,b:there") self.getPage("/decorated/yo/there/delete") self.assertBody("deleting yo and there") self.getPage("/decorated/yo/there/handled_by_dispatch/") self.assertBody("custom") self.getPage("/undecorated/blah/") self.assertBody("index: blah") self.getPage("/index_only/a/b/c/d/e/f/g/") self.assertBody("IndexOnly index") self.getPage("/parameter_test/argument2/") self.assertBody("argument2")
gpl-2.0
niboshi/chainer
tests/chainerx_tests/math_utils.py
4
9139
import unittest import numpy import chainerx from chainerx_tests import array_utils from chainerx_tests import dtype_utils class IgnoreNumpyFloatingPointError(object): def __enter__(self): self.old_settings = numpy.seterr(all='ignore') def __exit__(self, *args): numpy.seterr(**self.old_settings) class UnaryMathTestBase(object): input = None def setup(self): in_dtype, = self.in_dtypes in_kind = numpy.dtype(in_dtype).kind if numpy.dtype(in_dtype).kind != 'f': self.skip_backward_test = True self.skip_double_backward_test = True if in_dtype == 'float16': self.check_forward_options.update({'rtol': 1e-3, 'atol': 1e-3}) self.check_backward_options.update({'rtol': 3e-3, 'atol': 3e-3}) self.check_double_backward_options.update( {'rtol': 1e-2, 'atol': 1e-2}) else: self.check_backward_options.update({'rtol': 1e-3, 'atol': 1e-4}) self.check_double_backward_options.update( {'rtol': 1e-3, 'atol': 1e-4}) input = self.input if (in_kind == 'u' and isinstance(input, (int, float)) and input < 0): raise unittest.SkipTest( 'Combination of uint dtype and negative input cannot be ' 'tested') def generate_inputs(self): in_dtype, = self.in_dtypes if isinstance(self.input, numpy.ndarray): return self.input.astype(in_dtype), if self.input == 'random': return array_utils.uniform(self.shape, in_dtype), if isinstance(self.input, (bool, int, float)): return numpy.full(self.shape, self.input, dtype=in_dtype), assert False def forward_xp(self, inputs, xp): a, = inputs # This cast was introduced in order to avoid decreasing precision. # ex.) numpy.sqrt(x) becomes a float16 array where x is an int8 array. 
a = dtype_utils.cast_if_numpy_array(xp, a, self.out_dtype) with IgnoreNumpyFloatingPointError(): y = self.func(xp, a) y = dtype_utils.cast_if_numpy_array(xp, y, self.out_dtype) return y, class BinaryMathTestBase(object): def setup(self): in_dtype1, in_dtype2 = self.in_dtypes kind1 = numpy.dtype(in_dtype1).kind kind2 = numpy.dtype(in_dtype2).kind if kind1 != 'f' or kind2 != 'f': self.skip_backward_test = True self.skip_double_backward_test = True if in_dtype1 == 'float16' or in_dtype2 == 'float16': self.check_forward_options.update({'rtol': 1e-3, 'atol': 1e-3}) self.check_backward_options.update({'rtol': 1e-2, 'atol': 3e-3}) self.check_double_backward_options.update( {'rtol': 1e-2, 'atol': 3e-3}) def generate_inputs(self): in_dtype1, in_dtype2 = self.in_dtypes in_shape1, in_shape2 = self.in_shapes if self.input_lhs == 'random': a = array_utils.uniform(in_shape1, in_dtype1) elif isinstance(self.input_lhs, (bool, int, float)): a = numpy.full(in_shape1, self.input_lhs, dtype=in_dtype1) else: assert False if self.input_rhs == 'random': b = array_utils.uniform(in_shape2, in_dtype2) elif isinstance(self.input_rhs, (bool, int, float)): b = numpy.full(in_shape2, self.input_rhs, dtype=in_dtype2) else: assert False return a, b def forward_xp(self, inputs, xp): a, b = inputs # This cast was introduced in order to avoid decreasing precision. # ex.) x / y becomes a float16 array where x and y are an int8 arrays. 
a = dtype_utils.cast_if_numpy_array(xp, a, self.out_dtype) b = dtype_utils.cast_if_numpy_array(xp, b, self.out_dtype) with IgnoreNumpyFloatingPointError(): y = self.func(xp, a, b) y = dtype_utils.cast_if_numpy_array(xp, y, self.out_dtype) return y, class InplaceUnaryMathTestBase(UnaryMathTestBase): skip_backward_test = True skip_double_backward_test = True def forward_xp(self, inputs, xp): a, = inputs if xp is chainerx: a_ = a.as_grad_stopped().copy() else: a_ = a.copy() with IgnoreNumpyFloatingPointError(): ret = self.func(xp, a_) assert ret is None # func should not return anything return a_, class InplaceBinaryMathTestBase(BinaryMathTestBase): skip_backward_test = True skip_double_backward_test = True def forward_xp(self, inputs, xp): a, b = inputs b = dtype_utils.cast_if_numpy_array(xp, b, a.dtype) if xp is chainerx: a_ = a.as_grad_stopped().copy() b_ = b.as_grad_stopped() else: a_ = a.copy() b_ = b with IgnoreNumpyFloatingPointError(): ret = self.func(xp, a_, b_) assert ret is None # func should not return anything return a_, def _convert_numpy_scalar(scalar, dtype): # Implicit casting in NumPy's multiply depends on the 'casting' argument, # which is not yet supported (ChainerX always casts). # Therefore, we explicitly cast the scalar to the dtype of the ndarray # before the multiplication for NumPy. 
return numpy.dtype(dtype).type(scalar) class MathScalarTestBase(UnaryMathTestBase): def func(self, xp, a): scalar = self.scalar_type(self.scalar_value) return self.func_scalar(xp, a, scalar) class InplaceMathScalarTestBase(InplaceUnaryMathTestBase): def func(self, xp, a): scalar = self.scalar_type(self.scalar_value) if xp is numpy: # This cast is to avoid TypeError in the following case # a: uint8 0-dim numpy.ndarray # scalar: int in_dtype, = self.in_dtypes scalar = _convert_numpy_scalar(scalar, in_dtype) return self.func_scalar(xp, a, scalar) def _permutate_shapes(shapes_list): # Permutates input shapes permutated_shapes_list = [] for in_shape1, in_shape2 in shapes_list: permutated_shapes_list.append((in_shape1, in_shape2)) permutated_shapes_list.append((in_shape2, in_shape1)) return list(set(permutated_shapes_list)) shapes_combination_inplace_binary = [ # Same shapes ((1,), (1,)), ((3, 4), (3, 4)), # Broadcast ((10,), (1,)), ((3, 4), (3, 1)), ((3, 4), (1, 4)), ((3, 4), (4,)), ((3, 4), (1, 1)), ((3, 4), (1,)), ((2, 3, 4), (1, 1, 1)), # 0-dim shape ((), ()), ((1,), ()), ((3,), ()), ((2, 3), ()), # 0-size shape ((0,), (0,)), ((0,), (1,)), ((0,), ()), ((2, 0, 3), (2, 0, 3)), # TODO(imanishi): Fix strides # ((2, 0, 3), (0, 1)), ] shapes_combination_binary = _permutate_shapes([ # Broadcast ((3, 1), (1, 4)), ((2, 1, 4), (3, 1)), # 0-size shape # TODO(imanishi): Fix strides # ((0, 1), (0, 1, 0)), ]) + _permutate_shapes(shapes_combination_inplace_binary) # An association list that associates a dtype to the type which ChainerX's # real-valued functions should return. in_out_float_dtypes_math_functions = [ # Float. (('float16',), 'float16'), (('float32',), 'float32'), (('float64',), 'float64'), ] in_out_dtypes_math_functions = in_out_float_dtypes_math_functions + [ # Signed int. (('int8',), 'float32'), (('int16',), 'float32'), (('int32',), 'float32'), (('int64',), 'float32'), # Unsigned int. (('uint8',), 'float32'), # Bool. 
(('bool_',), 'float32'), ] in_out_dtypes_math_binary_functions = [ # integer mixed (('int8', 'int16'), 'float32'), (('int8', 'int64'), 'float32'), (('int8', 'uint8'), 'float32'), (('int16', 'int32'), 'float32'), (('int16', 'int64'), 'float32'), (('int32', 'uint8'), 'float32'), (('int32', 'int8'), 'float32'), (('int64', 'int32'), 'float32'), (('int64', 'uint8'), 'float32'), (('uint8', 'int16'), 'float32'), # integer float mixed (('int8', 'float16'), 'float16'), (('int8', 'float64'), 'float64'), (('int16', 'float16'), 'float16'), (('int16', 'float32'), 'float32'), (('int32', 'float32'), 'float32'), (('int32', 'float64'), 'float64'), (('int64', 'float16'), 'float16'), (('int64', 'float64'), 'float64'), (('uint8', 'float16'), 'float16'), (('uint8', 'float32'), 'float32'), (('float32', 'int8'), 'float32'), (('float64', 'int16'), 'float64'), (('float16', 'int32'), 'float16'), (('float32', 'int64'), 'float32'), (('float64', 'uint8'), 'float64'), # float mixed (('float16', 'float16'), 'float16'), (('float16', 'float32'), 'float32'), (('float16', 'float64'), 'float64'), (('float32', 'float16'), 'float32'), (('float32', 'float32'), 'float32'), (('float32', 'float64'), 'float64'), (('float64', 'float16'), 'float64'), (('float64', 'float32'), 'float64'), (('float64', 'float64'), 'float64'), ]
mit
hgiemza/DIRAC
Core/scripts/dirac-service.py
9
1589
#!/usr/bin/env python ######################################################################## # File : dirac-service # Author : Adria Casajus ######################################################################## __RCSID__ = "$Id$" import sys from DIRAC.ConfigurationSystem.Client.LocalConfiguration import LocalConfiguration from DIRAC.FrameworkSystem.Client.Logger import gLogger from DIRAC.Core.DISET.ServiceReactor import ServiceReactor from DIRAC.Core.Utilities.DErrno import includeExtensionErrors localCfg = LocalConfiguration() positionalArgs = localCfg.getPositionalArguments() if len( positionalArgs ) == 0: gLogger.fatal( "You must specify which server to run!" ) sys.exit( 1 ) serverName = positionalArgs[0] localCfg.setConfigurationForServer( serverName ) localCfg.addMandatoryEntry( "Port" ) #localCfg.addMandatoryEntry( "HandlerPath" ) localCfg.addMandatoryEntry( "/DIRAC/Setup" ) localCfg.addDefaultEntry( "/DIRAC/Security/UseServerCertificate", "yes" ) localCfg.addDefaultEntry( "LogLevel", "INFO" ) localCfg.addDefaultEntry( "LogColor", True ) resultDict = localCfg.loadUserData() if not resultDict[ 'OK' ]: gLogger.initialize( serverName, "/" ) gLogger.error( "There were errors when loading configuration", resultDict[ 'Message' ] ) sys.exit( 1 ) includeExtensionErrors() serverToLaunch = ServiceReactor() result = serverToLaunch.initialize( positionalArgs ) if not result[ 'OK' ]: gLogger.error( result[ 'Message' ] ) sys.exit( 1 ) result = serverToLaunch.serve() if not result[ 'OK' ]: gLogger.error( result[ 'Message' ] ) sys.exit( 1 )
gpl-3.0
wooga/airflow
airflow/utils/db.py
1
19281
# # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. import logging import os import time from sqlalchemy import Table from airflow import settings from airflow.configuration import conf # noinspection PyUnresolvedReferences from airflow.jobs.base_job import BaseJob # noqa: F401 # pylint: disable=unused-import # noinspection PyUnresolvedReferences from airflow.models import ( # noqa: F401 # pylint: disable=unused-import DAG, XCOM_RETURN_KEY, BaseOperator, BaseOperatorLink, Connection, DagBag, DagModel, DagPickle, DagRun, DagTag, Log, Pool, SkipMixin, SlaMiss, TaskFail, TaskInstance, TaskReschedule, Variable, XCom, ) # We need to add this model manually to get reset working well # noinspection PyUnresolvedReferences from airflow.models.serialized_dag import SerializedDagModel # noqa: F401 # pylint: disable=unused-import # TODO: remove create_session once we decide to break backward compatibility from airflow.utils.session import ( # noqa: F401 # pylint: disable=unused-import create_session, provide_session, ) log = logging.getLogger(__name__) @provide_session def merge_conn(conn, session=None): """ Add new Connection. 
""" if not session.query(Connection).filter(Connection.conn_id == conn.conn_id).first(): session.add(conn) session.commit() @provide_session def add_default_pool_if_not_exists(session=None): """ Add default pool if it does not exist. """ if not Pool.get_pool(Pool.DEFAULT_POOL_NAME, session=session): default_pool = Pool( pool=Pool.DEFAULT_POOL_NAME, slots=conf.getint(section='core', key='non_pooled_task_slot_count', fallback=128), description="Default pool", ) session.add(default_pool) session.commit() @provide_session def create_default_connections(session=None): """ Create default Airflow connections. """ merge_conn( Connection( conn_id="airflow_db", conn_type="mysql", host="mysql", login="root", password="", schema="airflow", ), session ) merge_conn( Connection( conn_id="aws_default", conn_type="aws", ), session ) merge_conn( Connection( conn_id="azure_batch_default", conn_type="azure_batch", extra='''{"account_name": "<ACCOUNT_NAME>", "account_key": "<ACCOUNT_KEY>", "account_url": "<ACCOUNT_URL>", "vm_publisher": "<VM_PUBLISHER>", "vm_offer": "<VM_OFFER>", "vm_sku": "<VM_SKU>", "vm_version": "<VM_VERSION>", "node_agent_sku_id": "<NODE_AGENT_SKU_ID>"}''' ) ) merge_conn( Connection( conn_id="azure_container_instances_default", conn_type="azure_container_instances", extra='{"tenantId": "<TENANT>", "subscriptionId": "<SUBSCRIPTION ID>" }', ), session ) merge_conn( Connection( conn_id="azure_cosmos_default", conn_type="azure_cosmos", extra='{"database_name": "<DATABASE_NAME>", "collection_name": "<COLLECTION_NAME>" }', ), session ) merge_conn( Connection( conn_id='azure_data_explorer_default', conn_type='azure_data_explorer', host='https://<CLUSTER>.kusto.windows.net', extra='''{"auth_method": "<AAD_APP | AAD_APP_CERT | AAD_CREDS | AAD_DEVICE>", "tenant": "<TENANT ID>", "certificate": "<APPLICATION PEM CERTIFICATE>", "thumbprint": "<APPLICATION CERTIFICATE THUMBPRINT>"}''' ), session, ) merge_conn( Connection( conn_id="azure_data_lake_default", 
conn_type="azure_data_lake", extra='{"tenant": "<TENANT>", "account_name": "<ACCOUNTNAME>" }', ), session ) merge_conn( Connection( conn_id="cassandra_default", conn_type="cassandra", host="cassandra", port=9042, ), session ) merge_conn( Connection( conn_id="databricks_default", conn_type="databricks", host="localhost", ), session ) merge_conn( Connection( conn_id="dingding_default", conn_type="http", host="", password="", ), session ) merge_conn( Connection( conn_id="druid_broker_default", conn_type="druid", host="druid-broker", port=8082, extra='{"endpoint": "druid/v2/sql"}', ), session ) merge_conn( Connection( conn_id="druid_ingest_default", conn_type="druid", host="druid-overlord", port=8081, extra='{"endpoint": "druid/indexer/v1/task"}', ), session ) merge_conn( Connection( conn_id="elasticsearch_default", conn_type="elasticsearch", host="localhost", schema="http", port=9200 ), session ) merge_conn( Connection( conn_id="emr_default", conn_type="emr", extra=""" { "Name": "default_job_flow_name", "LogUri": "s3://my-emr-log-bucket/default_job_flow_location", "ReleaseLabel": "emr-4.6.0", "Instances": { "Ec2KeyName": "mykey", "Ec2SubnetId": "somesubnet", "InstanceGroups": [ { "Name": "Master nodes", "Market": "ON_DEMAND", "InstanceRole": "MASTER", "InstanceType": "r3.2xlarge", "InstanceCount": 1 }, { "Name": "Slave nodes", "Market": "ON_DEMAND", "InstanceRole": "CORE", "InstanceType": "r3.2xlarge", "InstanceCount": 1 } ], "TerminationProtected": false, "KeepJobFlowAliveWhenNoSteps": false }, "Applications":[ { "Name": "Spark" } ], "VisibleToAllUsers": true, "JobFlowRole": "EMR_EC2_DefaultRole", "ServiceRole": "EMR_DefaultRole", "Tags": [ { "Key": "app", "Value": "analytics" }, { "Key": "environment", "Value": "development" } ] } """, ), session ) merge_conn( Connection( conn_id="facebook_default", conn_type="facebook_social", extra=""" { "account_id": "<AD_ACCOUNNT_ID>", "app_id": "<FACEBOOK_APP_ID>", "app_secret": "<FACEBOOK_APP_SECRET>", "access_token": 
"<FACEBOOK_AD_ACCESS_TOKEN>" } """, ), session ) merge_conn( Connection( conn_id="fs_default", conn_type="fs", extra='{"path": "/"}', ), session, ) merge_conn( Connection( conn_id="google_cloud_default", conn_type="google_cloud_platform", schema="default", ), session ) merge_conn( Connection( conn_id="hive_cli_default", conn_type="hive_cli", port=10000, host="localhost", extra='{"use_beeline": true, "auth": ""}', schema="default", ), session ) merge_conn( Connection( conn_id="hiveserver2_default", conn_type="hiveserver2", host="localhost", schema="default", port=10000, ), session ) merge_conn( Connection( conn_id="http_default", conn_type="http", host="https://www.httpbin.org/", ), session ) merge_conn( Connection( conn_id='kubernetes_default', conn_type='kubernetes', ), session ) merge_conn( Connection( conn_id="livy_default", conn_type="livy", host="livy", port=8998 ), session ) merge_conn( Connection( conn_id="local_mysql", conn_type="mysql", host="localhost", login="airflow", password="airflow", schema="airflow", ), session ) merge_conn( Connection( conn_id="metastore_default", conn_type="hive_metastore", host="localhost", extra='{"authMechanism": "PLAIN"}', port=9083, ), session ) merge_conn( Connection( conn_id="mongo_default", conn_type="mongo", host="mongo", port=27017 ), session ) merge_conn( Connection( conn_id="mssql_default", conn_type="mssql", host="localhost", port=1433, ), session ) merge_conn( Connection( conn_id="mysql_default", conn_type="mysql", login="root", schema="airflow", host="mysql", ), session ) merge_conn( Connection( conn_id="opsgenie_default", conn_type="http", host="", password="", ), session ) merge_conn( Connection( conn_id="pig_cli_default", conn_type="pig_cli", schema="default", ), session ) merge_conn( Connection( conn_id="pinot_admin_default", conn_type="pinot", host="localhost", port=9000, ), session ) merge_conn( Connection( conn_id="pinot_broker_default", conn_type="pinot", host="localhost", port=9000, extra='{"endpoint": 
"/query", "schema": "http"}', ), session ) merge_conn( Connection( conn_id="postgres_default", conn_type="postgres", login="postgres", password="airflow", schema="airflow", host="postgres", ), session ) merge_conn( Connection( conn_id="presto_default", conn_type="presto", host="localhost", schema="hive", port=3400, ), session ) merge_conn( Connection( conn_id="qubole_default", conn_type="qubole", host="localhost", ), session ) merge_conn( Connection( conn_id="redis_default", conn_type="redis", host="redis", port=6379, extra='{"db": 0}', ), session ) merge_conn( Connection( conn_id="segment_default", conn_type="segment", extra='{"write_key": "my-segment-write-key"}', ), session ) merge_conn( Connection( conn_id="sftp_default", conn_type="sftp", host="localhost", port=22, login="airflow", extra='{"key_file": "~/.ssh/id_rsa", "no_host_key_check": true}', ), session ) merge_conn( Connection( conn_id="spark_default", conn_type="spark", host="yarn", extra='{"queue": "root.default"}', ), session ) merge_conn( Connection( conn_id="sqlite_default", conn_type="sqlite", host="/tmp/sqlite_default.db", ), session ) merge_conn( Connection( conn_id="sqoop_default", conn_type="sqoop", host="rdbms", ), session ) merge_conn( Connection( conn_id="ssh_default", conn_type="ssh", host="localhost", ), session ) merge_conn( Connection( conn_id="tableau_default", conn_type="tableau", host="https://tableau.server.url", login="user", password="password", extra='{"site_id": "my_site"}', ), session ) merge_conn( Connection( conn_id="vertica_default", conn_type="vertica", host="localhost", port=5433, ), session ) merge_conn( Connection( conn_id="wasb_default", conn_type="wasb", extra='{"sas_token": null}', ), session ) merge_conn( Connection( conn_id="webhdfs_default", conn_type="hdfs", host="localhost", port=50070, ), session ) merge_conn( Connection( conn_id='yandexcloud_default', conn_type='yandexcloud', schema='default', ), session ) def initdb(): """ Initialize Airflow database. 
""" upgradedb() if conf.getboolean('core', 'LOAD_DEFAULT_CONNECTIONS'): create_default_connections() dagbag = DagBag() # Save DAGs in the ORM dagbag.sync_to_db() # Deactivate the unknown ones DAG.deactivate_unknown_dags(dagbag.dags.keys()) from flask_appbuilder.models.sqla import Base Base.metadata.create_all(settings.engine) # pylint: disable=no-member def _get_alembic_config(): from alembic.config import Config log.info("Creating tables") current_dir = os.path.dirname(os.path.abspath(__file__)) package_dir = os.path.normpath(os.path.join(current_dir, '..')) directory = os.path.join(package_dir, 'migrations') config = Config(os.path.join(package_dir, 'alembic.ini')) config.set_main_option('script_location', directory.replace('%', '%%')) config.set_main_option('sqlalchemy.url', settings.SQL_ALCHEMY_CONN.replace('%', '%%')) return config def check_migrations(timeout): """ Function to wait for all airflow migrations to complete. @param timeout: @return: """ from alembic.runtime.migration import MigrationContext from alembic.script import ScriptDirectory config = _get_alembic_config() script_ = ScriptDirectory.from_config(config) with settings.engine.connect() as connection: context = MigrationContext.configure(connection) ticker = 0 while True: source_heads = set(script_.get_heads()) db_heads = set(context.get_current_heads()) if source_heads == db_heads: break if ticker >= timeout: raise TimeoutError("There are still unapplied migrations after {} " "seconds.".format(ticker)) ticker += 1 time.sleep(1) log.info('Waiting for migrations... %s second(s)', ticker) def upgradedb(): """ Upgrade the database. 
""" # alembic adds significant import time, so we import it lazily from alembic import command log.info("Creating tables") config = _get_alembic_config() config.set_main_option('sqlalchemy.url', settings.SQL_ALCHEMY_CONN.replace('%', '%%')) command.upgrade(config, 'heads') add_default_pool_if_not_exists() def resetdb(): """ Clear out the database """ log.info("Dropping tables that exist") connection = settings.engine.connect() drop_airflow_models(connection) drop_flask_models(connection) initdb() def drop_airflow_models(connection): """ Drops all airflow models. @param connection: @return: None """ from airflow.models.base import Base # Drop connection and chart - those tables have been deleted and in case you # run resetdb on schema with chart or users table will fail chart = Table('chart', Base.metadata) chart.drop(settings.engine, checkfirst=True) user = Table('user', Base.metadata) user.drop(settings.engine, checkfirst=True) users = Table('users', Base.metadata) users.drop(settings.engine, checkfirst=True) dag_stats = Table('dag_stats', Base.metadata) dag_stats.drop(settings.engine, checkfirst=True) Base.metadata.drop_all(connection) # we remove the Tables here so that if resetdb is run metadata does not keep the old tables. Base.metadata.remove(dag_stats) Base.metadata.remove(users) Base.metadata.remove(user) Base.metadata.remove(chart) # alembic adds significant import time, so we import it lazily # noinspection PyUnresolvedReferences from alembic.migration import MigrationContext migration_ctx = MigrationContext.configure(connection) # noinspection PyProtectedMember version = migration_ctx._version # pylint: disable=protected-access if version.exists(connection): version.drop(connection) def drop_flask_models(connection): """ Drops all Flask models. 
@param connection: @return: """ from flask_appbuilder.models.sqla import Base Base.metadata.drop_all(connection) # pylint: disable=no-member @provide_session def check(session=None): """ Checks if the database works. :param session: session of the sqlalchemy """ session.execute('select 1 as is_alive;') log.info("Connection successful.")
apache-2.0
remiolsen/NouGAT
sciLifeLab_utils/run_QC_analysis.py
3
6886
from __future__ import absolute_import import sys, os, yaml, glob import subprocess import argparse import re from sciLifeLab_utils import submit_job def main(args): projectFolder = os.getcwd() samples_data_dir = args.sample_data_dir projectName = os.path.basename(os.path.normpath(samples_data_dir)) for sample_dir_name in [dir for dir in os.listdir(samples_data_dir) \ if os.path.isdir(os.path.join(samples_data_dir, dir))]: sample_folder = os.path.join(os.getcwd(), sample_dir_name) if not os.path.exists(sample_folder): os.makedirs(sample_folder) os.chdir(sample_folder) # now I am in the folder, i can run at the same time QC and MP anlaysis pipeline = "QCcontrol" tools = ["trimmomatic", "fastqc", "abyss", "align"] if args.reference is None: tools = ["trimmomatic", "fastqc", "abyss"] sample_YAML_name = os.path.join(sample_folder, "{}_{}.yaml".format( sample_dir_name, pipeline)) sample_YAML = open(sample_YAML_name, 'w') sample_YAML.write("pipeline:\n") sample_YAML.write(" {}\n".format(pipeline)) sample_YAML.write("tools:\n") sample_YAML.write(" {}\n".format(tools)) ##TODO: output must became sampleName sample_YAML.write("output: {}\n".format(sample_dir_name)) sample_YAML.write("projectName: {}\n".format(projectName)) sample_YAML.write("kmer: 35\n") sample_YAML.write("threads: {}\n".format(args.threads)) sample_YAML.write("genomeSize: \n") sample_YAML.write("adapters: {}\n".format(args.adapter)) if args.reference is not None: sample_YAML.write("reference: {}\n".format(args.reference)) sample_YAML.write("libraries:\n") sample_data_dir = os.path.join(samples_data_dir,sample_dir_name) # helper variables for collecting FCs fc_pat, prep_pat = (r'^\d{6}_.*_?.*$', r'^[A-Z]$') def _get_expected_dir(path, pat): return [os.path.join(path, d) for d in os.listdir(path) if re.match(pat, d) \ and os.path.isdir(os.path.join(path, d))] #collect FC directories flowcells_dirs = _get_expected_dir(sample_data_dir, fc_pat) # to adapt the directory structure in IRMA where it have lib prep 
dir lib_prep_dirs = _get_expected_dir(sample_data_dir, prep_pat) # Check and collect the flowcells in the lib prep directory for prep_dir in lib_prep_dirs: flowcells_dirs.extend(_get_expected_dir(prep_dir, fc_pat)) sample_files = [] for flowcell in flowcells_dirs: sample_files.extend([os.path.join(flowcell, f) for f in \ os.listdir(flowcell) \ if (os.path.isfile(os.path.realpath(os.path.join(flowcell,f))) \ and re.search('.gz$',f))]) # now sample_files contains all the file sequenced for this sample pair1_file = "" pair2_file = "" single = "" library = 1 while len(sample_files) > 0: file = sample_files[0] sample_YAML.write(" lib{}:\n".format(library)) if "_1.fastq.gz" in file: pair1_file = file pair2_file = re.sub("_1.fastq.gz", "_2.fastq.gz", file) elif "_2.fastq.gz" in file: pair2_file = file pair1_file = re.sub("_2.fastq.gz", "_1.fastq.gz", file) elif "R1_001.fastq.gz" in file: pair1_file = file pair2_file = re.sub("R1_001.fastq.gz", "R2_001.fastq.gz", file) elif "R2_001.fastq.gz" in file: pair2_file = file pair1_file = re.sub("R2_001.fastq.gz", "R1_001.fastq.gz", file) else: sys.exit("file {} does not respect naming convection. 
\ Exit!".format(file)) sample_YAML.write(" pair1: {}\n".format(pair1_file)) sample_YAML.write(" pair2: {}\n".format(pair2_file)) sample_YAML.write(" orientation: {}\n".format(args.orientation)) sample_YAML.write(" insert: {}\n".format(args.insert)) sample_YAML.write(" std: {}\n".format(args.std)) sample_files.remove(pair1_file) sample_files.remove(pair2_file) library += 1 sample_YAML.close # Run the job extramodules = [] if "abyss" in tools: extramodules.append("module load abyss/1.3.5\n") if "align" in tools: extramodules.append("module load samtools\nmodule load bwa\n") jobname = "{}_{}".format(sample_dir_name, pipeline) submit_job(sample_YAML_name, jobname, os.getcwd(), args, extramodules) os.chdir(projectFolder) if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument('--reference', type=str, default=None, help="path to the reference file") parser.add_argument('--adapter', type=str, required=True, help="path to the file containing the adaptor sequence to be removed") parser.add_argument('--global-config', type=str, required=True, help="global configuration file") parser.add_argument('--sample-data-dir', type=str, required=True, help=("Path to directory (usually INBOX) containing the project " "(one dir per sample, scilife structure project/sample/flowcell/)")) parser.add_argument('--orientation', type=str, required=True, help="orientation of the libraries") parser.add_argument('--insert', type=str, required=True, help="expected insert size of the libraries") parser.add_argument('--std', type=str, required=True, help=("expected stdandard variation of the insert size of " "the libraries")) parser.add_argument('--env', type=str, default="DeNovoPipeline", help=("name of the virtual enviorment " "(default is DeNovoPipeline)")) parser.add_argument('--email', type=str, help=("Send notifications/job status updates to this email " "address.")) parser.add_argument('--time', type=str, default="1-00:00:00", help="required time for the job (default 
is 1 day : 1-00:00:00)") parser.add_argument('--project', type=str, default="a2010002", help="project name for slurm submission (default is a2010002)") parser.add_argument('--threads', type=int, default=16, help="Number of thread the job will require") parser.add_argument('--qos', type=str, help=("Specify a quality of service preset for the job (eg. " "--qos short)")) args = parser.parse_args() main(args)
mit
hackathon-3d/ice-cream-manwich-repo
server/src/BooksWithFriends/libs/django/core/cache/backends/base.py
100
7927
"Base Cache class." import warnings from django.core.exceptions import ImproperlyConfigured, DjangoRuntimeWarning from django.utils.encoding import smart_str from django.utils.importlib import import_module class InvalidCacheBackendError(ImproperlyConfigured): pass class CacheKeyWarning(DjangoRuntimeWarning): pass # Memcached does not accept keys longer than this. MEMCACHE_MAX_KEY_LENGTH = 250 def default_key_func(key, key_prefix, version): """ Default function to generate keys. Constructs the key used by all other methods. By default it prepends the `key_prefix'. KEY_FUNCTION can be used to specify an alternate function with custom key making behavior. """ return ':'.join([key_prefix, str(version), smart_str(key)]) def get_key_func(key_func): """ Function to decide which key function to use. Defaults to ``default_key_func``. """ if key_func is not None: if callable(key_func): return key_func else: key_func_module_path, key_func_name = key_func.rsplit('.', 1) key_func_module = import_module(key_func_module_path) return getattr(key_func_module, key_func_name) return default_key_func class BaseCache(object): def __init__(self, params): timeout = params.get('timeout', params.get('TIMEOUT', 300)) try: timeout = int(timeout) except (ValueError, TypeError): timeout = 300 self.default_timeout = timeout options = params.get('OPTIONS', {}) max_entries = params.get('max_entries', options.get('MAX_ENTRIES', 300)) try: self._max_entries = int(max_entries) except (ValueError, TypeError): self._max_entries = 300 cull_frequency = params.get('cull_frequency', options.get('CULL_FREQUENCY', 3)) try: self._cull_frequency = int(cull_frequency) except (ValueError, TypeError): self._cull_frequency = 3 self.key_prefix = smart_str(params.get('KEY_PREFIX', '')) self.version = params.get('VERSION', 1) self.key_func = get_key_func(params.get('KEY_FUNCTION', None)) def make_key(self, key, version=None): """Constructs the key used by all other methods. 
By default it uses the key_func to generate a key (which, by default, prepends the `key_prefix' and 'version'). An different key function can be provided at the time of cache construction; alternatively, you can subclass the cache backend to provide custom key making behavior. """ if version is None: version = self.version new_key = self.key_func(key, self.key_prefix, version) return new_key def add(self, key, value, timeout=None, version=None): """ Set a value in the cache if the key does not already exist. If timeout is given, that timeout will be used for the key; otherwise the default cache timeout will be used. Returns True if the value was stored, False otherwise. """ raise NotImplementedError def get(self, key, default=None, version=None): """ Fetch a given key from the cache. If the key does not exist, return default, which itself defaults to None. """ raise NotImplementedError def set(self, key, value, timeout=None, version=None): """ Set a value in the cache. If timeout is given, that timeout will be used for the key; otherwise the default cache timeout will be used. """ raise NotImplementedError def delete(self, key, version=None): """ Delete a key from the cache, failing silently. """ raise NotImplementedError def get_many(self, keys, version=None): """ Fetch a bunch of keys from the cache. For certain backends (memcached, pgsql) this can be *much* faster when fetching multiple values. Returns a dict mapping each key in keys to its value. If the given key is missing, it will be missing from the response dict. """ d = {} for k in keys: val = self.get(k, version=version) if val is not None: d[k] = val return d def has_key(self, key, version=None): """ Returns True if the key is in the cache and has not expired. """ return self.get(key, version=version) is not None def incr(self, key, delta=1, version=None): """ Add delta to value in the cache. If the key does not exist, raise a ValueError exception. 
""" value = self.get(key, version=version) if value is None: raise ValueError("Key '%s' not found" % key) new_value = value + delta self.set(key, new_value, version=version) return new_value def decr(self, key, delta=1, version=None): """ Subtract delta from value in the cache. If the key does not exist, raise a ValueError exception. """ return self.incr(key, -delta, version=version) def __contains__(self, key): """ Returns True if the key is in the cache and has not expired. """ # This is a separate method, rather than just a copy of has_key(), # so that it always has the same functionality as has_key(), even # if a subclass overrides it. return self.has_key(key) def set_many(self, data, timeout=None, version=None): """ Set a bunch of values in the cache at once from a dict of key/value pairs. For certain backends (memcached), this is much more efficient than calling set() multiple times. If timeout is given, that timeout will be used for the key; otherwise the default cache timeout will be used. """ for key, value in data.items(): self.set(key, value, timeout=timeout, version=version) def delete_many(self, keys, version=None): """ Set a bunch of values in the cache at once. For certain backends (memcached), this is much more efficient than calling delete() multiple times. """ for key in keys: self.delete(key, version=version) def clear(self): """Remove *all* values from the cache at once.""" raise NotImplementedError def validate_key(self, key): """ Warn about keys that would not be portable to the memcached backend. This encourages (but does not force) writing backend-portable cache code. 
""" if len(key) > MEMCACHE_MAX_KEY_LENGTH: warnings.warn('Cache key will cause errors if used with memcached: ' '%s (longer than %s)' % (key, MEMCACHE_MAX_KEY_LENGTH), CacheKeyWarning) for char in key: if ord(char) < 33 or ord(char) == 127: warnings.warn('Cache key contains characters that will cause ' 'errors if used with memcached: %r' % key, CacheKeyWarning) def incr_version(self, key, delta=1, version=None): """Adds delta to the cache version for the supplied key. Returns the new version. """ if version is None: version = self.version value = self.get(key, version=version) if value is None: raise ValueError("Key '%s' not found" % key) self.set(key, value, version=version+delta) self.delete(key, version=version) return version+delta def decr_version(self, key, delta=1, version=None): """Substracts delta from the cache version for the supplied key. Returns the new version. """ return self.incr_version(key, -delta, version)
gpl-2.0
andrius-preimantas/account-financial-tools
account_credit_control_dunning_fees/model/run.py
37
1448
# -*- coding: utf-8 -*- ############################################################################## # # Author: Nicolas Bessi # Copyright 2014 Camptocamp SA # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## from openerp import models, api class CreditControlRun(models.Model): """Add computation of fees""" _inherit = "credit.control.run" @api.multi @api.returns('credit.control.line') def _generate_credit_lines(self): """Override method to add fees computation""" credit_lines = super(CreditControlRun, self)._generate_credit_lines() fees_model = self.env['credit.control.dunning.fees.computer'] fees_model._compute_fees(credit_lines) return credit_lines
agpl-3.0
morphis/home-assistant
homeassistant/components/light/rfxtrx.py
29
2043
""" Support for RFXtrx lights. For more details about this platform, please refer to the documentation at https://home-assistant.io/components/light.rfxtrx/ """ import logging import homeassistant.components.rfxtrx as rfxtrx from homeassistant.components.light import (ATTR_BRIGHTNESS, SUPPORT_BRIGHTNESS, Light) DEPENDENCIES = ['rfxtrx'] _LOGGER = logging.getLogger(__name__) PLATFORM_SCHEMA = rfxtrx.DEFAULT_SCHEMA SUPPORT_RFXTRX = SUPPORT_BRIGHTNESS def setup_platform(hass, config, add_devices, discovery_info=None): """Setup the RFXtrx platform.""" import RFXtrx as rfxtrxmod lights = rfxtrx.get_devices_from_config(config, RfxtrxLight) add_devices(lights) def light_update(event): """Callback for light updates from the RFXtrx gateway.""" if not isinstance(event.device, rfxtrxmod.LightingDevice) or \ not event.device.known_to_be_dimmable: return new_device = rfxtrx.get_new_device(event, config, RfxtrxLight) if new_device: add_devices([new_device]) rfxtrx.apply_received_command(event) # Subscribe to main rfxtrx events if light_update not in rfxtrx.RECEIVED_EVT_SUBSCRIBERS: rfxtrx.RECEIVED_EVT_SUBSCRIBERS.append(light_update) class RfxtrxLight(rfxtrx.RfxtrxDevice, Light): """Represenation of a RFXtrx light.""" @property def brightness(self): """Return the brightness of this light between 0..255.""" return self._brightness @property def supported_features(self): """Flag supported features.""" return SUPPORT_RFXTRX def turn_on(self, **kwargs): """Turn the light on.""" brightness = kwargs.get(ATTR_BRIGHTNESS) if brightness is None: self._brightness = 255 self._send_command("turn_on") else: self._brightness = brightness _brightness = (brightness * 100 // 255) self._send_command("dim", _brightness)
apache-2.0
Venturi/cms
env/lib/python2.7/site-packages/unidecode/x023.py
165
4341
data = ( '[?]', # 0x00 '[?]', # 0x01 '[?]', # 0x02 '^', # 0x03 '[?]', # 0x04 '[?]', # 0x05 '[?]', # 0x06 '[?]', # 0x07 '[?]', # 0x08 '[?]', # 0x09 '[?]', # 0x0a '[?]', # 0x0b '[?]', # 0x0c '[?]', # 0x0d '[?]', # 0x0e '[?]', # 0x0f '[?]', # 0x10 '[?]', # 0x11 '[?]', # 0x12 '[?]', # 0x13 '[?]', # 0x14 '[?]', # 0x15 '[?]', # 0x16 '[?]', # 0x17 '[?]', # 0x18 '[?]', # 0x19 '[?]', # 0x1a '[?]', # 0x1b '[?]', # 0x1c '[?]', # 0x1d '[?]', # 0x1e '[?]', # 0x1f '[?]', # 0x20 '[?]', # 0x21 '[?]', # 0x22 '[?]', # 0x23 '[?]', # 0x24 '[?]', # 0x25 '[?]', # 0x26 '[?]', # 0x27 '[?]', # 0x28 '<', # 0x29 '> ', # 0x2a '[?]', # 0x2b '[?]', # 0x2c '[?]', # 0x2d '[?]', # 0x2e '[?]', # 0x2f '[?]', # 0x30 '[?]', # 0x31 '[?]', # 0x32 '[?]', # 0x33 '[?]', # 0x34 '[?]', # 0x35 '[?]', # 0x36 '[?]', # 0x37 '[?]', # 0x38 '[?]', # 0x39 '[?]', # 0x3a '[?]', # 0x3b '[?]', # 0x3c '[?]', # 0x3d '[?]', # 0x3e '[?]', # 0x3f '[?]', # 0x40 '[?]', # 0x41 '[?]', # 0x42 '[?]', # 0x43 '[?]', # 0x44 '[?]', # 0x45 '[?]', # 0x46 '[?]', # 0x47 '[?]', # 0x48 '[?]', # 0x49 '[?]', # 0x4a '[?]', # 0x4b '[?]', # 0x4c '[?]', # 0x4d '[?]', # 0x4e '[?]', # 0x4f '[?]', # 0x50 '[?]', # 0x51 '[?]', # 0x52 '[?]', # 0x53 '[?]', # 0x54 '[?]', # 0x55 '[?]', # 0x56 '[?]', # 0x57 '[?]', # 0x58 '[?]', # 0x59 '[?]', # 0x5a '[?]', # 0x5b '[?]', # 0x5c '[?]', # 0x5d '[?]', # 0x5e '[?]', # 0x5f '[?]', # 0x60 '[?]', # 0x61 '[?]', # 0x62 '[?]', # 0x63 '[?]', # 0x64 '[?]', # 0x65 '[?]', # 0x66 '[?]', # 0x67 '[?]', # 0x68 '[?]', # 0x69 '[?]', # 0x6a '[?]', # 0x6b '[?]', # 0x6c '[?]', # 0x6d '[?]', # 0x6e '[?]', # 0x6f '[?]', # 0x70 '[?]', # 0x71 '[?]', # 0x72 '[?]', # 0x73 '[?]', # 0x74 '[?]', # 0x75 '[?]', # 0x76 '[?]', # 0x77 '[?]', # 0x78 '[?]', # 0x79 '[?]', # 0x7a '[?]', # 0x7b '[?]', # 0x7c '[?]', # 0x7d '[?]', # 0x7e '[?]', # 0x7f '[?]', # 0x80 '[?]', # 0x81 '[?]', # 0x82 '[?]', # 0x83 '[?]', # 0x84 '[?]', # 0x85 '[?]', # 0x86 '[?]', # 0x87 '[?]', # 0x88 '[?]', # 0x89 '[?]', # 0x8a '[?]', # 0x8b '[?]', # 0x8c '[?]', # 0x8d '[?]', 
# 0x8e '[?]', # 0x8f '[?]', # 0x90 '[?]', # 0x91 '[?]', # 0x92 '[?]', # 0x93 '[?]', # 0x94 '[?]', # 0x95 '[?]', # 0x96 '[?]', # 0x97 '[?]', # 0x98 '[?]', # 0x99 '[?]', # 0x9a '[?]', # 0x9b '[?]', # 0x9c '[?]', # 0x9d '[?]', # 0x9e '[?]', # 0x9f '[?]', # 0xa0 '[?]', # 0xa1 '[?]', # 0xa2 '[?]', # 0xa3 '[?]', # 0xa4 '[?]', # 0xa5 '[?]', # 0xa6 '[?]', # 0xa7 '[?]', # 0xa8 '[?]', # 0xa9 '[?]', # 0xaa '[?]', # 0xab '[?]', # 0xac '[?]', # 0xad '[?]', # 0xae '[?]', # 0xaf '[?]', # 0xb0 '[?]', # 0xb1 '[?]', # 0xb2 '[?]', # 0xb3 '[?]', # 0xb4 '[?]', # 0xb5 '[?]', # 0xb6 '[?]', # 0xb7 '[?]', # 0xb8 '[?]', # 0xb9 '[?]', # 0xba '[?]', # 0xbb '[?]', # 0xbc '[?]', # 0xbd '[?]', # 0xbe '[?]', # 0xbf '[?]', # 0xc0 '[?]', # 0xc1 '[?]', # 0xc2 '[?]', # 0xc3 '[?]', # 0xc4 '[?]', # 0xc5 '[?]', # 0xc6 '[?]', # 0xc7 '[?]', # 0xc8 '[?]', # 0xc9 '[?]', # 0xca '[?]', # 0xcb '[?]', # 0xcc '[?]', # 0xcd '[?]', # 0xce '[?]', # 0xcf '[?]', # 0xd0 '[?]', # 0xd1 '[?]', # 0xd2 '[?]', # 0xd3 '[?]', # 0xd4 '[?]', # 0xd5 '[?]', # 0xd6 '[?]', # 0xd7 '[?]', # 0xd8 '[?]', # 0xd9 '[?]', # 0xda '[?]', # 0xdb '[?]', # 0xdc '[?]', # 0xdd '[?]', # 0xde '[?]', # 0xdf '[?]', # 0xe0 '[?]', # 0xe1 '[?]', # 0xe2 '[?]', # 0xe3 '[?]', # 0xe4 '[?]', # 0xe5 '[?]', # 0xe6 '[?]', # 0xe7 '[?]', # 0xe8 '[?]', # 0xe9 '[?]', # 0xea '[?]', # 0xeb '[?]', # 0xec '[?]', # 0xed '[?]', # 0xee '[?]', # 0xef '[?]', # 0xf0 '[?]', # 0xf1 '[?]', # 0xf2 '[?]', # 0xf3 '[?]', # 0xf4 '[?]', # 0xf5 '[?]', # 0xf6 '[?]', # 0xf7 '[?]', # 0xf8 '[?]', # 0xf9 '[?]', # 0xfa '[?]', # 0xfb '[?]', # 0xfc '[?]', # 0xfd '[?]', # 0xfe )
gpl-2.0
Jobava/pootle
pootle/core/utils/version.py
6
6550
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright (C) Pootle contributors. # # This file is a part of the Pootle project. It is distributed under the GPL3 # or later license. See the LICENSE file for a copy of the license and the # AUTHORS file for copyright and authorship information. # Some functions are taken from or modelled on the version management in # Django. Those are: # Copyright (c) Django Software Foundation and individual contributors. All # rights reserved. import datetime import os import subprocess try: from django.utils.lru_cache import lru_cache except ImportError: # Required for Python 2.7 support and when backported Django version is # unavailable def lru_cache(): def fake(func): return func return fake CANDIDATE_MARKERS = ('alpha', 'beta', 'rc', 'final') def get_version(version=None): """Returns a PEP 440-compliant version number from VERSION. The following examples show a progression from development through pre-release to release and the resultant versions generated: >>> get_version((2, 7, 1, 'alpha', 0)) '2.7.1.dev20150530132219' >>> get_version((2, 7, 1, 'alpha', 1)) '2.7.1a1' >>> get_version((2, 7, 1, 'beta', 1)) '2.7.1b1' >>> get_version((2, 7, 1, 'rc', 2)) '2.7.1rc2' >>> get_version((2, 7, 1, 'final', 0)) '2.7.1' """ version = get_complete_version(version) # Now build the two parts of the version number: # main = X.Y[.Z] # sub = .devN - for pre-alpha releases # | {a|b|rc}N - for alpha, beta and rc releases main = get_main_version(version) candidate_pos = _get_candidate_pos(version) candidate = version[candidate_pos] candidate_extra = version[candidate_pos+1] sub = '' if _is_development_candidate(version): git_changeset = get_git_changeset() if git_changeset: sub = '.dev%s' % git_changeset else: sub = '.dev0' elif candidate != 'final': mapping = {'alpha': 'a', 'beta': 'b', 'rc': 'rc'} sub = mapping[candidate] + str(candidate_extra) return str(main + sub) def _is_development_candidate(version): """Is this a pre-alpha release >>> 
_is_development_candidate((2, 1, 0, 'alpha', 0)) True >>> _is_development_candidate((2, 1, 0, 'beta', 1)) False """ candidate_pos = _get_candidate_pos(version) candidate = version[candidate_pos] candidate_extra = version[candidate_pos+1] return candidate == 'alpha' and candidate_extra == 0 def _get_candidate_pos(version): """Returns the position of the candidate marker. >>> _get_candidate_pos((1, 2, 0, 'alpha', 0)) 3 """ return [i for i, part in enumerate(version) if part in CANDIDATE_MARKERS][0] def _get_candidate(version): """Returns the candidate. One of alpha, beta, rc or final. >>> _get_candidate((0, 1, 2, 'rc', 1)) 'rc' """ return version[_get_candidate_pos(version)] def _get_version_string(parts): """Returns an X.Y.Z version from the list of version parts. >>> _get_version_string((1, 1, 0)) '1.1.0' >>> _get_version_string((1, 1, 0, 1)) '1.1.0.1' """ return '.'.join(str(x) for x in parts) def get_main_version(version=None): """Returns main version (X.Y[.Z]) from VERSION. >>> get_main_version((1, 2, 3, 'alpha', 1)) '1.2.3' """ version = get_complete_version(version) candidate_pos = _get_candidate_pos(version) return _get_version_string(version[:candidate_pos]) def get_major_minor_version(version=None): """Returns X.Y from VERSION. >>> get_major_minor_version((1, 2, 3, 'final', 0)) '1.2' """ version = get_complete_version(version) return _get_version_string(version[:2]) def get_complete_version(version=None): """Returns a tuple of the Pootle version. Or the supplied ``version`` >>> get_complete_version((1, 2, 3, 'alpha', 0)) (1, 2, 3, 'alpha', 0) """ if version is None: from pootle import VERSION as version return version def get_docs_version(version=None, positions=2): """Return the version used in documentation. 
>>> get_docs_version((1, 2, 1, 'alpha', 0)) 'dev' >>> get_docs_version((1, 2, 1, 'rc', 2)) '1.2' """ version = get_complete_version(version) candidate_pos = _get_candidate_pos(version) if positions > candidate_pos: positions = candidate_pos if _is_development_candidate(version): return 'dev' return _get_version_string(version[:positions]) def _shell_command(command): """Return the first result of a shell ``command``""" repo_dir = os.path.dirname(os.path.abspath(__file__)) command_subprocess = subprocess.Popen( command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=repo_dir, universal_newlines=True ) return command_subprocess.communicate()[0] @lru_cache() def get_git_changeset(): """Returns a numeric identifier of the latest git changeset. The result is the UTC timestamp of the changeset in YYYYMMDDHHMMSS format. This value isn't guaranteed to be unique, but collisions are very unlikely, so it's sufficient for generating the development version numbers. >>> get_git_changeset() '20150530132219' """ timestamp = _shell_command( ['/usr/bin/git', 'log', '--pretty=format:%ct', '--quiet', '-1', 'HEAD'] ) try: timestamp = datetime.datetime.utcfromtimestamp(int(timestamp)) except ValueError: return None return timestamp.strftime('%Y%m%d%H%M%S') @lru_cache() def get_git_branch(): """Returns the current git branch. >>> get_git_branch() 'feature/proper_version' """ branch = _shell_command(['/usr/bin/git', 'symbolic-ref', '-q', 'HEAD']).strip() if not branch: return None return "/".join(branch.split("/")[2:]) @lru_cache() def get_git_hash(): """Returns the current git commit hash. >>> get_git_hash() 'ad768e8' """ return _shell_command( ['/usr/bin/git', 'rev-parse', '--verify', '--short', 'HEAD'] ).strip() if __name__ == "__main__": from sys import argv if len(argv) == 2: if argv[1] == "main": print(get_main_version()) elif argv[1] == "major_minor": print(get_major_minor_version()) elif argv[1] == "docs": print(get_docs_version()) else: print(get_version())
gpl-3.0
DominoTree/servo
tests/wpt/web-platform-tests/tools/wptrunner/wptrunner/wptmanifest/tests/test_tokenizer.py
38
11087
import unittest from .. import parser from ..parser import token_types class TokenizerTest(unittest.TestCase): def setUp(self): self.tokenizer = parser.Tokenizer() def tokenize(self, input_str): rv = [] for item in self.tokenizer.tokenize(input_str): rv.append(item) if item[0] == token_types.eof: break return rv def compare(self, input_text, expected): expected = expected + [(token_types.eof, None)] actual = self.tokenize(input_text) self.assertEqual(actual, expected) def test_heading_0(self): self.compare(b"""[Heading text]""", [(token_types.paren, "["), (token_types.string, "Heading text"), (token_types.paren, "]")]) def test_heading_1(self): self.compare(br"""[Heading [text\]]""", [(token_types.paren, "["), (token_types.string, "Heading [text]"), (token_types.paren, "]")]) def test_heading_2(self): self.compare(b"""[Heading #text]""", [(token_types.paren, "["), (token_types.string, "Heading #text"), (token_types.paren, "]")]) def test_heading_3(self): self.compare(br"""[Heading [\]text]""", [(token_types.paren, "["), (token_types.string, "Heading []text"), (token_types.paren, "]")]) def test_heading_4(self): with self.assertRaises(parser.ParseError): self.tokenize(b"[Heading") def test_heading_5(self): self.compare(br"""[Heading [\]text] #comment""", [(token_types.paren, "["), (token_types.string, "Heading []text"), (token_types.paren, "]")]) def test_heading_6(self): self.compare(br"""[Heading \ttext]""", [(token_types.paren, "["), (token_types.string, "Heading \ttext"), (token_types.paren, "]")]) def test_key_0(self): self.compare(b"""key:value""", [(token_types.string, "key"), (token_types.separator, ":"), (token_types.string, "value")]) def test_key_1(self): self.compare(b"""key : value""", [(token_types.string, "key"), (token_types.separator, ":"), (token_types.string, "value")]) def test_key_2(self): self.compare(b"""key : val ue""", [(token_types.string, "key"), (token_types.separator, ":"), (token_types.string, "val ue")]) def test_key_3(self): 
self.compare(b"""key: value#comment""", [(token_types.string, "key"), (token_types.separator, ":"), (token_types.string, "value")]) def test_key_4(self): with self.assertRaises(parser.ParseError): self.tokenize(b"""ke y: value""") def test_key_5(self): with self.assertRaises(parser.ParseError): self.tokenize(b"""key""") def test_key_6(self): self.compare(b"""key: "value\"""", [(token_types.string, "key"), (token_types.separator, ":"), (token_types.string, "value")]) def test_key_7(self): self.compare(b"""key: 'value'""", [(token_types.string, "key"), (token_types.separator, ":"), (token_types.string, "value")]) def test_key_8(self): self.compare(b"""key: "#value\"""", [(token_types.string, "key"), (token_types.separator, ":"), (token_types.string, "#value")]) def test_key_9(self): self.compare(b"""key: '#value\'""", [(token_types.string, "key"), (token_types.separator, ":"), (token_types.string, "#value")]) def test_key_10(self): with self.assertRaises(parser.ParseError): self.tokenize(b"""key: "value""") def test_key_11(self): with self.assertRaises(parser.ParseError): self.tokenize(b"""key: 'value""") def test_key_12(self): with self.assertRaises(parser.ParseError): self.tokenize(b"""key: 'value""") def test_key_13(self): with self.assertRaises(parser.ParseError): self.tokenize(b"""key: 'value' abc""") def test_key_14(self): self.compare(br"""key: \\nb""", [(token_types.string, "key"), (token_types.separator, ":"), (token_types.string, r"\nb")]) def test_list_0(self): self.compare(b""" key: []""", [(token_types.string, "key"), (token_types.separator, ":"), (token_types.list_start, "["), (token_types.list_end, "]")]) def test_list_1(self): self.compare(b""" key: [a, "b"]""", [(token_types.string, "key"), (token_types.separator, ":"), (token_types.list_start, "["), (token_types.string, "a"), (token_types.string, "b"), (token_types.list_end, "]")]) def test_list_2(self): self.compare(b""" key: [a, b]""", [(token_types.string, "key"), (token_types.separator, ":"), 
(token_types.list_start, "["), (token_types.string, "a"), (token_types.string, "b"), (token_types.list_end, "]")]) def test_list_3(self): self.compare(b""" key: [a, #b] c]""", [(token_types.string, "key"), (token_types.separator, ":"), (token_types.list_start, "["), (token_types.string, "a"), (token_types.string, "c"), (token_types.list_end, "]")]) def test_list_4(self): with self.assertRaises(parser.ParseError): self.tokenize(b"""key: [a #b] c]""") def test_list_5(self): with self.assertRaises(parser.ParseError): self.tokenize(b"""key: [a \\ c]""") def test_list_6(self): self.compare(b"""key: [a , b]""", [(token_types.string, "key"), (token_types.separator, ":"), (token_types.list_start, "["), (token_types.string, "a"), (token_types.string, "b"), (token_types.list_end, "]")]) def test_expr_0(self): self.compare(b""" key: if cond == 1: value""", [(token_types.string, "key"), (token_types.separator, ":"), (token_types.group_start, None), (token_types.ident, "if"), (token_types.ident, "cond"), (token_types.ident, "=="), (token_types.number, "1"), (token_types.separator, ":"), (token_types.string, "value")]) def test_expr_1(self): self.compare(b""" key: if cond == 1: value1 value2""", [(token_types.string, "key"), (token_types.separator, ":"), (token_types.group_start, None), (token_types.ident, "if"), (token_types.ident, "cond"), (token_types.ident, "=="), (token_types.number, "1"), (token_types.separator, ":"), (token_types.string, "value1"), (token_types.string, "value2")]) def test_expr_2(self): self.compare(b""" key: if cond=="1": value""", [(token_types.string, "key"), (token_types.separator, ":"), (token_types.group_start, None), (token_types.ident, "if"), (token_types.ident, "cond"), (token_types.ident, "=="), (token_types.string, "1"), (token_types.separator, ":"), (token_types.string, "value")]) def test_expr_3(self): self.compare(b""" key: if cond==1.1: value""", [(token_types.string, "key"), (token_types.separator, ":"), (token_types.group_start, None), 
(token_types.ident, "if"), (token_types.ident, "cond"), (token_types.ident, "=="), (token_types.number, "1.1"), (token_types.separator, ":"), (token_types.string, "value")]) def test_expr_4(self): self.compare(b""" key: if cond==1.1 and cond2 == "a": value""", [(token_types.string, "key"), (token_types.separator, ":"), (token_types.group_start, None), (token_types.ident, "if"), (token_types.ident, "cond"), (token_types.ident, "=="), (token_types.number, "1.1"), (token_types.ident, "and"), (token_types.ident, "cond2"), (token_types.ident, "=="), (token_types.string, "a"), (token_types.separator, ":"), (token_types.string, "value")]) def test_expr_5(self): self.compare(b""" key: if (cond==1.1 ): value""", [(token_types.string, "key"), (token_types.separator, ":"), (token_types.group_start, None), (token_types.ident, "if"), (token_types.paren, "("), (token_types.ident, "cond"), (token_types.ident, "=="), (token_types.number, "1.1"), (token_types.paren, ")"), (token_types.separator, ":"), (token_types.string, "value")]) def test_expr_6(self): self.compare(b""" key: if "\\ttest": value""", [(token_types.string, "key"), (token_types.separator, ":"), (token_types.group_start, None), (token_types.ident, "if"), (token_types.string, "\ttest"), (token_types.separator, ":"), (token_types.string, "value")]) def test_expr_7(self): with self.assertRaises(parser.ParseError): self.tokenize(b""" key: if 1A: value""") def test_expr_8(self): with self.assertRaises(parser.ParseError): self.tokenize(b""" key: if 1a: value""") def test_expr_9(self): with self.assertRaises(parser.ParseError): self.tokenize(b""" key: if 1.1.1: value""") def test_expr_10(self): self.compare(b""" key: if 1.: value""", [(token_types.string, "key"), (token_types.separator, ":"), (token_types.group_start, None), (token_types.ident, "if"), (token_types.number, "1."), (token_types.separator, ":"), (token_types.string, "value")]) if __name__ == "__main__": unittest.main()
mpl-2.0
Communities-Communications/cc-odoo
addons/pad_project/__openerp__.py
312
1500
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## { 'name': 'Pad on tasks', 'version': '1.0', 'category': 'Project Management', 'description': """ This module adds a PAD in all project kanban views. =================================================== """, 'author': 'OpenERP SA', 'website': 'https://www.odoo.com/page/project-management', 'depends': ['project', 'pad'], 'data': ['project_task.xml'], 'demo': [], 'installable': True, 'auto_install': True, } # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
yannh/ansible-modules-core
system/setup.py
99
5265
#!/usr/bin/python # -*- coding: utf-8 -*- # (c) 2012, Michael DeHaan <michael.dehaan@gmail.com> # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. DOCUMENTATION = ''' --- module: setup version_added: historical short_description: Gathers facts about remote hosts options: filter: version_added: "1.1" description: - if supplied, only return facts that match this shell-style (fnmatch) wildcard. required: false default: '*' fact_path: version_added: "1.3" description: - path used for local ansible facts (*.fact) - files in this dir will be run (if executable) and their results be added to ansible_local facts if a file is not executable it is read. File/results format can be json or ini-format required: false default: '/etc/ansible/facts.d' description: - This module is automatically called by playbooks to gather useful variables about remote hosts that can be used in playbooks. It can also be executed directly by C(/usr/bin/ansible) to check what variables are available to a host. Ansible provides many I(facts) about the system, automatically. notes: - More ansible facts will be added with successive releases. If I(facter) or I(ohai) are installed, variables from these programs will also be snapshotted into the JSON file for usage in templating. These variables are prefixed with C(facter_) and C(ohai_) so it's easy to tell their source. All variables are bubbled up to the caller. 
Using the ansible facts and choosing to not install I(facter) and I(ohai) means you can avoid Ruby-dependencies on your remote systems. (See also M(facter) and M(ohai).) - The filter option filters only the first level subkey below ansible_facts. - If the target host is Windows, you will not currently have the ability to use C(fact_path) or C(filter) as this is provided by a simpler implementation of the module. Different facts are returned for Windows hosts. author: - "Ansible Core Team" - "Michael DeHaan" ''' EXAMPLES = """ # Display facts from all hosts and store them indexed by I(hostname) at C(/tmp/facts). ansible all -m setup --tree /tmp/facts # Display only facts regarding memory found by ansible on all hosts and output them. ansible all -m setup -a 'filter=ansible_*_mb' # Display only facts returned by facter. ansible all -m setup -a 'filter=facter_*' # Display only facts about certain interfaces. ansible all -m setup -a 'filter=ansible_eth[0-2]' """ def run_setup(module): setup_options = dict(module_setup=True) facts = ansible_facts(module) for (k, v) in facts.items(): setup_options["ansible_%s" % k.replace('-', '_')] = v # Look for the path to the facter and ohai binary and set # the variable to that path. 
facter_path = module.get_bin_path('facter') ohai_path = module.get_bin_path('ohai') # if facter is installed, and we can use --json because # ruby-json is ALSO installed, include facter data in the JSON if facter_path is not None: rc, out, err = module.run_command(facter_path + " --puppet --json") facter = True try: facter_ds = json.loads(out) except: facter = False if facter: for (k,v) in facter_ds.items(): setup_options["facter_%s" % k] = v # ditto for ohai if ohai_path is not None: rc, out, err = module.run_command(ohai_path) ohai = True try: ohai_ds = json.loads(out) except: ohai = False if ohai: for (k,v) in ohai_ds.items(): k2 = "ohai_%s" % k.replace('-', '_') setup_options[k2] = v setup_result = { 'ansible_facts': {} } for (k,v) in setup_options.items(): if module.params['filter'] == '*' or fnmatch.fnmatch(k, module.params['filter']): setup_result['ansible_facts'][k] = v # hack to keep --verbose from showing all the setup module results setup_result['_ansible_verbose_override'] = True return setup_result def main(): global module module = AnsibleModule( argument_spec = dict( filter=dict(default="*", required=False), fact_path=dict(default='/etc/ansible/facts.d', required=False), ), supports_check_mode = True, ) data = run_setup(module) module.exit_json(**data) # import module snippets from ansible.module_utils.basic import * from ansible.module_utils.facts import * main()
gpl-3.0
DualSpark/ansible
lib/ansible/errors/__init__.py
163
7342
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com> # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. # Make coding more python3-ish from __future__ import (absolute_import, division, print_function) __metaclass__ = type import os from ansible.errors.yaml_strings import * class AnsibleError(Exception): ''' This is the base class for all errors raised from Ansible code, and can be instantiated with two optional parameters beyond the error message to control whether detailed information is displayed when the error occurred while parsing a data file of some kind. Usage: raise AnsibleError('some message here', obj=obj, show_content=True) Where "obj" is some subclass of ansible.parsing.yaml.objects.AnsibleBaseYAMLObject, which should be returned by the DataLoader() class. ''' def __init__(self, message, obj=None, show_content=True): # we import this here to prevent an import loop problem, # since the objects code also imports ansible.errors from ansible.parsing.yaml.objects import AnsibleBaseYAMLObject self._obj = obj self._show_content = show_content if obj and isinstance(obj, AnsibleBaseYAMLObject): extended_error = self._get_extended_error() if extended_error: self.message = 'ERROR! %s\n\n%s' % (message, extended_error) else: self.message = 'ERROR! 
%s' % message def __str__(self): return self.message def __repr__(self): return self.message def _get_error_lines_from_file(self, file_name, line_number): ''' Returns the line in the file which coresponds to the reported error location, as well as the line preceding it (if the error did not occur on the first line), to provide context to the error. ''' target_line = '' prev_line = '' with open(file_name, 'r') as f: lines = f.readlines() target_line = lines[line_number] if line_number > 0: prev_line = lines[line_number - 1] return (target_line, prev_line) def _get_extended_error(self): ''' Given an object reporting the location of the exception in a file, return detailed information regarding it including: * the line which caused the error as well as the one preceding it * causes and suggested remedies for common syntax errors If this error was created with show_content=False, the reporting of content is suppressed, as the file contents may be sensitive (ie. vault data). ''' error_message = '' try: (src_file, line_number, col_number) = self._obj.ansible_pos error_message += YAML_POSITION_DETAILS % (src_file, line_number, col_number) if src_file not in ('<string>', '<unicode>') and self._show_content: (target_line, prev_line) = self._get_error_lines_from_file(src_file, line_number - 1) if target_line: stripped_line = target_line.replace(" ","") arrow_line = (" " * (col_number-1)) + "^ here" #header_line = ("=" * 73) error_message += "\nThe offending line appears to be:\n\n%s\n%s\n%s\n" % (prev_line.rstrip(), target_line.rstrip(), arrow_line) # common error/remediation checking here: # check for unquoted vars starting lines if ('{{' in target_line and '}}' in target_line) and ('"{{' not in target_line or "'{{" not in target_line): error_message += YAML_COMMON_UNQUOTED_VARIABLE_ERROR # check for common dictionary mistakes elif ":{{" in stripped_line and "}}" in stripped_line: error_message += YAML_COMMON_DICT_ERROR # check for common unquoted colon mistakes elif 
len(target_line) and len(target_line) > 1 and len(target_line) > col_number and target_line[col_number] == ":" and target_line.count(':') > 1: error_message += YAML_COMMON_UNQUOTED_COLON_ERROR # otherwise, check for some common quoting mistakes else: parts = target_line.split(":") if len(parts) > 1: middle = parts[1].strip() match = False unbalanced = False if middle.startswith("'") and not middle.endswith("'"): match = True elif middle.startswith('"') and not middle.endswith('"'): match = True if len(middle) > 0 and middle[0] in [ '"', "'" ] and middle[-1] in [ '"', "'" ] and target_line.count("'") > 2 or target_line.count('"') > 2: unbalanced = True if match: error_message += YAML_COMMON_PARTIALLY_QUOTED_LINE_ERROR if unbalanced: error_message += YAML_COMMON_UNBALANCED_QUOTES_ERROR except (IOError, TypeError): error_message += '\n(could not open file to display line)' except IndexError: error_message += '\n(specified line no longer in file, maybe it changed?)' return error_message class AnsibleOptionsError(AnsibleError): ''' bad or incomplete options passed ''' pass class AnsibleParserError(AnsibleError): ''' something was detected early that is wrong about a playbook or data file ''' pass class AnsibleInternalError(AnsibleError): ''' internal safeguards tripped, something happened in the code that should never happen ''' pass class AnsibleRuntimeError(AnsibleError): ''' ansible had a problem while running a playbook ''' pass class AnsibleModuleError(AnsibleRuntimeError): ''' a module failed somehow ''' pass class AnsibleConnectionFailure(AnsibleRuntimeError): ''' the transport / connection_plugin had a fatal error ''' pass class AnsibleFilterError(AnsibleRuntimeError): ''' a templating failure ''' pass class AnsibleLookupError(AnsibleRuntimeError): ''' a lookup failure ''' pass class AnsibleCallbackError(AnsibleRuntimeError): ''' a callback failure ''' pass class AnsibleUndefinedVariable(AnsibleRuntimeError): ''' a templating failure ''' pass class 
AnsibleFileNotFound(AnsibleRuntimeError): ''' a file missing failure ''' pass
gpl-3.0
olivierdalang/QGIS
tests/src/python/test_qgsproviderconnectionmodel.py
33
10669
# -*- coding: utf-8 -*- """QGIS Unit tests for OGR GeoPackage QgsProviderConnectionModel. .. note:: This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. """ __author__ = 'Nyall Dawson' __date__ = '07/08/2020' __copyright__ = 'Copyright 2019, The QGIS Project' # This will get replaced with a git SHA1 when you do a git archive __revision__ = '$Format:%H$' import os import shutil import tempfile from qgis.core import ( QgsVectorLayer, QgsProviderRegistry, QgsProviderConnectionModel, ) from qgis.PyQt.QtCore import ( QModelIndex, Qt, QCoreApplication ) from qgis.testing import unittest from utilities import unitTestDataPath, start_app TEST_DATA_DIR = unitTestDataPath() class TestPyQgsProviderConnectionModel(unittest.TestCase): @classmethod def setUpClass(cls): """Run before all tests""" QCoreApplication.setOrganizationName("QGIS_Test") QCoreApplication.setOrganizationDomain(cls.__name__) QCoreApplication.setApplicationName(cls.__name__) start_app() gpkg_original_path = '{}/qgis_server/test_project_wms_grouped_layers.gpkg'.format(TEST_DATA_DIR) cls.basetestpath = tempfile.mkdtemp() cls.gpkg_path = '{}/test_gpkg.gpkg'.format(cls.basetestpath) shutil.copy(gpkg_original_path, cls.gpkg_path) vl = QgsVectorLayer('{}|layername=cdb_lines'.format(cls.gpkg_path), 'test', 'ogr') assert vl.isValid() gpkg2_original_path = '{}/points_gpkg.gpkg'.format(TEST_DATA_DIR) cls.gpkg_path2 = '{}/test_gpkg2.gpkg'.format(cls.basetestpath) shutil.copy(gpkg2_original_path, cls.gpkg_path2) vl = QgsVectorLayer('{}'.format(cls.gpkg_path2), 'test', 'ogr') assert vl.isValid() @classmethod def tearDownClass(cls): """Run after all tests""" os.unlink(cls.gpkg_path) os.unlink(cls.gpkg_path2) def test_model(self): """Test model functionality""" md = QgsProviderRegistry.instance().providerMetadata('ogr') conn = 
md.createConnection(self.gpkg_path, {}) md.saveConnection(conn, 'qgis_test1') model = QgsProviderConnectionModel('ogr') self.assertEqual(model.rowCount(), 1) self.assertEqual(model.columnCount(), 1) self.assertEqual(model.data(model.index(0, 0, QModelIndex()), Qt.DisplayRole), 'qgis_test1') self.assertEqual(model.data(model.index(0, 0, QModelIndex()), Qt.ToolTipRole), self.gpkg_path) self.assertEqual(model.data(model.index(0, 0, QModelIndex()), QgsProviderConnectionModel.RoleConnectionName), 'qgis_test1') self.assertEqual(model.data(model.index(0, 0, QModelIndex()), QgsProviderConnectionModel.RoleUri), self.gpkg_path) self.assertEqual(model.data(model.index(0, 0, QModelIndex()), QgsProviderConnectionModel.RoleConfiguration), {}) md.saveConnection(conn, 'qgis_test1') self.assertEqual(model.rowCount(), 1) self.assertEqual(model.data(model.index(0, 0, QModelIndex()), Qt.DisplayRole), 'qgis_test1') conn2 = md.createConnection(self.gpkg_path2, {}) md.saveConnection(conn2, 'qgis_test2') self.assertEqual(model.rowCount(), 2) self.assertEqual(model.data(model.index(0, 0, QModelIndex()), Qt.DisplayRole), 'qgis_test1') self.assertEqual(model.data(model.index(0, 0, QModelIndex()), Qt.ToolTipRole), self.gpkg_path) self.assertEqual(model.data(model.index(0, 0, QModelIndex()), QgsProviderConnectionModel.RoleConnectionName), 'qgis_test1') self.assertEqual(model.data(model.index(0, 0, QModelIndex()), QgsProviderConnectionModel.RoleUri), self.gpkg_path) self.assertEqual(model.data(model.index(1, 0, QModelIndex()), Qt.DisplayRole), 'qgis_test2') self.assertEqual(model.data(model.index(1, 0, QModelIndex()), Qt.ToolTipRole), self.gpkg_path2) self.assertEqual(model.data(model.index(1, 0, QModelIndex()), QgsProviderConnectionModel.RoleConnectionName), 'qgis_test2') self.assertEqual(model.data(model.index(1, 0, QModelIndex()), QgsProviderConnectionModel.RoleUri), self.gpkg_path2) md.deleteConnection('qgis_test1') self.assertEqual(model.rowCount(), 1) 
self.assertEqual(model.data(model.index(0, 0, QModelIndex()), Qt.DisplayRole), 'qgis_test2') md.deleteConnection('qgis_test2') def test_model_allow_empty(self): """Test model with empty entry""" model = QgsProviderConnectionModel('ogr') self.assertEqual(model.rowCount(), 0) model.setAllowEmptyConnection(True) self.assertEqual(model.rowCount(), 1) self.assertFalse(model.data(model.index(0, 0, QModelIndex()), Qt.DisplayRole)) self.assertTrue(model.data(model.index(0, 0, QModelIndex()), QgsProviderConnectionModel.RoleEmpty)) md = QgsProviderRegistry.instance().providerMetadata('ogr') conn = md.createConnection(self.gpkg_path, {}) md.saveConnection(conn, 'qgis_test1') model.setAllowEmptyConnection(False) model.setAllowEmptyConnection(False) self.assertEqual(model.rowCount(), 1) self.assertEqual(model.columnCount(), 1) self.assertEqual(model.data(model.index(0, 0, QModelIndex()), Qt.DisplayRole), 'qgis_test1') self.assertEqual(model.data(model.index(0, 0, QModelIndex()), Qt.ToolTipRole), self.gpkg_path) self.assertEqual(model.data(model.index(0, 0, QModelIndex()), QgsProviderConnectionModel.RoleConnectionName), 'qgis_test1') self.assertEqual(model.data(model.index(0, 0, QModelIndex()), QgsProviderConnectionModel.RoleUri), self.gpkg_path) self.assertEqual(model.data(model.index(0, 0, QModelIndex()), QgsProviderConnectionModel.RoleConfiguration), {}) self.assertFalse(model.data(model.index(0, 0, QModelIndex()), QgsProviderConnectionModel.RoleEmpty)) model.setAllowEmptyConnection(True) model.setAllowEmptyConnection(True) self.assertEqual(model.rowCount(), 2) self.assertFalse(model.data(model.index(0, 0, QModelIndex()), Qt.DisplayRole)) self.assertFalse(model.data(model.index(0, 0, QModelIndex()), Qt.ToolTipRole)) self.assertFalse(model.data(model.index(0, 0, QModelIndex()), QgsProviderConnectionModel.RoleConnectionName)) self.assertFalse(model.data(model.index(0, 0, QModelIndex()), QgsProviderConnectionModel.RoleUri)) self.assertFalse(model.data(model.index(0, 0, 
QModelIndex()), QgsProviderConnectionModel.RoleConfiguration)) self.assertTrue(model.data(model.index(0, 0, QModelIndex()), QgsProviderConnectionModel.RoleEmpty)) self.assertEqual(model.data(model.index(1, 0, QModelIndex()), Qt.DisplayRole), 'qgis_test1') self.assertEqual(model.data(model.index(1, 0, QModelIndex()), Qt.ToolTipRole), self.gpkg_path) self.assertEqual(model.data(model.index(1, 0, QModelIndex()), QgsProviderConnectionModel.RoleConnectionName), 'qgis_test1') self.assertEqual(model.data(model.index(1, 0, QModelIndex()), QgsProviderConnectionModel.RoleUri), self.gpkg_path) self.assertEqual(model.data(model.index(1, 0, QModelIndex()), QgsProviderConnectionModel.RoleConfiguration), {}) self.assertFalse(model.data(model.index(1, 0, QModelIndex()), QgsProviderConnectionModel.RoleEmpty)) md.saveConnection(conn, 'qgis_test1') self.assertEqual(model.rowCount(), 2) self.assertFalse(model.data(model.index(0, 0, QModelIndex()), Qt.DisplayRole)) self.assertTrue(model.data(model.index(0, 0, QModelIndex()), QgsProviderConnectionModel.RoleEmpty)) self.assertEqual(model.data(model.index(1, 0, QModelIndex()), Qt.DisplayRole), 'qgis_test1') self.assertFalse(model.data(model.index(1, 0, QModelIndex()), QgsProviderConnectionModel.RoleEmpty)) model.setAllowEmptyConnection(False) self.assertEqual(model.rowCount(), 1) self.assertEqual(model.data(model.index(0, 0, QModelIndex()), Qt.DisplayRole), 'qgis_test1') self.assertFalse(model.data(model.index(0, 0, QModelIndex()), QgsProviderConnectionModel.RoleEmpty)) model.setAllowEmptyConnection(True) conn2 = md.createConnection(self.gpkg_path2, {}) md.saveConnection(conn2, 'qgis_test2') self.assertEqual(model.rowCount(), 3) self.assertFalse(model.data(model.index(0, 0, QModelIndex()), Qt.DisplayRole)) self.assertTrue(model.data(model.index(0, 0, QModelIndex()), QgsProviderConnectionModel.RoleEmpty)) self.assertEqual(model.data(model.index(1, 0, QModelIndex()), Qt.DisplayRole), 'qgis_test1') self.assertEqual(model.data(model.index(1, 
0, QModelIndex()), Qt.ToolTipRole), self.gpkg_path) self.assertEqual(model.data(model.index(1, 0, QModelIndex()), QgsProviderConnectionModel.RoleConnectionName), 'qgis_test1') self.assertEqual(model.data(model.index(1, 0, QModelIndex()), QgsProviderConnectionModel.RoleUri), self.gpkg_path) self.assertFalse(model.data(model.index(1, 0, QModelIndex()), QgsProviderConnectionModel.RoleEmpty)) self.assertEqual(model.data(model.index(2, 0, QModelIndex()), Qt.DisplayRole), 'qgis_test2') self.assertEqual(model.data(model.index(2, 0, QModelIndex()), Qt.ToolTipRole), self.gpkg_path2) self.assertEqual(model.data(model.index(2, 0, QModelIndex()), QgsProviderConnectionModel.RoleConnectionName), 'qgis_test2') self.assertEqual(model.data(model.index(2, 0, QModelIndex()), QgsProviderConnectionModel.RoleUri), self.gpkg_path2) self.assertFalse(model.data(model.index(2, 0, QModelIndex()), QgsProviderConnectionModel.RoleEmpty)) model.setAllowEmptyConnection(False) self.assertEqual(model.rowCount(), 2) self.assertEqual(model.data(model.index(0, 0, QModelIndex()), Qt.DisplayRole), 'qgis_test1') self.assertEqual(model.data(model.index(1, 0, QModelIndex()), Qt.DisplayRole), 'qgis_test2') model.setAllowEmptyConnection(True) md.deleteConnection('qgis_test1') self.assertEqual(model.rowCount(), 2) self.assertFalse(model.data(model.index(0, 0, QModelIndex()), Qt.DisplayRole)) self.assertEqual(model.data(model.index(1, 0, QModelIndex()), Qt.DisplayRole), 'qgis_test2') model.setAllowEmptyConnection(False) self.assertEqual(model.rowCount(), 1) self.assertEqual(model.data(model.index(0, 0, QModelIndex()), Qt.DisplayRole), 'qgis_test2') if __name__ == '__main__': unittest.main()
gpl-2.0
zefciu/django-extensions
django_extensions/management/commands/set_default_site.py
28
2244
""" set_default_site.py """ import socket from optparse import make_option from django.core.management.base import CommandError, NoArgsCommand from django_extensions.management.utils import signalcommand class Command(NoArgsCommand): option_list = NoArgsCommand.option_list + ( make_option('--name', dest='site_name', default=None, help='Use this as site name.'), make_option('--domain', dest='site_domain', default=None, help='Use this as site domain.'), make_option('--system-fqdn', dest='set_as_system_fqdn', default=False, action="store_true", help='Use the systems FQDN (Fully Qualified Domain Name) as name and domain. Can be used in combination with --name'), ) help = "Set parameters of the default django.contrib.sites Site" @signalcommand def handle_noargs(self, **options): from django.contrib.sites.models import Site try: site = Site.objects.get(pk=1) except Site.DoesNotExist: raise CommandError("Default site with pk=1 does not exist") else: name = options.get("site_name", None) domain = options.get("site_domain", None) if options.get('set_as_system_fqdn', False): domain = socket.getfqdn() if not domain: raise CommandError("Cannot find systems FQDN") if name is None: name = domain update_kwargs = {} if name and name != site.name: update_kwargs["name"] = name if domain and domain != site.domain: update_kwargs["domain"] = domain if update_kwargs: Site.objects.filter(pk=1).update(**update_kwargs) site = Site.objects.get(pk=1) print("Updated default site. You might need to restart django as sites are cached aggressively.") else: print("Nothing to update (need --name, --domain and/or --system-fqdn)") print("Default Site:") print("\tid = %s" % site.id) print("\tname = %s" % site.name) print("\tdomain = %s" % site.domain)
mit
dkodnik/arp
addons/stock/wizard/stock_change_product_qty.py
41
5163
# -*- coding: utf-8 -*-
##############################################################################
#
#    OpenERP, Open Source Management Solution
#    Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU Affero General Public License as
#    published by the Free Software Foundation, either version 3 of the
#    License, or (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################

from openerp.osv import fields, osv, orm
import openerp.addons.decimal_precision as dp
from openerp.tools.translate import _
from openerp import tools


class stock_change_product_qty(osv.osv_memory):
    """Transient wizard that updates a product's quantity on hand by
    creating and immediately confirming a one-line physical inventory.
    """
    _name = "stock.change.product.qty"
    _description = "Change Product Quantity"

    _columns = {
        'product_id': fields.many2one('product.product', 'Product'),
        # Expressed in the product's default Unit of Measure.
        'new_quantity': fields.float('New Quantity on Hand',
                                     digits_compute=dp.get_precision('Product Unit of Measure'),
                                     required=True,
                                     help='This quantity is expressed in the Default Unit of Measure of the product.'),
        'lot_id': fields.many2one('stock.production.lot', 'Serial Number',
                                  domain="[('product_id','=',product_id)]"),
        'location_id': fields.many2one('stock.location', 'Location',
                                       required=True,
                                       domain="[('usage', '=', 'internal')]"),
    }
    _defaults = {
        'new_quantity': 1,
        # The wizard is launched from a product form, so the active record
        # in the context is the product to update.
        'product_id': lambda self, cr, uid, ctx: ctx and ctx.get('active_id', False) or False
    }

    def default_get(self, cr, uid, fields, context):
        """Provide defaults for the wizard fields.

        In addition to the standard defaults, pre-fill ``location_id``
        with the main stock location (``stock.stock_location_stock``)
        when no location default exists, provided the current user may
        read that location.

        :param cr: database cursor
        :param uid: id of the current user
        :param fields: list of field names for which defaults are wanted
        :param context: standard context dictionary
        :return: dict mapping field names to default values
        """
        res = super(stock_change_product_qty, self).default_get(cr, uid, fields, context=context)

        if 'location_id' in fields:
            location_id = res.get('location_id', False)
            if not location_id:
                try:
                    # Fall back to the module-defined main stock location.
                    model, location_id = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'stock', 'stock_location_stock')
                except (orm.except_orm, ValueError):
                    # XML id missing/uninstalled: leave location_id falsy.
                    pass
            if location_id:
                try:
                    # Verify the user can actually read the fallback location.
                    self.pool.get('stock.location').check_access_rule(cr, uid, [location_id], 'read', context=context)
                except (orm.except_orm, ValueError):
                    # NOTE(review): access failure is swallowed and the id is
                    # still returned below — confirm this is intentional.
                    pass
            res['location_id'] = location_id
        return res

    def change_product_qty(self, cr, uid, ids, context=None):
        """Apply the requested quantity by making a physical inventory.

        For each wizard record, creates a ``stock.inventory`` with a
        single line holding the new quantity for the active product and
        immediately validates it (``action_done``).

        :param ids: ids of the wizard records being confirmed
        :raises osv.except_osv: if the new quantity is negative
        :return: empty dict (closes the wizard window)
        """
        if context is None:
            context = {}
        rec_id = context and context.get('active_id', False)
        assert rec_id, _('Active ID is not set in Context')

        inventory_obj = self.pool.get('stock.inventory')
        inventory_line_obj = self.pool.get('stock.inventory.line')
        prod_obj_pool = self.pool.get('product.product')

        for data in self.browse(cr, uid, ids, context=context):
            if data.new_quantity < 0:
                raise osv.except_osv(_('Warning!'), _('Quantity cannot be negative.'))
            # Read current availability restricted to the chosen location/lot.
            ctx = context.copy()
            ctx['location'] = data.location_id.id
            ctx['lot_id'] = data.lot_id.id
            res_original = prod_obj_pool.browse(cr, uid, rec_id, context=ctx)

            inventory_id = inventory_obj.create(cr, uid, {
                'name': _('INV: %s') % tools.ustr(res_original.name),
                'product_id': rec_id,
                'location_id': data.location_id.id,
                'lot_id': data.lot_id.id}, context=context)
            # Theoretical quantity before adjustment, for the inventory line.
            th_qty = res_original.qty_available
            line_data = {
                'inventory_id': inventory_id,
                'product_qty': data.new_quantity,
                'location_id': data.location_id.id,
                'product_id': rec_id,
                'product_uom_id': res_original.uom_id.id,
                'th_qty': th_qty,
                'prod_lot_id': data.lot_id.id
            }
            inventory_line_obj.create(cr, uid, line_data, context=context)
            inventory_obj.action_done(cr, uid, [inventory_id], context=context)
        return {}

# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
andnovar/networkx
networkx/algorithms/link_analysis/hits_alg.py
8
9422
"""Hubs and authorities analysis of graph structure. """ # Copyright (C) 2008-2012 by # Aric Hagberg <hagberg@lanl.gov> # Dan Schult <dschult@colgate.edu> # Pieter Swart <swart@lanl.gov> # All rights reserved. # BSD license. # NetworkX:http://networkx.github.io/ import networkx as nx from networkx.exception import NetworkXError __author__ = """Aric Hagberg (hagberg@lanl.gov)""" __all__ = ['hits','hits_numpy','hits_scipy','authority_matrix','hub_matrix'] def hits(G,max_iter=100,tol=1.0e-8,nstart=None,normalized=True): """Return HITS hubs and authorities values for nodes. The HITS algorithm computes two numbers for a node. Authorities estimates the node value based on the incoming links. Hubs estimates the node value based on outgoing links. Parameters ---------- G : graph A NetworkX graph max_iter : interger, optional Maximum number of iterations in power method. tol : float, optional Error tolerance used to check convergence in power method iteration. nstart : dictionary, optional Starting value of each node for power method iteration. normalized : bool (default=True) Normalize results by the sum of all of the values. Returns ------- (hubs,authorities) : two-tuple of dictionaries Two dictionaries keyed by node containing the hub and authority values. Examples -------- >>> G=nx.path_graph(4) >>> h,a=nx.hits(G) Notes ----- The eigenvector calculation is done by the power iteration method and has no guarantee of convergence. The iteration will stop after max_iter iterations or an error tolerance of number_of_nodes(G)*tol has been reached. The HITS algorithm was designed for directed graphs but this algorithm does not check if the input graph is directed and will execute on undirected graphs. References ---------- .. [1] A. Langville and C. Meyer, "A survey of eigenvector methods of web information retrieval." http://citeseer.ist.psu.edu/713792.html .. [2] Jon Kleinberg, Authoritative sources in a hyperlinked environment Journal of the ACM 46 (5): 604-32, 1999. 
doi:10.1145/324133.324140. http://www.cs.cornell.edu/home/kleinber/auth.pdf. """ if type(G) == nx.MultiGraph or type(G) == nx.MultiDiGraph: raise Exception("hits() not defined for graphs with multiedges.") if len(G) == 0: return {},{} # choose fixed starting vector if not given if nstart is None: h=dict.fromkeys(G,1.0/G.number_of_nodes()) else: h=nstart # normalize starting vector s=1.0/sum(h.values()) for k in h: h[k]*=s i=0 while True: # power iteration: make up to max_iter iterations hlast=h h=dict.fromkeys(hlast.keys(),0) a=dict.fromkeys(hlast.keys(),0) # this "matrix multiply" looks odd because it is # doing a left multiply a^T=hlast^T*G for n in h: for nbr in G[n]: a[nbr]+=hlast[n]*G[n][nbr].get('weight',1) # now multiply h=Ga for n in h: for nbr in G[n]: h[n]+=a[nbr]*G[n][nbr].get('weight',1) # normalize vector s=1.0/max(h.values()) for n in h: h[n]*=s # normalize vector s=1.0/max(a.values()) for n in a: a[n]*=s # check convergence, l1 norm err=sum([abs(h[n]-hlast[n]) for n in h]) if err < tol: break if i>max_iter: raise NetworkXError(\ "HITS: power iteration failed to converge in %d iterations."%(i+1)) i+=1 if normalized: s = 1.0/sum(a.values()) for n in a: a[n] *= s s = 1.0/sum(h.values()) for n in h: h[n] *= s return h,a def authority_matrix(G,nodelist=None): """Return the HITS authority matrix.""" M=nx.to_numpy_matrix(G,nodelist=nodelist) return M.T*M def hub_matrix(G,nodelist=None): """Return the HITS hub matrix.""" M=nx.to_numpy_matrix(G,nodelist=nodelist) return M*M.T def hits_numpy(G,normalized=True): """Return HITS hubs and authorities values for nodes. The HITS algorithm computes two numbers for a node. Authorities estimates the node value based on the incoming links. Hubs estimates the node value based on outgoing links. Parameters ---------- G : graph A NetworkX graph normalized : bool (default=True) Normalize results by the sum of all of the values. 
Returns ------- (hubs,authorities) : two-tuple of dictionaries Two dictionaries keyed by node containing the hub and authority values. Examples -------- >>> G=nx.path_graph(4) >>> h,a=nx.hits(G) Notes ----- The eigenvector calculation uses NumPy's interface to LAPACK. The HITS algorithm was designed for directed graphs but this algorithm does not check if the input graph is directed and will execute on undirected graphs. References ---------- .. [1] A. Langville and C. Meyer, "A survey of eigenvector methods of web information retrieval." http://citeseer.ist.psu.edu/713792.html .. [2] Jon Kleinberg, Authoritative sources in a hyperlinked environment Journal of the ACM 46 (5): 604-32, 1999. doi:10.1145/324133.324140. http://www.cs.cornell.edu/home/kleinber/auth.pdf. """ try: import numpy as np except ImportError: raise ImportError(\ "hits_numpy() requires NumPy: http://scipy.org/") if len(G) == 0: return {},{} H = nx.hub_matrix(G, list(G)) e,ev=np.linalg.eig(H) m=e.argsort()[-1] # index of maximum eigenvalue h=np.array(ev[:,m]).flatten() A=nx.authority_matrix(G, list(G)) e,ev=np.linalg.eig(A) m=e.argsort()[-1] # index of maximum eigenvalue a=np.array(ev[:,m]).flatten() if normalized: h = h/h.sum() a = a/a.sum() else: h = h/h.max() a = a/a.max() hubs = dict(zip(G, map(float, h))) authorities = dict(zip(G, map(float, a))) return hubs,authorities def hits_scipy(G,max_iter=100,tol=1.0e-6,normalized=True): """Return HITS hubs and authorities values for nodes. The HITS algorithm computes two numbers for a node. Authorities estimates the node value based on the incoming links. Hubs estimates the node value based on outgoing links. Parameters ---------- G : graph A NetworkX graph max_iter : interger, optional Maximum number of iterations in power method. tol : float, optional Error tolerance used to check convergence in power method iteration. nstart : dictionary, optional Starting value of each node for power method iteration. 
normalized : bool (default=True) Normalize results by the sum of all of the values. Returns ------- (hubs,authorities) : two-tuple of dictionaries Two dictionaries keyed by node containing the hub and authority values. Examples -------- >>> G=nx.path_graph(4) >>> h,a=nx.hits(G) Notes ----- This implementation uses SciPy sparse matrices. The eigenvector calculation is done by the power iteration method and has no guarantee of convergence. The iteration will stop after max_iter iterations or an error tolerance of number_of_nodes(G)*tol has been reached. The HITS algorithm was designed for directed graphs but this algorithm does not check if the input graph is directed and will execute on undirected graphs. References ---------- .. [1] A. Langville and C. Meyer, "A survey of eigenvector methods of web information retrieval." http://citeseer.ist.psu.edu/713792.html .. [2] Jon Kleinberg, Authoritative sources in a hyperlinked environment Journal of the ACM 46 (5): 604-632, 1999. doi:10.1145/324133.324140. http://www.cs.cornell.edu/home/kleinber/auth.pdf. 
""" try: import scipy.sparse import numpy as np except ImportError: raise ImportError(\ "hits_scipy() requires SciPy: http://scipy.org/") if len(G) == 0: return {},{} M = nx.to_scipy_sparse_matrix(G, nodelist=list(G)) (n,m)=M.shape # should be square A=M.T*M # authority matrix x=scipy.ones((n,1))/n # initial guess # power iteration on authority matrix i=0 while True: xlast=x x=A*x x=x/x.max() # check convergence, l1 norm err=scipy.absolute(x-xlast).sum() if err < tol: break if i>max_iter: raise NetworkXError(\ "HITS: power iteration failed to converge in %d iterations."%(i+1)) i+=1 a=np.asarray(x).flatten() # h=M*a h=np.asarray(M*a).flatten() if normalized: h = h/h.sum() a = a/a.sum() hubs = dict(zip(G, map(float, h))) authorities = dict(zip(G, map(float, a))) return hubs,authorities # fixture for nose tests def setup_module(module): from nose import SkipTest try: import numpy except: raise SkipTest("NumPy not available") try: import scipy except: raise SkipTest("SciPy not available")
bsd-3-clause
programadorjc/django
django/contrib/messages/api.py
512
3202
from django.contrib.messages import constants
from django.contrib.messages.storage import default_storage
from django.http import HttpRequest

__all__ = (
    'add_message',
    'get_messages',
    'get_level',
    'set_level',
    'debug',
    'info',
    'success',
    'warning',
    'error',
    'MessageFailure',
)


class MessageFailure(Exception):
    """Raised when a message cannot be recorded and fail_silently is False."""
    pass


def add_message(request, level, message, extra_tags='', fail_silently=False):
    """
    Attempts to add a message to the request using the 'messages' app.
    """
    if not isinstance(request, HttpRequest):
        raise TypeError(
            "add_message() argument must be an HttpRequest object, "
            "not '%s'." % request.__class__.__name__)
    if hasattr(request, '_messages'):
        return request._messages.add(level, message, extra_tags)
    if fail_silently:
        return
    raise MessageFailure(
        'You cannot add messages without installing '
        'django.contrib.messages.middleware.MessageMiddleware')


def get_messages(request):
    """
    Returns the message storage on the request if it exists, otherwise
    returns an empty list.
    """
    return request._messages if hasattr(request, '_messages') else []


def get_level(request):
    """
    Returns the minimum level of messages to be recorded.

    The default level is the ``MESSAGE_LEVEL`` setting. If this is not
    found, the ``INFO`` level is used.
    """
    storage = request._messages if hasattr(request, '_messages') else default_storage(request)
    return storage.level


def set_level(request, level):
    """
    Sets the minimum level of messages to be recorded, returning ``True``
    if the level was recorded successfully.

    If set to ``None``, the default level will be used (see the
    ``get_level`` method).
    """
    if hasattr(request, '_messages'):
        request._messages.level = level
        return True
    return False


def debug(request, message, extra_tags='', fail_silently=False):
    """
    Adds a message with the ``DEBUG`` level.
    """
    add_message(request, constants.DEBUG, message,
                extra_tags=extra_tags, fail_silently=fail_silently)


def info(request, message, extra_tags='', fail_silently=False):
    """
    Adds a message with the ``INFO`` level.
    """
    add_message(request, constants.INFO, message,
                extra_tags=extra_tags, fail_silently=fail_silently)


def success(request, message, extra_tags='', fail_silently=False):
    """
    Adds a message with the ``SUCCESS`` level.
    """
    add_message(request, constants.SUCCESS, message,
                extra_tags=extra_tags, fail_silently=fail_silently)


def warning(request, message, extra_tags='', fail_silently=False):
    """
    Adds a message with the ``WARNING`` level.
    """
    add_message(request, constants.WARNING, message,
                extra_tags=extra_tags, fail_silently=fail_silently)


def error(request, message, extra_tags='', fail_silently=False):
    """
    Adds a message with the ``ERROR`` level.
    """
    add_message(request, constants.ERROR, message,
                extra_tags=extra_tags, fail_silently=fail_silently)
bsd-3-clause
harisibrahimkv/django
django/core/management/color.py
43
1821
""" Sets up the terminal color scheme. """ import functools import os import sys from django.utils import termcolors def supports_color(): """ Return True if the running system's terminal supports color, and False otherwise. """ plat = sys.platform supported_platform = plat != 'Pocket PC' and (plat != 'win32' or 'ANSICON' in os.environ) # isatty is not always implemented, #6223. is_a_tty = hasattr(sys.stdout, 'isatty') and sys.stdout.isatty() if not supported_platform or not is_a_tty: return False return True class Style: pass def make_style(config_string=''): """ Create a Style object from the given config_string. If config_string is empty django.utils.termcolors.DEFAULT_PALETTE is used. """ style = Style() color_settings = termcolors.parse_color_setting(config_string) # The nocolor palette has all available roles. # Use that palette as the basis for populating # the palette as defined in the environment. for role in termcolors.PALETTES[termcolors.NOCOLOR_PALETTE]: if color_settings: format = color_settings.get(role, {}) style_func = termcolors.make_style(**format) else: def style_func(x): return x setattr(style, role, style_func) # For backwards compatibility, # set style for ERROR_OUTPUT == ERROR style.ERROR_OUTPUT = style.ERROR return style @functools.lru_cache(maxsize=None) def no_style(): """ Return a Style object with no color scheme. """ return make_style('nocolor') def color_style(): """ Return a Style object from the Django color scheme. """ if not supports_color(): return no_style() return make_style(os.environ.get('DJANGO_COLORS', ''))
bsd-3-clause
indictranstech/Das_Erpnext
erpnext/hr/doctype/upload_attendance/upload_attendance.py
55
4418
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
# For license information, please see license.txt

# NOTE: this module is Python 2 (see the `except Exception, e` syntax below).

from __future__ import unicode_literals
import frappe
from frappe.utils import cstr, add_days, date_diff
from frappe import _
from frappe.utils.csvutils import UnicodeWriter
from frappe.model.document import Document


class UploadAttendance(Document):
    # Pure UI/controller doctype: all behavior lives in the whitelisted
    # module-level functions below.
    pass


@frappe.whitelist()
def get_template():
    """Build and return (via frappe.response) a CSV template pre-filled
    with one row per active employee per date in the requested range.

    Requires 'create' permission on Attendance; raises PermissionError
    otherwise.  Date range comes from the request's form_dict.
    """
    if not frappe.has_permission("Attendance", "create"):
        raise frappe.PermissionError

    args = frappe.local.form_dict
    w = UnicodeWriter()
    w = add_header(w)
    w = add_data(w, args)

    # write out response as a type csv
    frappe.response['result'] = cstr(w.getvalue())
    frappe.response['type'] = 'csv'
    frappe.response['doctype'] = "Attendance"


def add_header(w):
    """Write the instruction rows and the column-heading row to writer `w`."""
    # Allowed Status values come from the Attendance doctype's select options.
    status = ", ".join((frappe.get_meta("Attendance").get_field("status").options or "").strip().split("\n"))
    w.writerow(["Notes:"])
    w.writerow(["Please do not change the template headings"])
    w.writerow(["Status should be one of these values: " + status])
    w.writerow(["If you are overwriting existing attendance records, 'ID' column mandatory"])
    w.writerow(["ID", "Employee", "Employee Name", "Date", "Status",
        "Fiscal Year", "Company", "Naming Series"])
    return w


def add_data(w, args):
    """Write one data row per (date, active employee) pair to writer `w`,
    pre-filling ID/Status/Series from any existing attendance record."""
    from erpnext.accounts.utils import get_fiscal_year

    dates = get_dates(args)
    employees = get_active_employees()
    existing_attendance_records = get_existing_attendance_records(args)
    for date in dates:
        for employee in employees:
            existing_attendance = {}
            if existing_attendance_records \
                and tuple([date, employee.name]) in existing_attendance_records:
                    existing_attendance = existing_attendance_records[tuple([date, employee.name])]
            row = [
                existing_attendance and existing_attendance.name or "",
                employee.name, employee.employee_name, date,
                existing_attendance and existing_attendance.status or "",
                get_fiscal_year(date)[0], employee.company,
                existing_attendance and existing_attendance.naming_series or get_naming_series(),
            ]
            w.writerow(row)
    return w


def get_dates(args):
    """get list of dates in between from date and to date (inclusive)"""
    no_of_days = date_diff(add_days(args["to_date"], 1), args["from_date"])
    dates = [add_days(args["from_date"], i) for i in range(0, no_of_days)]
    return dates


def get_active_employees():
    """Return dicts (name, employee_name, company) of all active employees."""
    employees = frappe.db.sql("""select name, employee_name, company
        from tabEmployee where docstatus < 2 and status = 'Active'""", as_dict=1)
    return employees


def get_existing_attendance_records(args):
    """Return a dict keyed by (att_date, employee) of attendance records
    already present in the requested date range."""
    attendance = frappe.db.sql("""select name, att_date, employee, status, naming_series
        from `tabAttendance` where att_date between %s and %s and docstatus < 2""",
        (args["from_date"], args["to_date"]), as_dict=1)

    existing_attendance = {}
    for att in attendance:
        existing_attendance[tuple([att.att_date, att.employee])] = att
    return existing_attendance


def get_naming_series():
    """Return the first configured naming series for Attendance, or throw."""
    series = frappe.get_meta("Attendance").get_field("naming_series").options.strip().split("\n")
    if not series:
        frappe.throw(_("Please setup numbering series for Attendance via Setup > Numbering Series"))
    return series[0]


@frappe.whitelist()
def upload():
    """Import attendance records from the uploaded CSV template.

    Rows 0-3 are the instruction rows written by add_header(); row 4 is
    the heading row and rows 5+ are data.  All rows are imported in one
    transaction: any row error rolls the whole batch back.

    Returns {"messages": [...], "error": bool}.
    """
    if not frappe.has_permission("Attendance", "create"):
        raise frappe.PermissionError

    from frappe.utils.csvutils import read_csv_content_from_uploaded_file
    from frappe.modules import scrub

    rows = read_csv_content_from_uploaded_file()
    # Drop empty rows (Python 2 filter returns a list, so indexing is safe).
    rows = filter(lambda x: x and any(x), rows)
    if not rows:
        msg = [_("Please select a csv file")]
        return {"messages": msg, "error": msg}
    # Map headings to fieldnames; first and fourth columns are fixed.
    columns = [scrub(f) for f in rows[4]]
    columns[0] = "name"
    columns[3] = "att_date"
    ret = []
    error = False

    from frappe.utils.csvutils import check_record, import_doc

    for i, row in enumerate(rows[5:]):
        if not row: continue
        row_idx = i + 5
        d = frappe._dict(zip(columns, row))
        d["doctype"] = "Attendance"
        if d.name:
            # Existing record being overwritten: preserve its docstatus.
            d["docstatus"] = frappe.db.get_value("Attendance", d.name, "docstatus")

        try:
            check_record(d)
            ret.append(import_doc(d, "Attendance", 1, row_idx, submit=True))
        except Exception, e:
            error = True
            ret.append('Error for row (#%d) %s : %s' % (row_idx,
                len(row)>1 and row[1] or "", cstr(e)))
            frappe.errprint(frappe.get_traceback())

    # All-or-nothing: any failed row aborts the whole import.
    if error:
        frappe.db.rollback()
    else:
        frappe.db.commit()

    return {"messages": ret, "error": error}
agpl-3.0
blackzw/openwrt_sdk_dev1
staging_dir/host/lib/python2.7/idlelib/ScrolledList.py
160
3995
from Tkinter import *


class ScrolledList:
    """A Listbox packed with a vertical Scrollbar, plus mouse/keyboard
    selection handling and an optional right-click context menu.

    Subclasses customize behavior by overriding fill_menu(), on_select()
    and on_double().  NOTE: Python 2 / Tkinter module names.
    """

    # Placeholder text shown while the list is empty.
    default = "(None)"

    def __init__(self, master, **options):
        # Create top frame, with scrollbar and listbox
        self.master = master
        self.frame = frame = Frame(master)
        self.frame.pack(fill="both", expand=1)
        self.vbar = vbar = Scrollbar(frame, name="vbar")
        self.vbar.pack(side="right", fill="y")
        self.listbox = listbox = Listbox(frame, exportselection=0,
                                         background="white")
        if options:
            listbox.configure(options)
        listbox.pack(expand=1, fill="both")
        # Tie listbox and scrollbar together
        vbar["command"] = listbox.yview
        listbox["yscrollcommand"] = vbar.set
        # Bind events to the list box
        listbox.bind("<ButtonRelease-1>", self.click_event)
        listbox.bind("<Double-ButtonRelease-1>", self.double_click_event)
        listbox.bind("<ButtonPress-3>", self.popup_event)
        listbox.bind("<Key-Up>", self.up_event)
        listbox.bind("<Key-Down>", self.down_event)
        # Mark as empty
        self.clear()

    def close(self):
        # Destroying the frame destroys all child widgets too.
        self.frame.destroy()

    def clear(self):
        """Empty the list and show the placeholder entry."""
        self.listbox.delete(0, "end")
        self.empty = 1
        self.listbox.insert("end", self.default)

    def append(self, item):
        """Add str(item) at the end, removing the placeholder first."""
        if self.empty:
            self.listbox.delete(0, "end")
            self.empty = 0
        self.listbox.insert("end", str(item))

    def get(self, index):
        return self.listbox.get(index)

    def click_event(self, event):
        # Activate the item under the pointer, then select and notify.
        self.listbox.activate("@%d,%d" % (event.x, event.y))
        index = self.listbox.index("active")
        self.select(index)
        self.on_select(index)
        return "break"

    def double_click_event(self, event):
        index = self.listbox.index("active")
        self.select(index)
        self.on_double(index)
        return "break"

    # Context menu is built lazily on first right-click.
    menu = None

    def popup_event(self, event):
        if not self.menu:
            self.make_menu()
        menu = self.menu
        self.listbox.activate("@%d,%d" % (event.x, event.y))
        index = self.listbox.index("active")
        self.select(index)
        menu.tk_popup(event.x_root, event.y_root)

    def make_menu(self):
        menu = Menu(self.listbox, tearoff=0)
        self.menu = menu
        self.fill_menu()

    def up_event(self, event):
        """Move the selection one item up, wrapping from top to bottom."""
        index = self.listbox.index("active")
        if self.listbox.selection_includes(index):
            index = index - 1
        else:
            # Nothing selected: jump to the last item.
            index = self.listbox.size() - 1
        if index < 0:
            self.listbox.bell()
        else:
            self.select(index)
            self.on_select(index)
        return "break"

    def down_event(self, event):
        """Move the selection one item down, wrapping from bottom to top."""
        index = self.listbox.index("active")
        if self.listbox.selection_includes(index):
            index = index + 1
        else:
            # Nothing selected: jump to the first item.
            index = 0
        if index >= self.listbox.size():
            self.listbox.bell()
        else:
            self.select(index)
            self.on_select(index)
        return "break"

    def select(self, index):
        """Focus, activate and exclusively select the item at index."""
        self.listbox.focus_set()
        self.listbox.activate(index)
        self.listbox.selection_clear(0, "end")
        self.listbox.selection_set(index)
        self.listbox.see(index)

    # Methods to override for specific actions

    def fill_menu(self):
        """Populate self.menu with context-menu commands."""
        pass

    def on_select(self, index):
        """Called when the selection moves to `index`."""
        pass

    def on_double(self, index):
        """Called when the item at `index` is double-clicked."""
        pass


def test():
    """Build a demo window with a 30-item scrolled list (Python 2 prints)."""
    root = Tk()
    root.protocol("WM_DELETE_WINDOW", root.destroy)
    class MyScrolledList(ScrolledList):
        def fill_menu(self): self.menu.add_command(label="pass")
        def on_select(self, index): print "select", self.get(index)
        def on_double(self, index): print "double", self.get(index)
    s = MyScrolledList(root)
    for i in range(30):
        s.append("item %02d" % i)
    return root

def main():
    root = test()
    root.mainloop()

if __name__ == '__main__':
    main()
gpl-2.0
ESS-LLP/erpnext-medical
erpnext/hr/doctype/employee_advance/employee_advance.py
6
3851
# -*- coding: utf-8 -*-
# Copyright (c) 2017, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt

from __future__ import unicode_literals
import frappe, erpnext
from frappe import _
from frappe.model.document import Document
from frappe.utils import flt, nowdate


class EmployeeAdvanceOverPayment(frappe.ValidationError):
    # Raised when GL-paid amount exceeds the requested advance amount.
    pass


class EmployeeAdvance(Document):
    def onload(self):
        # Expose the Accounts Settings flag to the client form.
        self.get("__onload").make_payment_via_journal_entry = frappe.db.get_single_value('Accounts Settings',
            'make_payment_via_journal_entry')

    def validate(self):
        self.set_status()
        self.validate_employee_advance_account()

    def on_cancel(self):
        self.set_status()

    def set_status(self):
        """Derive status from docstatus and paid/claimed amounts.

        Draft (0) -> "Draft"; submitted (1) -> "Claimed"/"Paid"/"Unpaid"
        depending on amounts; cancelled (2) -> "Cancelled".
        """
        if self.docstatus == 0:
            self.status = "Draft"
        if self.docstatus == 1:
            if self.claimed_amount and flt(self.claimed_amount) == flt(self.paid_amount):
                self.status = "Claimed"
            elif self.paid_amount and self.advance_amount == flt(self.paid_amount):
                self.status = "Paid"
            else:
                self.status = "Unpaid"
        elif self.docstatus == 2:
            self.status = "Cancelled"

    def validate_employee_advance_account(self):
        """Advance account currency must match the company currency."""
        company_currency = erpnext.get_company_currency(self.company)
        if (self.advance_account and
            company_currency != frappe.db.get_value('Account', self.advance_account, 'account_currency')):
            frappe.throw(_("Advance account currency should be same as company currency {0}")
                .format(company_currency))

    def set_total_advance_paid(self):
        """Recompute paid_amount from GL entries against this advance and
        persist paid_amount + status directly to the database."""
        paid_amount = frappe.db.sql("""
            select ifnull(sum(debit_in_account_currency), 0) as paid_amount
            from `tabGL Entry`
            where against_voucher_type = 'Employee Advance'
                and against_voucher = %s
                and party_type = 'Employee'
                and party = %s
        """, (self.name, self.employee), as_dict=1)[0].paid_amount

        if flt(paid_amount) > self.advance_amount:
            # NOTE(review): the "{0}" placeholder in this message is never
            # filled — a .format(...) call appears to be missing.
            frappe.throw(_("Row {0}# Paid Amount cannot be greater than requested advance amount"),
                EmployeeAdvanceOverPayment)

        self.db_set("paid_amount", paid_amount)
        self.set_status()
        frappe.db.set_value("Employee Advance", self.name , "status", self.status)

    def update_claimed_amount(self):
        """Sum allocations from submitted Expense Claims and persist them
        as claimed_amount."""
        claimed_amount = frappe.db.sql("""
            select sum(ifnull(allocated_amount, 0))
            from `tabExpense Claim Advance`
            where employee_advance = %s and docstatus=1 and allocated_amount > 0
        """, self.name)[0][0] or 0

        if claimed_amount:
            frappe.db.set_value("Employee Advance", self.name, "claimed_amount", flt(claimed_amount))


@frappe.whitelist()
def get_due_advance_amount(employee, posting_date):
    """Return the total outstanding (advance - paid) across the employee's
    submitted advances dated on or before posting_date."""
    employee_due_amount = frappe.get_all("Employee Advance", \
        filters = {"employee":employee, "docstatus":1, "posting_date":("<=", posting_date)}, \
        fields = ["advance_amount", "paid_amount"])
    return sum([(emp.advance_amount - emp.paid_amount) for emp in employee_due_amount])


@frappe.whitelist()
def make_bank_entry(dt, dn):
    """Build (without saving) a Bank Entry Journal Entry paying out the
    advance `dn` of doctype `dt`; returns the unsaved doc as a dict."""
    from erpnext.accounts.doctype.journal_entry.journal_entry import get_default_bank_cash_account
    doc = frappe.get_doc(dt, dn)
    payment_account = get_default_bank_cash_account(doc.company, account_type="Cash",
        mode_of_payment=doc.mode_of_payment)

    je = frappe.new_doc("Journal Entry")
    je.posting_date = nowdate()
    je.voucher_type = 'Bank Entry'
    je.company = doc.company
    je.remark = 'Payment against Employee Advance: ' + dn + '\n' + doc.purpose

    # Debit leg: the advance account, marked as an advance for the employee.
    je.append("accounts", {
        "account": doc.advance_account,
        "debit_in_account_currency": flt(doc.advance_amount),
        "reference_type": "Employee Advance",
        "reference_name": doc.name,
        "party_type": "Employee",
        "party": doc.employee,
        "is_advance": "Yes"
    })

    # Credit leg: the bank/cash payment account.
    je.append("accounts", {
        "account": payment_account.account,
        "credit_in_account_currency": flt(doc.advance_amount),
        "account_currency": payment_account.account_currency,
        "account_type": payment_account.account_type
    })

    return je.as_dict()
gpl-3.0
andela-earinde/bellatrix-py
app/js/lib/lib/modules/test/test_startfile.py
120
1421
# Ridiculously simple test of the os.startfile function for Windows. # # empty.vbs is an empty file (except for a comment), which does # nothing when run with cscript or wscript. # # A possible improvement would be to have empty.vbs do something that # we can detect here, to make sure that not only the os.startfile() # call succeeded, but also the the script actually has run. import unittest from test import test_support import os from os import path from time import sleep startfile = test_support.get_attribute(os, 'startfile') class TestCase(unittest.TestCase): def test_nonexisting(self): self.assertRaises(OSError, startfile, "nonexisting.vbs") def test_nonexisting_u(self): self.assertRaises(OSError, startfile, u"nonexisting.vbs") def test_empty(self): empty = path.join(path.dirname(__file__), "empty.vbs") startfile(empty) startfile(empty, "open") # Give the child process some time to exit before we finish. # Otherwise the cleanup code will not be able to delete the cwd, # because it is still in use. sleep(0.1) def test_empty_u(self): empty = path.join(path.dirname(__file__), "empty.vbs") startfile(unicode(empty, "mbcs")) startfile(unicode(empty, "mbcs"), "open") sleep(0.1) def test_main(): test_support.run_unittest(TestCase) if __name__=="__main__": test_main()
mit
titeuf87/evennia
evennia/web/website/views.py
4
3944
""" This file contains the generic, assorted views that don't fall under one of the other applications. Views are django's way of processing e.g. html templates on the fly. """ from django.contrib.admin.sites import site from django.conf import settings from django.contrib.auth import authenticate from django.contrib.admin.views.decorators import staff_member_required from django.shortcuts import render from evennia import SESSION_HANDLER from evennia.objects.models import ObjectDB from evennia.players.models import PlayerDB from evennia.utils import logger from django.contrib.auth import login _BASE_CHAR_TYPECLASS = settings.BASE_CHARACTER_TYPECLASS def _shared_login(request): """ Handle the shared login between website and webclient. """ csession = request.session player = request.user sesslogin = csession.get("logged_in", None) if csession.session_key is None: # this is necessary to build the sessid key csession.save() elif player.is_authenticated(): if not sesslogin: csession["logged_in"] = player.id elif sesslogin: # The webclient has previously registered a login to this csession player = PlayerDB.objects.get(id=sesslogin) try: # calls our custom authenticate, in web/utils/backend.py authenticate(autologin=player) login(request, player) except AttributeError: logger.log_trace() def _gamestats(): # Some misc. configurable stuff. # TODO: Move this to either SQL or settings.py based configuration. fpage_player_limit = 4 # A QuerySet of the most recently connected players. 
recent_users = PlayerDB.objects.get_recently_connected_players()[:fpage_player_limit] nplyrs_conn_recent = len(recent_users) or "none" nplyrs = PlayerDB.objects.num_total_players() or "none" nplyrs_reg_recent = len(PlayerDB.objects.get_recently_created_players()) or "none" nsess = SESSION_HANDLER.player_count() # nsess = len(PlayerDB.objects.get_connected_players()) or "no one" nobjs = ObjectDB.objects.all().count() nrooms = ObjectDB.objects.filter(db_location__isnull=True).exclude(db_typeclass_path=_BASE_CHAR_TYPECLASS).count() nexits = ObjectDB.objects.filter(db_location__isnull=False, db_destination__isnull=False).count() nchars = ObjectDB.objects.filter(db_typeclass_path=_BASE_CHAR_TYPECLASS).count() nothers = nobjs - nrooms - nchars - nexits pagevars = { "page_title": "Front Page", "players_connected_recent": recent_users, "num_players_connected": nsess or "no one", "num_players_registered": nplyrs or "no", "num_players_connected_recent": nplyrs_conn_recent or "no", "num_players_registered_recent": nplyrs_reg_recent or "no one", "num_rooms": nrooms or "none", "num_exits": nexits or "no", "num_objects": nobjs or "none", "num_characters": nchars or "no", "num_others": nothers or "no" } return pagevars def page_index(request): """ Main root page. """ # handle webclient-website shared login _shared_login(request) # get game db stats pagevars = _gamestats() return render(request, 'index.html', pagevars) def to_be_implemented(request): """ A notice letting the user know that this particular feature hasn't been implemented yet. """ pagevars = { "page_title": "To Be Implemented...", } return render(request, 'tbi.html', pagevars) @staff_member_required def evennia_admin(request): """ Helpful Evennia-specific admin page. """ return render( request, 'evennia_admin.html', { 'playerdb': PlayerDB}) def admin_wrapper(request): """ Wrapper that allows us to properly use the base Django admin site, if needed. """ return staff_member_required(site.index)(request)
bsd-3-clause
mzdaniel/oh-mainline
vendor/packages/python-openid/openid/extensions/draft/pape2.py
156
9330
"""An implementation of the OpenID Provider Authentication Policy Extension 1.0 @see: http://openid.net/developers/specs/ @since: 2.1.0 """ __all__ = [ 'Request', 'Response', 'ns_uri', 'AUTH_PHISHING_RESISTANT', 'AUTH_MULTI_FACTOR', 'AUTH_MULTI_FACTOR_PHYSICAL', ] from openid.extension import Extension import re ns_uri = "http://specs.openid.net/extensions/pape/1.0" AUTH_MULTI_FACTOR_PHYSICAL = \ 'http://schemas.openid.net/pape/policies/2007/06/multi-factor-physical' AUTH_MULTI_FACTOR = \ 'http://schemas.openid.net/pape/policies/2007/06/multi-factor' AUTH_PHISHING_RESISTANT = \ 'http://schemas.openid.net/pape/policies/2007/06/phishing-resistant' TIME_VALIDATOR = re.compile('^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ$') class Request(Extension): """A Provider Authentication Policy request, sent from a relying party to a provider @ivar preferred_auth_policies: The authentication policies that the relying party prefers @type preferred_auth_policies: [str] @ivar max_auth_age: The maximum time, in seconds, that the relying party wants to allow to have elapsed before the user must re-authenticate @type max_auth_age: int or NoneType """ ns_alias = 'pape' def __init__(self, preferred_auth_policies=None, max_auth_age=None): super(Request, self).__init__() if not preferred_auth_policies: preferred_auth_policies = [] self.preferred_auth_policies = preferred_auth_policies self.max_auth_age = max_auth_age def __nonzero__(self): return bool(self.preferred_auth_policies or self.max_auth_age is not None) def addPolicyURI(self, policy_uri): """Add an acceptable authentication policy URI to this request This method is intended to be used by the relying party to add acceptable authentication types to the request. @param policy_uri: The identifier for the preferred type of authentication. 
@see: http://openid.net/specs/openid-provider-authentication-policy-extension-1_0-01.html#auth_policies """ if policy_uri not in self.preferred_auth_policies: self.preferred_auth_policies.append(policy_uri) def getExtensionArgs(self): """@see: C{L{Extension.getExtensionArgs}} """ ns_args = { 'preferred_auth_policies':' '.join(self.preferred_auth_policies) } if self.max_auth_age is not None: ns_args['max_auth_age'] = str(self.max_auth_age) return ns_args def fromOpenIDRequest(cls, request): """Instantiate a Request object from the arguments in a C{checkid_*} OpenID message """ self = cls() args = request.message.getArgs(self.ns_uri) if args == {}: return None self.parseExtensionArgs(args) return self fromOpenIDRequest = classmethod(fromOpenIDRequest) def parseExtensionArgs(self, args): """Set the state of this request to be that expressed in these PAPE arguments @param args: The PAPE arguments without a namespace @rtype: None @raises ValueError: When the max_auth_age is not parseable as an integer """ # preferred_auth_policies is a space-separated list of policy URIs self.preferred_auth_policies = [] policies_str = args.get('preferred_auth_policies') if policies_str: for uri in policies_str.split(' '): if uri not in self.preferred_auth_policies: self.preferred_auth_policies.append(uri) # max_auth_age is base-10 integer number of seconds max_auth_age_str = args.get('max_auth_age') self.max_auth_age = None if max_auth_age_str: try: self.max_auth_age = int(max_auth_age_str) except ValueError: pass def preferredTypes(self, supported_types): """Given a list of authentication policy URIs that a provider supports, this method returns the subsequence of those types that are preferred by the relying party. @param supported_types: A sequence of authentication policy type URIs that are supported by a provider @returns: The sub-sequence of the supported types that are preferred by the relying party. 
This list will be ordered in the order that the types appear in the supported_types sequence, and may be empty if the provider does not prefer any of the supported authentication types. @returntype: [str] """ return filter(self.preferred_auth_policies.__contains__, supported_types) Request.ns_uri = ns_uri class Response(Extension): """A Provider Authentication Policy response, sent from a provider to a relying party """ ns_alias = 'pape' def __init__(self, auth_policies=None, auth_time=None, nist_auth_level=None): super(Response, self).__init__() if auth_policies: self.auth_policies = auth_policies else: self.auth_policies = [] self.auth_time = auth_time self.nist_auth_level = nist_auth_level def addPolicyURI(self, policy_uri): """Add a authentication policy to this response This method is intended to be used by the provider to add a policy that the provider conformed to when authenticating the user. @param policy_uri: The identifier for the preferred type of authentication. @see: http://openid.net/specs/openid-provider-authentication-policy-extension-1_0-01.html#auth_policies """ if policy_uri not in self.auth_policies: self.auth_policies.append(policy_uri) def fromSuccessResponse(cls, success_response): """Create a C{L{Response}} object from a successful OpenID library response (C{L{openid.consumer.consumer.SuccessResponse}}) response message @param success_response: A SuccessResponse from consumer.complete() @type success_response: C{L{openid.consumer.consumer.SuccessResponse}} @rtype: Response or None @returns: A provider authentication policy response from the data that was supplied with the C{id_res} response or None if the provider sent no signed PAPE response arguments. """ self = cls() # PAPE requires that the args be signed. args = success_response.getSignedNS(self.ns_uri) # Only try to construct a PAPE response if the arguments were # signed in the OpenID response. If not, return None. 
if args is not None: self.parseExtensionArgs(args) return self else: return None def parseExtensionArgs(self, args, strict=False): """Parse the provider authentication policy arguments into the internal state of this object @param args: unqualified provider authentication policy arguments @param strict: Whether to raise an exception when bad data is encountered @returns: None. The data is parsed into the internal fields of this object. """ policies_str = args.get('auth_policies') if policies_str and policies_str != 'none': self.auth_policies = policies_str.split(' ') nist_level_str = args.get('nist_auth_level') if nist_level_str: try: nist_level = int(nist_level_str) except ValueError: if strict: raise ValueError('nist_auth_level must be an integer between ' 'zero and four, inclusive') else: self.nist_auth_level = None else: if 0 <= nist_level < 5: self.nist_auth_level = nist_level auth_time = args.get('auth_time') if auth_time: if TIME_VALIDATOR.match(auth_time): self.auth_time = auth_time elif strict: raise ValueError("auth_time must be in RFC3339 format") fromSuccessResponse = classmethod(fromSuccessResponse) def getExtensionArgs(self): """@see: C{L{Extension.getExtensionArgs}} """ if len(self.auth_policies) == 0: ns_args = { 'auth_policies':'none', } else: ns_args = { 'auth_policies':' '.join(self.auth_policies), } if self.nist_auth_level is not None: if self.nist_auth_level not in range(0, 5): raise ValueError('nist_auth_level must be an integer between ' 'zero and four, inclusive') ns_args['nist_auth_level'] = str(self.nist_auth_level) if self.auth_time is not None: if not TIME_VALIDATOR.match(self.auth_time): raise ValueError('auth_time must be in RFC3339 format') ns_args['auth_time'] = self.auth_time return ns_args Response.ns_uri = ns_uri
agpl-3.0
danielharbor/openerp
addons/product/tests/test_uom.py
127
1543
from openerp.tests.common import TransactionCase class TestUom(TransactionCase): """Tests for unit of measure conversion""" def setUp(self): super(TestUom, self).setUp() self.product = self.registry('product.product') self.uom = self.registry('product.uom') self.imd = self.registry('ir.model.data') def test_10_conversion(self): cr, uid = self.cr, self.uid gram_id = self.imd.get_object_reference(cr, uid, 'product', 'product_uom_gram')[1] tonne_id = self.imd.get_object_reference(cr, uid, 'product', 'product_uom_ton')[1] qty = self.uom._compute_qty(cr, uid, gram_id, 1020000, tonne_id) self.assertEquals(qty, 1.02, "Converted quantity does not correspond.") price = self.uom._compute_price(cr, uid, gram_id, 2, tonne_id) self.assertEquals(price, 2000000.0, "Converted price does not correspond.") def test_20_rounding(self): cr, uid = self.cr, self.uid unit_id = self.imd.get_object_reference(cr, uid, 'product', 'product_uom_unit')[1] categ_unit_id = self.imd.get_object_reference(cr, uid, 'product', 'product_uom_categ_unit')[1] score_id = self.uom.create(cr, uid, { 'name': 'Score', 'factor_inv': 20, 'uom_type': 'bigger', 'rounding': 1.0, 'category_id': categ_unit_id }) qty = self.uom._compute_qty(cr, uid, unit_id, 2, score_id) self.assertEquals(qty, 1, "Converted quantity should be rounded up.")
agpl-3.0
adamhaney/airflow
tests/contrib/sensors/test_mongo_sensor.py
5
1984
# -*- coding: utf-8 -*- # # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. import unittest from airflow import DAG from airflow import configuration from airflow.contrib.hooks.mongo_hook import MongoHook from airflow.contrib.sensors.mongo_sensor import MongoSensor from airflow.models.connection import Connection from airflow.utils import db, timezone DEFAULT_DATE = timezone.datetime(2017, 1, 1) class TestMongoSensor(unittest.TestCase): def setUp(self): configuration.load_test_config() db.merge_conn( Connection( conn_id='mongo_test', conn_type='mongo', host='mongo', port='27017', schema='test')) args = { 'owner': 'airflow', 'start_date': DEFAULT_DATE } self.dag = DAG('test_dag_id', default_args=args) hook = MongoHook('mongo_test') hook.insert_one('foo', {'bar': 'baz'}) self.sensor = MongoSensor( task_id='test_task', mongo_conn_id='mongo_test', dag=self.dag, collection='foo', query={'bar': 'baz'} ) def test_poke(self): self.assertTrue(self.sensor.poke(None)) if __name__ == '__main__': unittest.main()
apache-2.0
Teagan42/home-assistant
homeassistant/components/lcn/light.py
7
5857
"""Support for LCN lights.""" import pypck from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_TRANSITION, SUPPORT_BRIGHTNESS, SUPPORT_TRANSITION, Light, ) from homeassistant.const import CONF_ADDRESS from . import LcnDevice from .const import ( CONF_CONNECTIONS, CONF_DIMMABLE, CONF_OUTPUT, CONF_TRANSITION, DATA_LCN, OUTPUT_PORTS, ) from .helpers import get_connection async def async_setup_platform( hass, hass_config, async_add_entities, discovery_info=None ): """Set up the LCN light platform.""" if discovery_info is None: return devices = [] for config in discovery_info: address, connection_id = config[CONF_ADDRESS] addr = pypck.lcn_addr.LcnAddr(*address) connections = hass.data[DATA_LCN][CONF_CONNECTIONS] connection = get_connection(connections, connection_id) address_connection = connection.get_address_conn(addr) if config[CONF_OUTPUT] in OUTPUT_PORTS: device = LcnOutputLight(config, address_connection) else: # in RELAY_PORTS device = LcnRelayLight(config, address_connection) devices.append(device) async_add_entities(devices) class LcnOutputLight(LcnDevice, Light): """Representation of a LCN light for output ports.""" def __init__(self, config, address_connection): """Initialize the LCN light.""" super().__init__(config, address_connection) self.output = pypck.lcn_defs.OutputPort[config[CONF_OUTPUT]] self._transition = pypck.lcn_defs.time_to_ramp_value(config[CONF_TRANSITION]) self.dimmable = config[CONF_DIMMABLE] self._brightness = 255 self._is_on = None self._is_dimming_to_zero = False async def async_added_to_hass(self): """Run when entity about to be added to hass.""" await super().async_added_to_hass() await self.address_connection.activate_status_request_handler(self.output) @property def supported_features(self): """Flag supported features.""" features = SUPPORT_TRANSITION if self.dimmable: features |= SUPPORT_BRIGHTNESS return features @property def brightness(self): """Return the brightness of this light between 0..255.""" return 
self._brightness @property def is_on(self): """Return True if entity is on.""" return self._is_on async def async_turn_on(self, **kwargs): """Turn the entity on.""" self._is_on = True self._is_dimming_to_zero = False if ATTR_BRIGHTNESS in kwargs: percent = int(kwargs[ATTR_BRIGHTNESS] / 255.0 * 100) else: percent = 100 if ATTR_TRANSITION in kwargs: transition = pypck.lcn_defs.time_to_ramp_value( kwargs[ATTR_TRANSITION] * 1000 ) else: transition = self._transition self.address_connection.dim_output(self.output.value, percent, transition) await self.async_update_ha_state() async def async_turn_off(self, **kwargs): """Turn the entity off.""" self._is_on = False if ATTR_TRANSITION in kwargs: transition = pypck.lcn_defs.time_to_ramp_value( kwargs[ATTR_TRANSITION] * 1000 ) else: transition = self._transition self._is_dimming_to_zero = bool(transition) self.address_connection.dim_output(self.output.value, 0, transition) await self.async_update_ha_state() def input_received(self, input_obj): """Set light state when LCN input object (command) is received.""" if ( not isinstance(input_obj, pypck.inputs.ModStatusOutput) or input_obj.get_output_id() != self.output.value ): return self._brightness = int(input_obj.get_percent() / 100.0 * 255) if self.brightness == 0: self._is_dimming_to_zero = False if not self._is_dimming_to_zero: self._is_on = self.brightness > 0 self.async_schedule_update_ha_state() class LcnRelayLight(LcnDevice, Light): """Representation of a LCN light for relay ports.""" def __init__(self, config, address_connection): """Initialize the LCN light.""" super().__init__(config, address_connection) self.output = pypck.lcn_defs.RelayPort[config[CONF_OUTPUT]] self._is_on = None async def async_added_to_hass(self): """Run when entity about to be added to hass.""" await super().async_added_to_hass() await self.address_connection.activate_status_request_handler(self.output) @property def is_on(self): """Return True if entity is on.""" return self._is_on async def 
async_turn_on(self, **kwargs): """Turn the entity on.""" self._is_on = True states = [pypck.lcn_defs.RelayStateModifier.NOCHANGE] * 8 states[self.output.value] = pypck.lcn_defs.RelayStateModifier.ON self.address_connection.control_relays(states) await self.async_update_ha_state() async def async_turn_off(self, **kwargs): """Turn the entity off.""" self._is_on = False states = [pypck.lcn_defs.RelayStateModifier.NOCHANGE] * 8 states[self.output.value] = pypck.lcn_defs.RelayStateModifier.OFF self.address_connection.control_relays(states) await self.async_update_ha_state() def input_received(self, input_obj): """Set light state when LCN input object (command) is received.""" if not isinstance(input_obj, pypck.inputs.ModStatusRelays): return self._is_on = input_obj.get_state(self.output.value) self.async_schedule_update_ha_state()
apache-2.0
dnozay/lettuce
tests/integration/lib/Django-1.2.5/django/core/files/uploadhandler.py
236
7028
""" Base file upload handler classes, and the built-in concrete subclasses """ try: from cStringIO import StringIO except ImportError: from StringIO import StringIO from django.conf import settings from django.core.exceptions import ImproperlyConfigured from django.core.files.uploadedfile import TemporaryUploadedFile, InMemoryUploadedFile from django.utils import importlib __all__ = ['UploadFileException','StopUpload', 'SkipFile', 'FileUploadHandler', 'TemporaryFileUploadHandler', 'MemoryFileUploadHandler', 'load_handler', 'StopFutureHandlers'] class UploadFileException(Exception): """ Any error having to do with uploading files. """ pass class StopUpload(UploadFileException): """ This exception is raised when an upload must abort. """ def __init__(self, connection_reset=False): """ If ``connection_reset`` is ``True``, Django knows will halt the upload without consuming the rest of the upload. This will cause the browser to show a "connection reset" error. """ self.connection_reset = connection_reset def __unicode__(self): if self.connection_reset: return u'StopUpload: Halt current upload.' else: return u'StopUpload: Consume request data, then halt.' class SkipFile(UploadFileException): """ This exception is raised by an upload handler that wants to skip a given file. """ pass class StopFutureHandlers(UploadFileException): """ Upload handers that have handled a file and do not want future handlers to run should raise this exception instead of returning None. """ pass class FileUploadHandler(object): """ Base class for streaming upload handlers. """ chunk_size = 64 * 2 ** 10 #: The default chunk size is 64 KB. def __init__(self, request=None): self.file_name = None self.content_type = None self.content_length = None self.charset = None self.request = request def handle_raw_input(self, input_data, META, content_length, boundary, encoding=None): """ Handle the raw input from the client. Parameters: :input_data: An object that supports reading via .read(). 
:META: ``request.META``. :content_length: The (integer) value of the Content-Length header from the client. :boundary: The boundary from the Content-Type header. Be sure to prepend two '--'. """ pass def new_file(self, field_name, file_name, content_type, content_length, charset=None): """ Signal that a new file has been started. Warning: As with any data from the client, you should not trust content_length (and sometimes won't even get it). """ self.field_name = field_name self.file_name = file_name self.content_type = content_type self.content_length = content_length self.charset = charset def receive_data_chunk(self, raw_data, start): """ Receive data from the streamed upload parser. ``start`` is the position in the file of the chunk. """ raise NotImplementedError() def file_complete(self, file_size): """ Signal that a file has completed. File size corresponds to the actual size accumulated by all the chunks. Subclasses should return a valid ``UploadedFile`` object. """ raise NotImplementedError() def upload_complete(self): """ Signal that the upload is complete. Subclasses should perform cleanup that is necessary for this handler. """ pass class TemporaryFileUploadHandler(FileUploadHandler): """ Upload handler that streams data into a temporary file. """ def __init__(self, *args, **kwargs): super(TemporaryFileUploadHandler, self).__init__(*args, **kwargs) def new_file(self, file_name, *args, **kwargs): """ Create the file object to append to as data is coming in. """ super(TemporaryFileUploadHandler, self).new_file(file_name, *args, **kwargs) self.file = TemporaryUploadedFile(self.file_name, self.content_type, 0, self.charset) def receive_data_chunk(self, raw_data, start): self.file.write(raw_data) def file_complete(self, file_size): self.file.seek(0) self.file.size = file_size return self.file class MemoryFileUploadHandler(FileUploadHandler): """ File upload handler to stream uploads into memory (used for small files). 
""" def handle_raw_input(self, input_data, META, content_length, boundary, encoding=None): """ Use the content_length to signal whether or not this handler should be in use. """ # Check the content-length header to see if we should # If the post is too large, we cannot use the Memory handler. if content_length > settings.FILE_UPLOAD_MAX_MEMORY_SIZE: self.activated = False else: self.activated = True def new_file(self, *args, **kwargs): super(MemoryFileUploadHandler, self).new_file(*args, **kwargs) if self.activated: self.file = StringIO() raise StopFutureHandlers() def receive_data_chunk(self, raw_data, start): """ Add the data to the StringIO file. """ if self.activated: self.file.write(raw_data) else: return raw_data def file_complete(self, file_size): """ Return a file object if we're activated. """ if not self.activated: return self.file.seek(0) return InMemoryUploadedFile( file = self.file, field_name = self.field_name, name = self.file_name, content_type = self.content_type, size = file_size, charset = self.charset ) def load_handler(path, *args, **kwargs): """ Given a path to a handler, return an instance of that handler. E.g.:: >>> load_handler('django.core.files.uploadhandler.TemporaryFileUploadHandler', request) <TemporaryFileUploadHandler object at 0x...> """ i = path.rfind('.') module, attr = path[:i], path[i+1:] try: mod = importlib.import_module(module) except ImportError, e: raise ImproperlyConfigured('Error importing upload handler module %s: "%s"' % (module, e)) except ValueError, e: raise ImproperlyConfigured('Error importing upload handler module. Is FILE_UPLOAD_HANDLERS a correctly defined list or tuple?') try: cls = getattr(mod, attr) except AttributeError: raise ImproperlyConfigured('Module "%s" does not define a "%s" upload handler backend' % (module, attr)) return cls(*args, **kwargs)
gpl-3.0
AMOboxTV/AMOBox.LegoBuild
script.module.youtube.dl/lib/youtube_dl/extractor/hearthisat.py
21
4301
# coding: utf-8 from __future__ import unicode_literals import re from .common import InfoExtractor from ..compat import compat_urlparse from ..utils import ( HEADRequest, sanitized_Request, str_to_int, urlencode_postdata, urlhandle_detect_ext, ) class HearThisAtIE(InfoExtractor): _VALID_URL = r'https?://(?:www\.)?hearthis\.at/(?P<artist>[^/]+)/(?P<title>[A-Za-z0-9\-]+)/?$' _PLAYLIST_URL = 'https://hearthis.at/playlist.php' _TEST = { 'url': 'https://hearthis.at/moofi/dr-kreep', 'md5': 'ab6ec33c8fed6556029337c7885eb4e0', 'info_dict': { 'id': '150939', 'ext': 'wav', 'title': 'Moofi - Dr. Kreep', 'thumbnail': 're:^https?://.*\.jpg$', 'timestamp': 1421564134, 'description': 'Creepy Patch. Mutable Instruments Braids Vowel + Formant Mode.', 'upload_date': '20150118', 'comment_count': int, 'view_count': int, 'like_count': int, 'duration': 71, 'categories': ['Experimental'], } } def _real_extract(self, url): m = re.match(self._VALID_URL, url) display_id = '{artist:s} - {title:s}'.format(**m.groupdict()) webpage = self._download_webpage(url, display_id) track_id = self._search_regex( r'intTrackId\s*=\s*(\d+)', webpage, 'track ID') payload = urlencode_postdata({'tracks[]': track_id}) req = sanitized_Request(self._PLAYLIST_URL, payload) req.add_header('Content-type', 'application/x-www-form-urlencoded') track = self._download_json(req, track_id, 'Downloading playlist')[0] title = '{artist:s} - {title:s}'.format(**track) categories = None if track.get('category'): categories = [track['category']] description = self._og_search_description(webpage) thumbnail = self._og_search_thumbnail(webpage) meta_span = r'<span[^>]+class="%s".*?</i>([^<]+)</span>' view_count = str_to_int(self._search_regex( meta_span % 'plays_count', webpage, 'view count', fatal=False)) like_count = str_to_int(self._search_regex( meta_span % 'likes_count', webpage, 'like count', fatal=False)) comment_count = str_to_int(self._search_regex( meta_span % 'comment_count', webpage, 'comment count', fatal=False)) 
duration = str_to_int(self._search_regex( r'data-length="(\d+)', webpage, 'duration', fatal=False)) timestamp = str_to_int(self._search_regex( r'<span[^>]+class="calctime"[^>]+data-time="(\d+)', webpage, 'timestamp', fatal=False)) formats = [] mp3_url = self._search_regex( r'(?s)<a class="player-link"\s+(?:[a-zA-Z0-9_:-]+="[^"]+"\s+)*?data-mp3="([^"]+)"', webpage, 'mp3 URL', fatal=False) if mp3_url: formats.append({ 'format_id': 'mp3', 'vcodec': 'none', 'acodec': 'mp3', 'url': mp3_url, }) download_path = self._search_regex( r'<a class="[^"]*download_fct[^"]*"\s+href="([^"]+)"', webpage, 'download URL', default=None) if download_path: download_url = compat_urlparse.urljoin(url, download_path) ext_req = HEADRequest(download_url) ext_handle = self._request_webpage( ext_req, display_id, note='Determining extension') ext = urlhandle_detect_ext(ext_handle) formats.append({ 'format_id': 'download', 'vcodec': 'none', 'ext': ext, 'url': download_url, 'preference': 2, # Usually better quality }) self._sort_formats(formats) return { 'id': track_id, 'display_id': display_id, 'title': title, 'formats': formats, 'thumbnail': thumbnail, 'description': description, 'duration': duration, 'timestamp': timestamp, 'view_count': view_count, 'comment_count': comment_count, 'like_count': like_count, 'categories': categories, }
gpl-2.0
eltomello/qutebrowser
qutebrowser/completion/completer.py
5
19243
# vim: ft=python fileencoding=utf-8 sts=4 sw=4 et: # Copyright 2014-2015 Florian Bruhin (The Compiler) <mail@qutebrowser.org> # # This file is part of qutebrowser. # # qutebrowser is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # qutebrowser is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with qutebrowser. If not, see <http://www.gnu.org/licenses/>. """Completer attached to a CompletionView.""" from PyQt5.QtCore import pyqtSignal, pyqtSlot, QObject, QTimer from qutebrowser.config import config from qutebrowser.commands import cmdexc, cmdutils, runners from qutebrowser.utils import usertypes, log, objreg, utils from qutebrowser.completion.models import instances, sortfilter class Completer(QObject): """Completer which manages completions in a CompletionView. Attributes: _cmd: The statusbar Command object this completer belongs to. _ignore_change: Whether to ignore the next completion update. _win_id: The window ID this completer is in. _timer: The timer used to trigger the completion update. _cursor_part: The cursor part index for the next completion update. _last_cursor_pos: The old cursor position so we avoid double completion updates. _last_text: The old command text so we avoid double completion updates. _signals_connected: Whether the signals are connected to update the completion when the command widget requests that. Signals: next_prev_item: Emitted to select the next/previous item in the completion. arg0: True for the previous item, False for the next. 
""" next_prev_item = pyqtSignal(bool) def __init__(self, cmd, win_id, parent=None): super().__init__(parent) self._win_id = win_id self._cmd = cmd self._signals_connected = False self._ignore_change = False self._empty_item_idx = None self._timer = QTimer() self._timer.setSingleShot(True) self._timer.setInterval(0) self._timer.timeout.connect(self.update_completion) self._cursor_part = None self._last_cursor_pos = None self._last_text = None objreg.get('config').changed.connect(self.on_auto_open_changed) self.handle_signal_connections() self._cmd.clear_completion_selection.connect( self.handle_signal_connections) def __repr__(self): return utils.get_repr(self) @config.change_filter('completion', 'auto-open') def on_auto_open_changed(self): self.handle_signal_connections() @pyqtSlot() def handle_signal_connections(self): self._connect_signals(config.get('completion', 'auto-open')) def _connect_signals(self, connect=True): """Connect or disconnect the completion signals. Args: connect: Whether to connect (True) or disconnect (False) the signals. Return: True if the signals were connected (connect=True and aren't connected yet) - otherwise False. """ connections = [ (self._cmd.update_completion, self.schedule_completion_update), (self._cmd.textChanged, self.on_text_edited), ] if connect and not self._signals_connected: for sender, receiver in connections: sender.connect(receiver) self._signals_connected = True return True elif not connect: for sender, receiver in connections: try: sender.disconnect(receiver) except TypeError: # Don't fail if not connected pass self._signals_connected = False return False def _open_completion_if_needed(self): """If auto-open is false, temporarily connect signals. Also opens the completion. 
""" if not config.get('completion', 'auto-open'): connected = self._connect_signals(True) if connected: self.update_completion() def _model(self): """Convienience method to get the current completion model.""" completion = objreg.get('completion', scope='window', window=self._win_id) return completion.model() def _get_completion_model(self, completion, parts, cursor_part): """Get a completion model based on an enum member. Args: completion: A usertypes.Completion member. parts: The parts currently in the commandline. cursor_part: The part the cursor is in. Return: A completion model or None. """ if completion == usertypes.Completion.option: section = parts[cursor_part - 1] model = instances.get(completion).get(section) elif completion == usertypes.Completion.value: section = parts[cursor_part - 2] option = parts[cursor_part - 1] try: model = instances.get(completion)[section][option] except KeyError: # No completion model for this section/option. model = None else: model = instances.get(completion) if model is None: return None else: return sortfilter.CompletionFilterModel(source=model, parent=self) def _filter_cmdline_parts(self, parts, cursor_part): """Filter a list of commandline parts to exclude flags. Args: parts: A list of parts. cursor_part: The index of the part the cursor is over. Return: A (parts, cursor_part) tuple with the modified values. """ if parts == ['']: # Empty commandline, i.e. only :. return [''], 0 filtered_parts = [] for i, part in enumerate(parts): if part == '--': break elif part.startswith('-'): if cursor_part >= i: cursor_part -= 1 else: filtered_parts.append(part) return filtered_parts, cursor_part def _get_new_completion(self, parts, cursor_part): """Get a new completion. Args: parts: The command chunks to get a completion for. cursor_part: The part the cursor is over currently. Return: A completion model. 
""" try: if parts[cursor_part].startswith('-'): # cursor on a flag return except IndexError: pass log.completion.debug("Before filtering flags: parts {}, cursor_part " "{}".format(parts, cursor_part)) parts, cursor_part = self._filter_cmdline_parts(parts, cursor_part) log.completion.debug("After filtering flags: parts {}, cursor_part " "{}".format(parts, cursor_part)) if cursor_part == 0: # '|' or 'set|' model = instances.get(usertypes.Completion.command) return sortfilter.CompletionFilterModel(source=model, parent=self) # delegate completion to command try: completions = cmdutils.cmd_dict[parts[0]].completion except KeyError: # entering an unknown command return None if completions is None: # command without any available completions return None dbg_completions = [c.name for c in completions] try: idx = cursor_part - 1 completion = completions[idx] except IndexError: # More arguments than completions log.completion.debug("completions: {}".format( ', '.join(dbg_completions))) return None dbg_completions[idx] = '*' + dbg_completions[idx] + '*' log.completion.debug("completions: {}".format( ', '.join(dbg_completions))) model = self._get_completion_model(completion, parts, cursor_part) return model def _quote(self, s): """Quote s if it needs quoting for the commandline. Note we don't use shlex.quote because that quotes a lot of shell metachars we don't need to have quoted. """ if not s: return "''" elif any(c in s for c in ' \'\t\n\\'): # use single quotes, and put single quotes into double quotes # the string $'b is then quoted as '$'"'"'b' return "'" + s.replace("'", "'\"'\"'") + "'" else: return s def selection_changed(self, selected, _deselected): """Change the completed part if a new item was selected. Called from the views selectionChanged method. Args: selected: New selection. _delected: Previous selection. 
""" indexes = selected.indexes() if not indexes: return model = self._model() data = model.data(indexes[0]) if data is None: return parts = self.split() try: needs_quoting = cmdutils.cmd_dict[parts[0]].maxsplit is None except KeyError: needs_quoting = True if needs_quoting: data = self._quote(data) if model.count() == 1 and config.get('completion', 'quick-complete'): # If we only have one item, we want to apply it immediately # and go on to the next part. self.change_completed_part(data, immediate=True) else: log.completion.debug("Will ignore next completion update.") self._ignore_change = True self.change_completed_part(data) @pyqtSlot() def schedule_completion_update(self): """Schedule updating/enabling completion. For performance reasons we don't want to block here, instead we do this in the background. """ if (self._cmd.cursorPosition() == self._last_cursor_pos and self._cmd.text() == self._last_text): log.completion.debug("Ignoring update because there were no " "changes.") else: log.completion.debug("Scheduling completion update.") self._timer.start() self._last_cursor_pos = self._cmd.cursorPosition() self._last_text = self._cmd.text() @pyqtSlot() def update_completion(self): """Check if completions are available and activate them.""" self.update_cursor_part() parts = self.split() log.completion.debug( "Updating completion - prefix {}, parts {}, cursor_part {}".format( self._cmd.prefix(), parts, self._cursor_part)) if self._ignore_change: log.completion.debug("Ignoring completion update because " "ignore_change is True.") self._ignore_change = False return completion = objreg.get('completion', scope='window', window=self._win_id) if self._cmd.prefix() != ':': # This is a search or gibberish, so we don't need to complete # anything (yet) # FIXME complete searches # https://github.com/The-Compiler/qutebrowser/issues/32 completion.hide() return model = self._get_new_completion(parts, self._cursor_part) if model != self._model(): if model is None: 
completion.hide() else: completion.set_model(model) if model is None: log.completion.debug("No completion model for {}.".format(parts)) return try: pattern = parts[self._cursor_part].strip() except IndexError: pattern = '' completion.set_pattern(pattern) log.completion.debug( "New completion for {}: {}, with pattern '{}'".format( parts, model.srcmodel.__class__.__name__, pattern)) if self._model().count() == 0: completion.hide() return if completion.enabled: completion.show() def split(self, keep=False, aliases=False): """Get the text split up in parts. Args: keep: Whether to keep special chars and whitespace. aliases: Whether to resolve aliases. """ text = self._cmd.text()[len(self._cmd.prefix()):] if not text: # When only ":" is entered, we already have one imaginary part, # which just is empty at the moment. return [''] if not text.strip(): # Text is only whitespace so we treat this as a single element with # the whitespace. return [text] runner = runners.CommandRunner(self._win_id) result = runner.parse(text, fallback=True, aliases=aliases, keep=keep) parts = result.cmdline if self._empty_item_idx is not None: log.completion.debug("Empty element queued at {}, " "inserting.".format(self._empty_item_idx)) parts.insert(self._empty_item_idx, '') #log.completion.debug("Splitting '{}' -> {}".format(text, parts)) return parts @pyqtSlot() def update_cursor_part(self): """Get the part index of the commandline where the cursor is over.""" cursor_pos = self._cmd.cursorPosition() snippet = slice(cursor_pos - 1, cursor_pos + 1) if self._cmd.text()[snippet] == ' ': spaces = True else: spaces = False cursor_pos -= len(self._cmd.prefix()) parts = self.split(keep=True) log.completion.vdebug( "text: {}, parts: {}, cursor_pos after removing prefix '{}': " "{}".format(self._cmd.text(), parts, self._cmd.prefix(), cursor_pos)) skip = 0 for i, part in enumerate(parts): log.completion.vdebug("Checking part {}: {!r}".format(i, parts[i])) if not part: skip += 1 continue if cursor_pos <= 
len(part): # foo| bar self._cursor_part = i - skip if spaces: self._empty_item_idx = i - skip else: self._empty_item_idx = None log.completion.vdebug("cursor_pos {} <= len(part) {}, " "setting cursor_part {} - {} (skip), " "empty_item_idx {}".format( cursor_pos, len(part), i, skip, self._empty_item_idx)) break cursor_pos -= len(part) log.completion.vdebug( "Removing len({!r}) -> {} from cursor_pos -> {}".format( part, len(part), cursor_pos)) else: if i == 0: # Initial `:` press without any text. self._cursor_part = 0 else: self._cursor_part = i - skip if spaces: self._empty_item_idx = i - skip else: self._empty_item_idx = None log.completion.debug("cursor_part {}, spaces {}".format( self._cursor_part, spaces)) return def change_completed_part(self, newtext, immediate=False): """Change the part we're currently completing in the commandline. Args: text: The text to set (string). immediate: True if the text should be completed immediately including a trailing space and we shouldn't continue completing the current item. """ parts = self.split() log.completion.debug("changing part {} to '{}'".format( self._cursor_part, newtext)) try: parts[self._cursor_part] = newtext except IndexError: parts.append(newtext) # We want to place the cursor directly after the part we just changed. cursor_str = self._cmd.prefix() + ' '.join( parts[:self._cursor_part + 1]) if immediate: # If we should complete immediately, we want to move the cursor by # one more char, to get to the next field. cursor_str += ' ' text = self._cmd.prefix() + ' '.join(parts) if immediate and self._cursor_part == len(parts) - 1: # If we should complete immediately and we're completing the last # part in the commandline, we automatically add a space. 
text += ' ' self._cmd.setText(text) log.completion.debug("Placing cursor after '{}'".format(cursor_str)) log.modes.debug("Completion triggered, focusing {!r}".format(self)) self._cmd.setCursorPosition(len(cursor_str)) self._cmd.setFocus() self._cmd.show_cmd.emit() @pyqtSlot() def on_text_edited(self): """Reset _empty_item_idx if text was edited.""" self._empty_item_idx = None # We also want to update the cursor part and emit update_completion # here, but that's already done for us by cursorPositionChanged # anyways, so we don't need to do it twice. @cmdutils.register(instance='completer', hide=True, modes=[usertypes.KeyMode.command], scope='window') def completion_item_prev(self): """Select the previous completion item.""" self._open_completion_if_needed() self.next_prev_item.emit(True) @cmdutils.register(instance='completer', hide=True, modes=[usertypes.KeyMode.command], scope='window') def completion_item_next(self): """Select the next completion item.""" self._open_completion_if_needed() self.next_prev_item.emit(False) @cmdutils.register(instance='completion', hide=True, modes=[usertypes.KeyMode.command], scope='window') def completion_item_del(self): """Delete the current completion item.""" completion = objreg.get('completion', scope='window', window=self._win_id) if not completion.currentIndex().isValid(): raise cmdexc.CommandError("No item selected!") try: self.model().srcmodel.delete_cur_item(completion) except NotImplementedError: raise cmdexc.CommandError("Cannot delete this item.")
gpl-3.0
zhimin711/nova
plugins/xenserver/xenapi/etc/xapi.d/plugins/utils.py
9
16774
# Copyright (c) 2012 OpenStack Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # NOTE: XenServer still only supports Python 2.4 in it's dom0 userspace # which means the Nova xenapi plugins must use only Python 2.4 features """Various utilities used by XenServer plugins.""" try: import cPickle as pickle except ImportError: import pickle import errno import logging import os import shutil import signal import subprocess import tempfile import XenAPIPlugin LOG = logging.getLogger(__name__) CHUNK_SIZE = 8192 class CommandNotFound(Exception): pass def delete_if_exists(path): try: os.unlink(path) except OSError, e: # noqa if e.errno == errno.ENOENT: LOG.warning("'%s' was already deleted, skipping delete" % path) else: raise def _link(src, dst): LOG.info("Hard-linking file '%s' -> '%s'" % (src, dst)) os.link(src, dst) def _rename(src, dst): LOG.info("Renaming file '%s' -> '%s'" % (src, dst)) try: os.rename(src, dst) except OSError, e: # noqa if e.errno == errno.EXDEV: LOG.error("Invalid cross-device link. Perhaps %s and %s should " "be symlinked on the same filesystem?" 
% (src, dst)) raise def make_subprocess(cmdline, stdout=False, stderr=False, stdin=False, universal_newlines=False, close_fds=True, env=None): """Make a subprocess according to the given command-line string """ LOG.info("Running cmd '%s'" % " ".join(cmdline)) kwargs = {} kwargs['stdout'] = stdout and subprocess.PIPE or None kwargs['stderr'] = stderr and subprocess.PIPE or None kwargs['stdin'] = stdin and subprocess.PIPE or None kwargs['universal_newlines'] = universal_newlines kwargs['close_fds'] = close_fds kwargs['env'] = env try: proc = subprocess.Popen(cmdline, **kwargs) except OSError, e: # noqa if e.errno == errno.ENOENT: raise CommandNotFound else: raise return proc class SubprocessException(Exception): def __init__(self, cmdline, ret, out, err): Exception.__init__(self, "'%s' returned non-zero exit code: " "retcode=%i, out='%s', stderr='%s'" % (cmdline, ret, out, err)) self.cmdline = cmdline self.ret = ret self.out = out self.err = err def finish_subprocess(proc, cmdline, cmd_input=None, ok_exit_codes=None): """Ensure that the process returned a zero exit code indicating success """ if ok_exit_codes is None: ok_exit_codes = [0] out, err = proc.communicate(cmd_input) ret = proc.returncode if ret not in ok_exit_codes: LOG.error("Command '%(cmdline)s' with process id '%(pid)s' expected " "return code in '%(ok)s' but got '%(rc)s': %(err)s" % {'cmdline': cmdline, 'pid': proc.pid, 'ok': ok_exit_codes, 'rc': ret, 'err': err}) raise SubprocessException(' '.join(cmdline), ret, out, err) return out def run_command(cmd, cmd_input=None, ok_exit_codes=None): """Abstracts out the basics of issuing system commands. If the command returns anything in stderr, an exception is raised with that information. Otherwise, the output from stdout is returned. cmd_input is passed to the process on standard input. 
""" proc = make_subprocess(cmd, stdout=True, stderr=True, stdin=True, close_fds=True) return finish_subprocess(proc, cmd, cmd_input=cmd_input, ok_exit_codes=ok_exit_codes) def try_kill_process(proc): """Sends the given process the SIGKILL signal.""" pid = proc.pid LOG.info("Killing process %s" % pid) try: os.kill(pid, signal.SIGKILL) except Exception: LOG.exception("Failed to kill %s" % pid) def make_staging_area(sr_path): """The staging area is a place where we can temporarily store and manipulate VHDs. The use of the staging area is different for upload and download: Download ======== When we download the tarball, the VHDs contained within will have names like "snap.vhd" and "image.vhd". We need to assign UUIDs to them before moving them into the SR. However, since 'image.vhd' may be a base_copy, we need to link it to 'snap.vhd' (using vhd-util modify) before moving both into the SR (otherwise the SR.scan will cause 'image.vhd' to be deleted). The staging area gives us a place to perform these operations before they are moved to the SR, scanned, and then registered with XenServer. Upload ====== On upload, we want to rename the VHDs to reflect what they are, 'snap.vhd' in the case of the snapshot VHD, and 'image.vhd' in the case of the base_copy. The staging area provides a directory in which we can create hard-links to rename the VHDs without affecting what's in the SR. NOTE ==== The staging area is created as a subdirectory within the SR in order to guarantee that it resides within the same filesystem and therefore permit hard-linking and cheap file moves. """ staging_path = tempfile.mkdtemp(dir=sr_path) return staging_path def cleanup_staging_area(staging_path): """Remove staging area directory On upload, the staging area contains hard-links to the VHDs in the SR; it's safe to remove the staging-area because the SR will keep the link count > 0 (so the VHDs in the SR will not be deleted). 
""" if os.path.exists(staging_path): shutil.rmtree(staging_path) def _handle_old_style_images(staging_path): """Rename files to conform to new image format, if needed. Old-Style: snap.vhd -> image.vhd -> base.vhd New-Style: 0.vhd -> 1.vhd -> ... (n-1).vhd The New-Style format has the benefit of being able to support a VDI chain of arbitrary length. """ file_num = 0 for filename in ('snap.vhd', 'image.vhd', 'base.vhd'): path = os.path.join(staging_path, filename) if os.path.exists(path): _rename(path, os.path.join(staging_path, "%d.vhd" % file_num)) file_num += 1 # Rename any format of name to 0.vhd when there is only single one contents = os.listdir(staging_path) if len(contents) == 1: filename = contents[0] if filename != '0.vhd' and filename.endswith('.vhd'): _rename( os.path.join(staging_path, filename), os.path.join(staging_path, '0.vhd')) def _assert_vhd_not_hidden(path): """Sanity check to ensure that only appropriate VHDs are marked as hidden. If this flag is incorrectly set, then when we move the VHD into the SR, it will be deleted out from under us. """ query_cmd = ["vhd-util", "query", "-n", path, "-f"] out = run_command(query_cmd) for line in out.splitlines(): if line.lower().startswith('hidden'): value = line.split(':')[1].strip() if value == "1": raise Exception( "VHD %s is marked as hidden without child" % path) def _vhd_util_check(vdi_path): check_cmd = ["vhd-util", "check", "-n", vdi_path, "-p"] out = run_command(check_cmd, ok_exit_codes=[0, 22]) first_line = out.splitlines()[0].strip() return out, first_line def _validate_vhd(vdi_path): """This checks for several errors in the VHD structure. Most notably, it checks that the timestamp in the footer is correct, but may pick up other errors also. This check ensures that the timestamps listed in the VHD footer aren't in the future. This can occur during a migration if the clocks on the two Dom0's are out-of-sync. This would corrupt the SR if it were imported, so generate an exception to bail. 
""" out, first_line = _vhd_util_check(vdi_path) if 'invalid' in first_line: LOG.warning("VHD invalid, attempting repair.") repair_cmd = ["vhd-util", "repair", "-n", vdi_path] run_command(repair_cmd) out, first_line = _vhd_util_check(vdi_path) if 'invalid' in first_line: if 'footer' in first_line: part = 'footer' elif 'header' in first_line: part = 'header' else: part = 'setting' details = first_line.split(':', 1) if len(details) == 2: details = details[1] else: details = first_line extra = '' if 'timestamp' in first_line: extra = (" ensure source and destination host machines have " "time set correctly") LOG.info("VDI Error details: %s" % out) raise Exception( "VDI '%(vdi_path)s' has an invalid %(part)s: '%(details)s'" "%(extra)s" % {'vdi_path': vdi_path, 'part': part, 'details': details, 'extra': extra}) LOG.info("VDI is valid: %s" % vdi_path) def _validate_vdi_chain(vdi_path): """This check ensures that the parent pointers on the VHDs are valid before we move the VDI chain to the SR. This is *very* important because a bad parent pointer will corrupt the SR causing a cascade of failures. """ def get_parent_path(path): query_cmd = ["vhd-util", "query", "-n", path, "-p"] out = run_command(query_cmd, ok_exit_codes=[0, 22]) first_line = out.splitlines()[0].strip() if first_line.endswith(".vhd"): return first_line elif 'has no parent' in first_line: return None elif 'query failed' in first_line: raise Exception("VDI '%s' not present which breaks" " the VDI chain, bailing out" % path) else: raise Exception("Unexpected output '%s' from vhd-util" % out) cur_path = vdi_path while cur_path: _validate_vhd(cur_path) cur_path = get_parent_path(cur_path) def _validate_sequenced_vhds(staging_path): """This check ensures that the VHDs in the staging area are sequenced properly from 0 to n-1 with no gaps. 
""" seq_num = 0 filenames = os.listdir(staging_path) for filename in filenames: if not filename.endswith('.vhd'): continue # Ignore legacy swap embedded in the image, generated on-the-fly now if filename == "swap.vhd": continue vhd_path = os.path.join(staging_path, "%d.vhd" % seq_num) if not os.path.exists(vhd_path): raise Exception("Corrupt image. Expected seq number: %d. Files: %s" % (seq_num, filenames)) seq_num += 1 def import_vhds(sr_path, staging_path, uuid_stack): """Move VHDs from staging area into the SR. The staging area is necessary because we need to perform some fixups (assigning UUIDs, relinking the VHD chain) before moving into the SR, otherwise the SR manager process could potentially delete the VHDs out from under us. Returns: A dict of imported VHDs: {'root': {'uuid': 'ffff-aaaa'}} """ _handle_old_style_images(staging_path) _validate_sequenced_vhds(staging_path) files_to_move = [] # Collect sequenced VHDs and assign UUIDs to them seq_num = 0 while True: orig_vhd_path = os.path.join(staging_path, "%d.vhd" % seq_num) if not os.path.exists(orig_vhd_path): break # Rename (0, 1 .. 
N).vhd -> aaaa-bbbb-cccc-dddd.vhd vhd_uuid = uuid_stack.pop() vhd_path = os.path.join(staging_path, "%s.vhd" % vhd_uuid) _rename(orig_vhd_path, vhd_path) if seq_num == 0: leaf_vhd_path = vhd_path leaf_vhd_uuid = vhd_uuid files_to_move.append(vhd_path) seq_num += 1 # Re-link VHDs, in reverse order, from base-copy -> leaf parent_path = None for vhd_path in reversed(files_to_move): if parent_path: # Link to parent modify_cmd = ["vhd-util", "modify", "-n", vhd_path, "-p", parent_path] run_command(modify_cmd) parent_path = vhd_path # Sanity check the leaf VHD _assert_vhd_not_hidden(leaf_vhd_path) _validate_vdi_chain(leaf_vhd_path) # Move files into SR for orig_path in files_to_move: new_path = os.path.join(sr_path, os.path.basename(orig_path)) _rename(orig_path, new_path) imported_vhds = dict(root=dict(uuid=leaf_vhd_uuid)) return imported_vhds def prepare_staging_area(sr_path, staging_path, vdi_uuids, seq_num=0): """Hard-link VHDs into staging area.""" for vdi_uuid in vdi_uuids: source = os.path.join(sr_path, "%s.vhd" % vdi_uuid) link_name = os.path.join(staging_path, "%d.vhd" % seq_num) _link(source, link_name) seq_num += 1 def create_tarball(fileobj, path, callback=None, compression_level=None): """Create a tarball from a given path. :param fileobj: a file-like object holding the tarball byte-stream. If None, then only the callback will be used. :param path: path to create tarball from :param callback: optional callback to call on each chunk written :param compression_level: compression level, e.g., 9 for gzip -9. 
""" tar_cmd = ["tar", "-zc", "--directory=%s" % path, "."] env = os.environ.copy() if compression_level and 1 <= compression_level <= 9: env["GZIP"] = "-%d" % compression_level tar_proc = make_subprocess(tar_cmd, stdout=True, stderr=True, env=env) try: while True: chunk = tar_proc.stdout.read(CHUNK_SIZE) if chunk == '': break if callback: callback(chunk) if fileobj: fileobj.write(chunk) except Exception: try_kill_process(tar_proc) raise finish_subprocess(tar_proc, tar_cmd) def extract_tarball(fileobj, path, callback=None): """Extract a tarball to a given path. :param fileobj: a file-like object holding the tarball byte-stream :param path: path to extract tarball into :param callback: optional callback to call on each chunk read """ tar_cmd = ["tar", "-zx", "--directory=%s" % path] tar_proc = make_subprocess(tar_cmd, stderr=True, stdin=True) try: while True: chunk = fileobj.read(CHUNK_SIZE) if chunk == '': break if callback: callback(chunk) tar_proc.stdin.write(chunk) # NOTE(tpownall): If we do not poll for the tar process exit # code when tar has exited pre maturely there is the chance # that tar will become a defunct zombie child under glance plugin # and re parented under init forever waiting on the stdin pipe to # close. Polling for the exit code allows us to break the pipe. 
returncode = tar_proc.poll() tar_pid = tar_proc.pid if returncode is not None: LOG.error("tar extract with process id '%(pid)s' " "exited early with '%(rc)s'" % {'pid': tar_pid, 'rc': returncode}) raise SubprocessException( ' '.join(tar_cmd), returncode, "", "") except SubprocessException: # no need to kill already dead process raise except Exception: LOG.exception("Failed while sending data to tar pid: %s" % tar_pid) try_kill_process(tar_proc) raise finish_subprocess(tar_proc, tar_cmd) def _handle_serialization(func): def wrapped(session, params): params = pickle.loads(params['params']) rv = func(session, *params['args'], **params['kwargs']) return pickle.dumps(rv) return wrapped def register_plugin_calls(*funcs): """Wrapper around XenAPIPlugin.dispatch which handles pickle serialization. """ wrapped_dict = {} for func in funcs: wrapped_dict[func.__name__] = _handle_serialization(func) XenAPIPlugin.dispatch(wrapped_dict)
apache-2.0
houseurmusic/my-swift
swift/common/db.py
1
66430
# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. """ Database code for Swift """ from __future__ import with_statement from contextlib import contextmanager import hashlib import logging import operator import os from uuid import uuid4 import sys import time import cPickle as pickle import errno from random import randint from tempfile import mkstemp from eventlet import sleep import simplejson as json import sqlite3 from swift.common.utils import normalize_timestamp, renamer, \ mkdirs, lock_parent_directory, fallocate from swift.common.exceptions import LockTimeout #: Timeout for trying to connect to a DB BROKER_TIMEOUT = 25 #: Pickle protocol to use PICKLE_PROTOCOL = 2 #: Max number of pending entries PENDING_CAP = 131072 class DatabaseConnectionError(sqlite3.DatabaseError): """More friendly error messages for DB Errors.""" def __init__(self, path, msg, timeout=0): self.path = path self.timeout = timeout self.msg = msg def __str__(self): return 'DB connection error (%s, %s):\n%s' % ( self.path, self.timeout, self.msg) class GreenDBConnection(sqlite3.Connection): """SQLite DB Connection handler that plays well with eventlet.""" def __init__(self, *args, **kwargs): self.timeout = kwargs.get('timeout', BROKER_TIMEOUT) kwargs['timeout'] = 0 self.db_file = args and args[0] or '-' sqlite3.Connection.__init__(self, *args, **kwargs) def _timeout(self, call): with LockTimeout(self.timeout, self.db_file): while True: try: return call() except 
sqlite3.OperationalError, e: if 'locked' not in str(e): raise sleep(0.05) def execute(self, *args, **kwargs): return self._timeout(lambda: sqlite3.Connection.execute( self, *args, **kwargs)) def commit(self): return self._timeout(lambda: sqlite3.Connection.commit(self)) def dict_factory(crs, row): """ This should only be used when you need a real dict, i.e. when you're going to serialize the results. """ return dict( ((col[0], row[idx]) for idx, col in enumerate(crs.description))) def chexor(old, name, timestamp): """ Each entry in the account and container databases is XORed by the 128-bit hash on insert or delete. This serves as a rolling, order-independent hash of the contents. (check + XOR) :param old: hex representation of the current DB hash :param name: name of the object or container being inserted :param timestamp: timestamp of the new record :returns: a hex representation of the new hash value """ if name is None: raise Exception('name is None!') old = old.decode('hex') new = hashlib.md5(('%s-%s' % (name, timestamp)).encode('utf_8')).digest() response = ''.join( map(chr, map(operator.xor, map(ord, old), map(ord, new)))) return response.encode('hex') def get_db_connection(path, timeout=30, okay_to_create=False): """ Returns a properly configured SQLite database connection. 
:param path: path to DB :param timeout: timeout for connection :param okay_to_create: if True, create the DB if it doesn't exist :returns: DB connection object """ try: connect_time = time.time() conn = sqlite3.connect(path, check_same_thread=False, factory=GreenDBConnection, timeout=timeout) if path != ':memory:' and not okay_to_create: # attempt to detect and fail when connect creates the db file stat = os.stat(path) if stat.st_size == 0 and stat.st_ctime >= connect_time: os.unlink(path) raise DatabaseConnectionError(path, 'DB file created by connect?') conn.row_factory = sqlite3.Row conn.text_factory = str conn.execute('PRAGMA synchronous = NORMAL') conn.execute('PRAGMA count_changes = OFF') conn.execute('PRAGMA temp_store = MEMORY') conn.execute('PRAGMA journal_mode = DELETE') conn.create_function('chexor', 3, chexor) except sqlite3.DatabaseError: import traceback raise DatabaseConnectionError(path, traceback.format_exc(), timeout=timeout) return conn class DatabaseBroker(object): """Encapsulates working with a database.""" def __init__(self, db_file, timeout=BROKER_TIMEOUT, logger=None, account=None, container=None, pending_timeout=10, stale_reads_ok=False): """ Encapsulates working with a database. 
""" self.conn = None self.db_file = db_file self.pending_file = self.db_file + '.pending' self.pending_timeout = pending_timeout self.stale_reads_ok = stale_reads_ok self.db_dir = os.path.dirname(db_file) self.timeout = timeout self.logger = logger or logging.getLogger() self.account = account self.container = container self._db_version = -1 def initialize(self, put_timestamp=None): """ Create the DB :param put_timestamp: timestamp of initial PUT request """ if self.db_file == ':memory:': tmp_db_file = None conn = get_db_connection(self.db_file, self.timeout) else: mkdirs(self.db_dir) fd, tmp_db_file = mkstemp(suffix='.tmp', dir=self.db_dir) os.close(fd) conn = sqlite3.connect(tmp_db_file, check_same_thread=False, factory=GreenDBConnection, timeout=0) # creating dbs implicitly does a lot of transactions, so we # pick fast, unsafe options here and do a big fsync at the end. conn.execute('PRAGMA synchronous = OFF') conn.execute('PRAGMA temp_store = MEMORY') conn.execute('PRAGMA journal_mode = MEMORY') conn.create_function('chexor', 3, chexor) conn.row_factory = sqlite3.Row conn.text_factory = str conn.executescript(""" CREATE TABLE outgoing_sync ( remote_id TEXT UNIQUE, sync_point INTEGER, updated_at TEXT DEFAULT 0 ); CREATE TABLE incoming_sync ( remote_id TEXT UNIQUE, sync_point INTEGER, updated_at TEXT DEFAULT 0 ); CREATE TRIGGER outgoing_sync_insert AFTER INSERT ON outgoing_sync BEGIN UPDATE outgoing_sync SET updated_at = STRFTIME('%s', 'NOW') WHERE ROWID = new.ROWID; END; CREATE TRIGGER outgoing_sync_update AFTER UPDATE ON outgoing_sync BEGIN UPDATE outgoing_sync SET updated_at = STRFTIME('%s', 'NOW') WHERE ROWID = new.ROWID; END; CREATE TRIGGER incoming_sync_insert AFTER INSERT ON incoming_sync BEGIN UPDATE incoming_sync SET updated_at = STRFTIME('%s', 'NOW') WHERE ROWID = new.ROWID; END; CREATE TRIGGER incoming_sync_update AFTER UPDATE ON incoming_sync BEGIN UPDATE incoming_sync SET updated_at = STRFTIME('%s', 'NOW') WHERE ROWID = new.ROWID; END; """) if not 
put_timestamp: put_timestamp = normalize_timestamp(0) self._initialize(conn, put_timestamp) conn.commit() if tmp_db_file: conn.close() with open(tmp_db_file, 'r+b') as fp: os.fsync(fp.fileno()) with lock_parent_directory(self.db_file, self.pending_timeout): if os.path.exists(self.db_file): # It's as if there was a "condition" where different parts # of the system were "racing" each other. raise DatabaseConnectionError(self.db_file, 'DB created by someone else while working?') renamer(tmp_db_file, self.db_file) self.conn = get_db_connection(self.db_file, self.timeout) else: self.conn = conn def delete_db(self, timestamp): """ Mark the DB as deleted :param timestamp: delete timestamp """ timestamp = normalize_timestamp(timestamp) # first, clear the metadata cleared_meta = {} for k in self.metadata.iterkeys(): cleared_meta[k] = ('', timestamp) self.update_metadata(cleared_meta) # then mark the db as deleted with self.get() as conn: self._delete_db(conn, timestamp) conn.commit() def possibly_quarantine(self, exc_type, exc_value, exc_traceback): """ Checks the exception info to see if it indicates a quarantine situation (malformed or corrupted database). If not, the original exception will be reraised. If so, the database will be quarantined and a new sqlite3.DatabaseError will be raised indicating the action taken. 
""" if 'database disk image is malformed' in str(exc_value): exc_hint = 'malformed' elif 'file is encrypted or is not a database' in str(exc_value): exc_hint = 'corrupted' else: raise exc_type, exc_value, exc_traceback prefix_path = os.path.dirname(self.db_dir) partition_path = os.path.dirname(prefix_path) dbs_path = os.path.dirname(partition_path) device_path = os.path.dirname(dbs_path) quar_path = os.path.join(device_path, 'quarantined', self.db_type + 's', os.path.basename(self.db_dir)) try: renamer(self.db_dir, quar_path) except OSError, e: if e.errno not in (errno.EEXIST, errno.ENOTEMPTY): raise quar_path = "%s-%s" % (quar_path, uuid4().hex) renamer(self.db_dir, quar_path) detail = _('Quarantined %s to %s due to %s database') % \ (self.db_dir, quar_path, exc_hint) self.logger.error(detail) raise sqlite3.DatabaseError(detail) @contextmanager def get(self): """Use with the "with" statement; returns a database connection.""" if not self.conn: if self.db_file != ':memory:' and os.path.exists(self.db_file): try: self.conn = get_db_connection(self.db_file, self.timeout) except (sqlite3.DatabaseError, DatabaseConnectionError): self.possibly_quarantine(*sys.exc_info()) else: raise DatabaseConnectionError(self.db_file, "DB doesn't exist") conn = self.conn self.conn = None try: yield conn conn.rollback() self.conn = conn except sqlite3.DatabaseError, err: try: conn.close() except: pass self.possibly_quarantine(*sys.exc_info()) except Exception: conn.close() raise @contextmanager def lock(self): """Use with the "with" statement; locks a database.""" if not self.conn: if self.db_file != ':memory:' and os.path.exists(self.db_file): self.conn = get_db_connection(self.db_file, self.timeout) else: raise DatabaseConnectionError(self.db_file, "DB doesn't exist") conn = self.conn self.conn = None orig_isolation_level = conn.isolation_level conn.isolation_level = None conn.execute('BEGIN IMMEDIATE') try: yield True except Exception: pass try: conn.execute('ROLLBACK') 
conn.isolation_level = orig_isolation_level self.conn = conn except Exception: logging.exception( _('Broker error trying to rollback locked connection')) conn.close() def newid(self, remote_id): """ Re-id the database. This should be called after an rsync. :param remote_id: the ID of the remote database being rsynced in """ with self.get() as conn: row = conn.execute(''' UPDATE %s_stat SET id=? ''' % self.db_type, (str(uuid4()),)) row = conn.execute(''' SELECT ROWID FROM %s ORDER BY ROWID DESC LIMIT 1 ''' % self.db_contains_type).fetchone() sync_point = row['ROWID'] if row else -1 conn.execute(''' INSERT OR REPLACE INTO incoming_sync (sync_point, remote_id) VALUES (?, ?) ''', (sync_point, remote_id)) self._newid(conn) conn.commit() def _newid(self, conn): # Override for additional work when receiving an rsynced db. pass def merge_timestamps(self, created_at, put_timestamp, delete_timestamp): """ Used in replication to handle updating timestamps. :param created_at: create timestamp :param put_timestamp: put timestamp :param delete_timestamp: delete timestamp """ with self.get() as conn: conn.execute(''' UPDATE %s_stat SET created_at=MIN(?, created_at), put_timestamp=MAX(?, put_timestamp), delete_timestamp=MAX(?, delete_timestamp) ''' % self.db_type, (created_at, put_timestamp, delete_timestamp)) conn.commit() def get_items_since(self, start, count): """ Get a list of objects in the database between start and end. :param start: start ROWID :param count: number to get :returns: list of objects between start and end """ try: self._commit_puts() except LockTimeout: if not self.stale_reads_ok: raise with self.get() as conn: curs = conn.execute(''' SELECT * FROM %s WHERE ROWID > ? ORDER BY ROWID ASC LIMIT ? ''' % self.db_contains_type, (start, count)) curs.row_factory = dict_factory return [r for r in curs] def get_sync(self, id, incoming=True): """ Gets the most recent sync point for a server from the sync table. 
:param id: remote ID to get the sync_point for :param incoming: if True, get the last incoming sync, otherwise get the last outgoing sync :returns: the sync point, or -1 if the id doesn't exist. """ with self.get() as conn: row = conn.execute( "SELECT sync_point FROM %s_sync WHERE remote_id=?" % ('incoming' if incoming else 'outgoing'), (id,)).fetchone() if not row: return -1 return row['sync_point'] def get_syncs(self, incoming=True): """ Get a serialized copy of the sync table. :param incoming: if True, get the last incoming sync, otherwise get the last outgoing sync :returns: list of {'remote_id', 'sync_point'} """ with self.get() as conn: curs = conn.execute(''' SELECT remote_id, sync_point FROM %s_sync ''' % 'incoming' if incoming else 'outgoing') result = [] for row in curs: result.append({'remote_id': row[0], 'sync_point': row[1]}) return result def get_replication_info(self): """ Get information about the DB required for replication. :returns: dict containing keys: hash, id, created_at, put_timestamp, delete_timestamp, count, max_row, and metadata """ try: self._commit_puts() except LockTimeout: if not self.stale_reads_ok: raise query_part1 = ''' SELECT hash, id, created_at, put_timestamp, delete_timestamp, %s_count AS count, CASE WHEN SQLITE_SEQUENCE.seq IS NOT NULL THEN SQLITE_SEQUENCE.seq ELSE -1 END AS max_row, ''' % \ self.db_contains_type query_part2 = ''' FROM (%s_stat LEFT JOIN SQLITE_SEQUENCE ON SQLITE_SEQUENCE.name == '%s') LIMIT 1 ''' % (self.db_type, self.db_contains_type) with self.get() as conn: try: curs = conn.execute(query_part1 + 'metadata' + query_part2) except sqlite3.OperationalError, err: if 'no such column: metadata' not in str(err): raise curs = conn.execute(query_part1 + "'' as metadata" + query_part2) curs.row_factory = dict_factory return curs.fetchone() def _commit_puts(self): pass # stub to be overridden if need be def merge_syncs(self, sync_points, incoming=True): """ Merge a list of sync points with the incoming sync table. 
:param sync_points: list of sync points where a sync point is a dict of {'sync_point', 'remote_id'} :param incoming: if True, get the last incoming sync, otherwise get the last outgoing sync """ with self.get() as conn: for rec in sync_points: try: conn.execute(''' INSERT INTO %s_sync (sync_point, remote_id) VALUES (?, ?) ''' % ('incoming' if incoming else 'outgoing'), (rec['sync_point'], rec['remote_id'])) except sqlite3.IntegrityError: conn.execute(''' UPDATE %s_sync SET sync_point=max(?, sync_point) WHERE remote_id=? ''' % ('incoming' if incoming else 'outgoing'), (rec['sync_point'], rec['remote_id'])) conn.commit() def _preallocate(self): """ The idea is to allocate space in front of an expanding db. If it gets within 512k of a boundary, it allocates to the next boundary. Boundaries are 2m, 5m, 10m, 25m, 50m, then every 50m after. """ if self.db_file == ':memory:': return MB = (1024 * 1024) def prealloc_points(): for pm in (1, 2, 5, 10, 25, 50): yield pm * MB while True: pm += 50 yield pm * MB stat = os.stat(self.db_file) file_size = stat.st_size allocated_size = stat.st_blocks * 512 for point in prealloc_points(): if file_size <= point - MB / 2: prealloc_size = point break if allocated_size < prealloc_size: with open(self.db_file, 'rb+') as fp: fallocate(fp.fileno(), int(prealloc_size)) @property def metadata(self): """ Returns the metadata dict for the database. The metadata dict values are tuples of (value, timestamp) where the timestamp indicates when that key was set to that value. """ with self.get() as conn: try: metadata = conn.execute('SELECT metadata FROM %s_stat' % self.db_type).fetchone()[0] except sqlite3.OperationalError, err: if 'no such column: metadata' not in str(err): raise metadata = '' if metadata: metadata = json.loads(metadata) else: metadata = {} return metadata def update_metadata(self, metadata_updates): """ Updates the metadata dict for the database. 
The metadata dict values are tuples of (value, timestamp) where the timestamp indicates when that key was set to that value. Key/values will only be overwritten if the timestamp is newer. To delete a key, set its value to ('', timestamp). These empty keys will eventually be removed by :func:reclaim """ old_metadata = self.metadata if set(metadata_updates).issubset(set(old_metadata)): for key, (value, timestamp) in metadata_updates.iteritems(): if timestamp > old_metadata[key][1]: break else: return with self.get() as conn: try: md = conn.execute('SELECT metadata FROM %s_stat' % self.db_type).fetchone()[0] md = md and json.loads(md) or {} except sqlite3.OperationalError, err: if 'no such column: metadata' not in str(err): raise conn.execute(""" ALTER TABLE %s_stat ADD COLUMN metadata TEXT DEFAULT '' """ % self.db_type) md = {} for key, value_timestamp in metadata_updates.iteritems(): value, timestamp = value_timestamp if key not in md or timestamp > md[key][1]: md[key] = value_timestamp conn.execute('UPDATE %s_stat SET metadata = ?' % self.db_type, (json.dumps(md),)) conn.commit() def reclaim(self, timestamp): """Removes any empty metadata values older than the timestamp""" if not self.metadata: return with self.get() as conn: if self._reclaim(conn, timestamp): conn.commit() def _reclaim(self, conn, timestamp): """ Removes any empty metadata values older than the timestamp using the given database connection. This function will not call commit on the conn, but will instead return True if the database needs committing. This function was created as a worker to limit transactions and commits from other related functions. :param conn: Database connection to reclaim metadata within. :param timestamp: Empty metadata items last updated before this timestamp will be removed. 
:returns: True if conn.commit() should be called """ try: md = conn.execute('SELECT metadata FROM %s_stat' % self.db_type).fetchone()[0] if md: md = json.loads(md) keys_to_delete = [] for key, (value, value_timestamp) in md.iteritems(): if value == '' and value_timestamp < timestamp: keys_to_delete.append(key) if keys_to_delete: for key in keys_to_delete: del md[key] conn.execute('UPDATE %s_stat SET metadata = ?' % self.db_type, (json.dumps(md),)) return True except sqlite3.OperationalError, err: if 'no such column: metadata' not in str(err): raise return False class ContainerBroker(DatabaseBroker): """Encapsulates working with a container database.""" db_type = 'container' db_contains_type = 'object' def _initialize(self, conn, put_timestamp): """Creates a brand new database (tables, indices, triggers, etc.)""" if not self.account: raise ValueError( 'Attempting to create a new database with no account set') if not self.container: raise ValueError( 'Attempting to create a new database with no container set') self.create_object_table(conn) self.create_container_stat_table(conn, put_timestamp) def create_object_table(self, conn): """ Create the object table which is specifc to the container DB. 
:param conn: DB connection object """ conn.executescript(""" CREATE TABLE object ( ROWID INTEGER PRIMARY KEY AUTOINCREMENT, name TEXT, created_at TEXT, size INTEGER, content_type TEXT, etag TEXT, deleted INTEGER DEFAULT 0 ); CREATE INDEX ix_object_deleted_name ON object (deleted, name); CREATE TRIGGER object_insert AFTER INSERT ON object BEGIN UPDATE container_stat SET object_count = object_count + (1 - new.deleted), bytes_used = bytes_used + new.size, hash = chexor(hash, new.name, new.created_at); END; CREATE TRIGGER object_update BEFORE UPDATE ON object BEGIN SELECT RAISE(FAIL, 'UPDATE not allowed; DELETE and INSERT'); END; CREATE TRIGGER object_delete AFTER DELETE ON object BEGIN UPDATE container_stat SET object_count = object_count - (1 - old.deleted), bytes_used = bytes_used - old.size, hash = chexor(hash, old.name, old.created_at); END; """) def create_container_stat_table(self, conn, put_timestamp=None): """ Create the container_stat table which is specific to the container DB. :param conn: DB connection object :param put_timestamp: put timestamp """ if put_timestamp is None: put_timestamp = normalize_timestamp(0) conn.executescript(""" CREATE TABLE container_stat ( account TEXT, container TEXT, created_at TEXT, put_timestamp TEXT DEFAULT '0', delete_timestamp TEXT DEFAULT '0', object_count INTEGER, bytes_used INTEGER, reported_put_timestamp TEXT DEFAULT '0', reported_delete_timestamp TEXT DEFAULT '0', reported_object_count INTEGER DEFAULT 0, reported_bytes_used INTEGER DEFAULT 0, hash TEXT default '00000000000000000000000000000000', id TEXT, status TEXT DEFAULT '', status_changed_at TEXT DEFAULT '0', metadata TEXT DEFAULT '', x_container_sync_point1 INTEGER DEFAULT -1, x_container_sync_point2 INTEGER DEFAULT -1 ); INSERT INTO container_stat (object_count, bytes_used) VALUES (0, 0); """) conn.execute(''' UPDATE container_stat SET account = ?, container = ?, created_at = ?, id = ?, put_timestamp = ? 
''', (self.account, self.container, normalize_timestamp(time.time()), str(uuid4()), put_timestamp)) def get_db_version(self, conn): if self._db_version == -1: self._db_version = 0 for row in conn.execute(''' SELECT name FROM sqlite_master WHERE name = 'ix_object_deleted_name' '''): self._db_version = 1 return self._db_version def _newid(self, conn): conn.execute(''' UPDATE container_stat SET reported_put_timestamp = 0, reported_delete_timestamp = 0, reported_object_count = 0, reported_bytes_used = 0''') def update_put_timestamp(self, timestamp): """ Update the put_timestamp. Only modifies it if it is greater than the current timestamp. :param timestamp: put timestamp """ with self.get() as conn: conn.execute(''' UPDATE container_stat SET put_timestamp = ? WHERE put_timestamp < ? ''', (timestamp, timestamp)) conn.commit() def _delete_db(self, conn, timestamp): """ Mark the DB as deleted :param conn: DB connection object :param timestamp: timestamp to mark as deleted """ conn.execute(""" UPDATE container_stat SET delete_timestamp = ?, status = 'DELETED', status_changed_at = ? WHERE delete_timestamp < ? """, (timestamp, timestamp, timestamp)) def empty(self): """ Check if the DB is empty. 
:returns: True if the database has no active objects, False otherwise """ try: self._commit_puts() except LockTimeout: if not self.stale_reads_ok: raise with self.get() as conn: row = conn.execute( 'SELECT object_count from container_stat').fetchone() return (row[0] == 0) def _commit_puts(self, item_list=None): """Handles commiting rows in .pending files.""" if self.db_file == ':memory:' or not os.path.exists(self.pending_file): return if item_list is None: item_list = [] with lock_parent_directory(self.pending_file, self.pending_timeout): self._preallocate() if not os.path.getsize(self.pending_file): if item_list: self.merge_items(item_list) return with open(self.pending_file, 'r+b') as fp: for entry in fp.read().split(':'): if entry: try: (name, timestamp, size, content_type, etag, deleted) = pickle.loads(entry.decode('base64')) item_list.append({'name': name, 'created_at': timestamp, 'size': size, 'content_type': content_type, 'etag': etag, 'deleted': deleted}) except Exception: self.logger.exception( _('Invalid pending entry %(file)s: %(entry)s'), {'file': self.pending_file, 'entry': entry}) if item_list: self.merge_items(item_list) try: os.ftruncate(fp.fileno(), 0) except OSError, err: if err.errno != errno.ENOENT: raise def reclaim(self, object_timestamp, sync_timestamp): """ Delete rows from the object table that are marked deleted and whose created_at timestamp is < object_timestamp. Also deletes rows from incoming_sync and outgoing_sync where the updated_at timestamp is < sync_timestamp. In addition, this calls the DatabaseBroker's :func:_reclaim method. :param object_timestamp: max created_at timestamp of object rows to delete :param sync_timestamp: max update_at timestamp of sync rows to delete """ self._commit_puts() with self.get() as conn: conn.execute(""" DELETE FROM object WHERE deleted = 1 AND created_at < ?""", (object_timestamp,)) try: conn.execute(''' DELETE FROM outgoing_sync WHERE updated_at < ? 
''', (sync_timestamp,)) conn.execute(''' DELETE FROM incoming_sync WHERE updated_at < ? ''', (sync_timestamp,)) except sqlite3.OperationalError, err: # Old dbs didn't have updated_at in the _sync tables. if 'no such column: updated_at' not in str(err): raise DatabaseBroker._reclaim(self, conn, object_timestamp) conn.commit() def delete_object(self, name, timestamp): """ Mark an object deleted. :param name: object name to be deleted :param timestamp: timestamp when the object was marked as deleted """ self.put_object(name, timestamp, 0, 'application/deleted', 'noetag', 1) def put_object(self, name, timestamp, size, content_type, etag, deleted=0): """ Creates an object in the DB with its metadata. :param name: object name to be created :param timestamp: timestamp of when the object was created :param size: object size :param content_type: object content-type :param etag: object etag :param deleted: if True, marks the object as deleted and sets the deteleted_at timestamp to timestamp """ record = {'name': name, 'created_at': timestamp, 'size': size, 'content_type': content_type, 'etag': etag, 'deleted': deleted} if self.db_file == ':memory:': self.merge_items([record]) return if not os.path.exists(self.db_file): raise DatabaseConnectionError(self.db_file, "DB doesn't exist") pending_size = 0 try: pending_size = os.path.getsize(self.pending_file) except OSError, err: if err.errno != errno.ENOENT: raise if pending_size > PENDING_CAP: self._commit_puts([record]) else: with lock_parent_directory( self.pending_file, self.pending_timeout): with open(self.pending_file, 'a+b') as fp: # Colons aren't used in base64 encoding; so they are our # delimiter fp.write(':') fp.write(pickle.dumps( (name, timestamp, size, content_type, etag, deleted), protocol=PICKLE_PROTOCOL).encode('base64')) fp.flush() def is_deleted(self, timestamp=None): """ Check if the DB is considered to be deleted. 
:returns: True if the DB is considered to be deleted, False otherwise """ if self.db_file != ':memory:' and not os.path.exists(self.db_file): return True try: self._commit_puts() except LockTimeout: if not self.stale_reads_ok: raise with self.get() as conn: row = conn.execute(''' SELECT put_timestamp, delete_timestamp, object_count FROM container_stat''').fetchone() # leave this db as a tombstone for a consistency window if timestamp and row['delete_timestamp'] > timestamp: return False # The container is considered deleted if the delete_timestamp # value is greater than the put_timestamp, and there are no # objects in the container. return (row['object_count'] in (None, '', 0, '0')) and \ (float(row['delete_timestamp']) > float(row['put_timestamp'])) def get_info(self, include_metadata=False): """ Get global data for the container. :returns: dict with keys: account, container, created_at, put_timestamp, delete_timestamp, object_count, bytes_used, reported_put_timestamp, reported_delete_timestamp, reported_object_count, reported_bytes_used, hash, id, x_container_sync_point1, and x_container_sync_point2. 
If include_metadata is set, metadata is included as a key pointing to a dict of tuples of the metadata """ try: self._commit_puts() except LockTimeout: if not self.stale_reads_ok: raise with self.get() as conn: data = None trailing1 = 'metadata' trailing2 = 'x_container_sync_point1, x_container_sync_point2' while not data: try: data = conn.execute(''' SELECT account, container, created_at, put_timestamp, delete_timestamp, object_count, bytes_used, reported_put_timestamp, reported_delete_timestamp, reported_object_count, reported_bytes_used, hash, id, %s, %s FROM container_stat ''' % (trailing1, trailing2)).fetchone() except sqlite3.OperationalError, err: if 'no such column: metadata' in str(err): trailing1 = "'' as metadata" elif 'no such column: x_container_sync_point' in str(err): trailing2 = '-1 AS x_container_sync_point1, ' \ '-1 AS x_container_sync_point2' else: raise data = dict(data) if include_metadata: try: data['metadata'] = json.loads(data.get('metadata', '')) except ValueError: data['metadata'] = {} elif 'metadata' in data: del data['metadata'] return data def set_x_container_sync_points(self, sync_point1, sync_point2): with self.get() as conn: orig_isolation_level = conn.isolation_level try: # We turn off auto-transactions to ensure the alter table # commands are part of the transaction. 
conn.isolation_level = None conn.execute('BEGIN') try: self._set_x_container_sync_points(conn, sync_point1, sync_point2) except sqlite3.OperationalError, err: if 'no such column: x_container_sync_point' not in \ str(err): raise conn.execute(''' ALTER TABLE container_stat ADD COLUMN x_container_sync_point1 INTEGER DEFAULT -1 ''') conn.execute(''' ALTER TABLE container_stat ADD COLUMN x_container_sync_point2 INTEGER DEFAULT -1 ''') self._set_x_container_sync_points(conn, sync_point1, sync_point2) conn.execute('COMMIT') finally: conn.isolation_level = orig_isolation_level def _set_x_container_sync_points(self, conn, sync_point1, sync_point2): if sync_point1 is not None and sync_point2 is not None: conn.execute(''' UPDATE container_stat SET x_container_sync_point1 = ?, x_container_sync_point2 = ? ''', (sync_point1, sync_point2)) elif sync_point1 is not None: conn.execute(''' UPDATE container_stat SET x_container_sync_point1 = ? ''', (sync_point1,)) elif sync_point2 is not None: conn.execute(''' UPDATE container_stat SET x_container_sync_point2 = ? ''', (sync_point2,)) def reported(self, put_timestamp, delete_timestamp, object_count, bytes_used): """ Update reported stats. :param put_timestamp: put_timestamp to update :param delete_timestamp: delete_timestamp to update :param object_count: object_count to update :param bytes_used: bytes_used to update """ with self.get() as conn: conn.execute(''' UPDATE container_stat SET reported_put_timestamp = ?, reported_delete_timestamp = ?, reported_object_count = ?, reported_bytes_used = ? ''', (put_timestamp, delete_timestamp, object_count, bytes_used)) conn.commit() def list_objects_iter(self, limit, marker, end_marker, prefix, delimiter, path=None, format=None): """ Get a list of objects sorted by name starting at marker onward, up to limit entries. Entries will begin with the prefix and will not have the delimiter after the prefix. 
:param limit: maximum number of entries to get :param marker: marker query :param end_marker: end marker query :param prefix: prefix query :param delimeter: delimeter for query :param path: if defined, will set the prefix and delimter based on the path :param format: TOOD: remove as it is no longer used :returns: list of tuples of (name, created_at, size, content_type, etag) """ try: self._commit_puts() except LockTimeout: if not self.stale_reads_ok: raise if path is not None: prefix = path if path: prefix = path = path.rstrip('/') + '/' delimiter = '/' elif delimiter and not prefix: prefix = '' orig_marker = marker with self.get() as conn: results = [] while len(results) < limit: query = '''SELECT name, created_at, size, content_type, etag FROM object WHERE''' query_args = [] if end_marker: query += ' name < ? AND' query_args.append(end_marker) if marker and marker >= prefix: query += ' name > ? AND' query_args.append(marker) elif prefix: query += ' name >= ? AND' query_args.append(prefix) if self.get_db_version(conn) < 1: query += ' +deleted = 0' else: query += ' deleted = 0' query += ' ORDER BY name LIMIT ?' 
query_args.append(limit - len(results)) curs = conn.execute(query, query_args) curs.row_factory = None if prefix is None: return [r for r in curs] if not delimiter: return [r for r in curs if r[0].startswith(prefix)] rowcount = 0 for row in curs: rowcount += 1 marker = name = row[0] if len(results) >= limit or not name.startswith(prefix): curs.close() return results end = name.find(delimiter, len(prefix)) if path is not None: if name == path: continue if end >= 0 and len(name) > end + len(delimiter): marker = name[:end] + chr(ord(delimiter) + 1) curs.close() break elif end > 0: marker = name[:end] + chr(ord(delimiter) + 1) dir_name = name[:end + 1] if dir_name != orig_marker: results.append([dir_name, '0', 0, None, '']) curs.close() break results.append(row) if not rowcount: break return results def merge_items(self, item_list, source=None): """ Merge items into the object table. :param item_list: list of dictionaries of {'name', 'created_at', 'size', 'content_type', 'etag', 'deleted'} :param source: if defined, update incoming_sync with the source """ with self.get() as conn: max_rowid = -1 for rec in item_list: query = ''' DELETE FROM object WHERE name = ? AND (created_at < ?) ''' if self.get_db_version(conn) >= 1: query += ' AND deleted IN (0, 1)' conn.execute(query, (rec['name'], rec['created_at'])) query = 'SELECT 1 FROM object WHERE name = ?' if self.get_db_version(conn) >= 1: query += ' AND deleted IN (0, 1)' if not conn.execute(query, (rec['name'],)).fetchall(): conn.execute(''' INSERT INTO object (name, created_at, size, content_type, etag, deleted) VALUES (?, ?, ?, ?, ?, ?) ''', ([rec['name'], rec['created_at'], rec['size'], rec['content_type'], rec['etag'], rec['deleted']])) if source: max_rowid = max(max_rowid, rec['ROWID']) if source: try: conn.execute(''' INSERT INTO incoming_sync (sync_point, remote_id) VALUES (?, ?) 
''', (max_rowid, source)) except sqlite3.IntegrityError: conn.execute(''' UPDATE incoming_sync SET sync_point=max(?, sync_point) WHERE remote_id=? ''', (max_rowid, source)) conn.commit() class AccountBroker(DatabaseBroker): """Encapsulates working with a account database.""" db_type = 'account' db_contains_type = 'container' def _initialize(self, conn, put_timestamp): """ Create a brand new database (tables, indices, triggers, etc.) :param conn: DB connection object :param put_timestamp: put timestamp """ if not self.account: raise ValueError( 'Attempting to create a new database with no account set') self.create_container_table(conn) self.create_account_stat_table(conn, put_timestamp) def create_container_table(self, conn): """ Create container table which is specific to the account DB. :param conn: DB connection object """ conn.executescript(""" CREATE TABLE container ( ROWID INTEGER PRIMARY KEY AUTOINCREMENT, name TEXT, put_timestamp TEXT, delete_timestamp TEXT, object_count INTEGER, bytes_used INTEGER, deleted INTEGER DEFAULT 0 ); CREATE INDEX ix_container_deleted_name ON container (deleted, name); CREATE TRIGGER container_insert AFTER INSERT ON container BEGIN UPDATE account_stat SET container_count = container_count + (1 - new.deleted), object_count = object_count + new.object_count, bytes_used = bytes_used + new.bytes_used, hash = chexor(hash, new.name, new.put_timestamp || '-' || new.delete_timestamp || '-' || new.object_count || '-' || new.bytes_used); END; CREATE TRIGGER container_update BEFORE UPDATE ON container BEGIN SELECT RAISE(FAIL, 'UPDATE not allowed; DELETE and INSERT'); END; CREATE TRIGGER container_delete AFTER DELETE ON container BEGIN UPDATE account_stat SET container_count = container_count - (1 - old.deleted), object_count = object_count - old.object_count, bytes_used = bytes_used - old.bytes_used, hash = chexor(hash, old.name, old.put_timestamp || '-' || old.delete_timestamp || '-' || old.object_count || '-' || old.bytes_used); END; """) 
def create_account_stat_table(self, conn, put_timestamp): """ Create account_stat table which is specific to the account DB. :param conn: DB connection object :param put_timestamp: put timestamp """ conn.executescript(""" CREATE TABLE account_stat ( account TEXT, created_at TEXT, put_timestamp TEXT DEFAULT '0', delete_timestamp TEXT DEFAULT '0', container_count INTEGER, object_count INTEGER DEFAULT 0, bytes_used INTEGER DEFAULT 0, hash TEXT default '00000000000000000000000000000000', id TEXT, status TEXT DEFAULT '', status_changed_at TEXT DEFAULT '0', metadata TEXT DEFAULT '' ); INSERT INTO account_stat (container_count) VALUES (0); """) conn.execute(''' UPDATE account_stat SET account = ?, created_at = ?, id = ?, put_timestamp = ? ''', (self.account, normalize_timestamp(time.time()), str(uuid4()), put_timestamp)) def get_db_version(self, conn): if self._db_version == -1: self._db_version = 0 for row in conn.execute(''' SELECT name FROM sqlite_master WHERE name = 'ix_container_deleted_name' '''): self._db_version = 1 return self._db_version def update_put_timestamp(self, timestamp): """ Update the put_timestamp. Only modifies it if it is greater than the current timestamp. :param timestamp: put timestamp """ with self.get() as conn: conn.execute(''' UPDATE account_stat SET put_timestamp = ? WHERE put_timestamp < ? ''', (timestamp, timestamp)) conn.commit() def _delete_db(self, conn, timestamp, force=False): """ Mark the DB as deleted. :param conn: DB connection object :param timestamp: timestamp to mark as deleted """ conn.execute(""" UPDATE account_stat SET delete_timestamp = ?, status = 'DELETED', status_changed_at = ? WHERE delete_timestamp < ? 
""", (timestamp, timestamp, timestamp)) def _commit_puts(self, item_list=None): """Handles commiting rows in .pending files.""" if self.db_file == ':memory:' or not os.path.exists(self.pending_file): return if item_list is None: item_list = [] with lock_parent_directory(self.pending_file, self.pending_timeout): self._preallocate() if not os.path.getsize(self.pending_file): if item_list: self.merge_items(item_list) return with open(self.pending_file, 'r+b') as fp: for entry in fp.read().split(':'): if entry: try: (name, put_timestamp, delete_timestamp, object_count, bytes_used, deleted) = \ pickle.loads(entry.decode('base64')) item_list.append({'name': name, 'put_timestamp': put_timestamp, 'delete_timestamp': delete_timestamp, 'object_count': object_count, 'bytes_used': bytes_used, 'deleted': deleted}) except Exception: self.logger.exception( _('Invalid pending entry %(file)s: %(entry)s'), {'file': self.pending_file, 'entry': entry}) if item_list: self.merge_items(item_list) try: os.ftruncate(fp.fileno(), 0) except OSError, err: if err.errno != errno.ENOENT: raise def empty(self): """ Check if the account DB is empty. :returns: True if the database has no active containers. """ try: self._commit_puts() except LockTimeout: if not self.stale_reads_ok: raise with self.get() as conn: row = conn.execute( 'SELECT container_count from account_stat').fetchone() return (row[0] == 0) def reclaim(self, container_timestamp, sync_timestamp): """ Delete rows from the container table that are marked deleted and whose created_at timestamp is < object_timestamp. Also deletes rows from incoming_sync and outgoing_sync where the updated_at timestamp is < sync_timestamp. In addition, this calls the DatabaseBroker's :func:_reclaim method. 
:param object_timestamp: max created_at timestamp of container rows to delete :param sync_timestamp: max update_at timestamp of sync rows to delete """ self._commit_puts() with self.get() as conn: conn.execute(''' DELETE FROM container WHERE deleted = 1 AND delete_timestamp < ? ''', (container_timestamp,)) try: conn.execute(''' DELETE FROM outgoing_sync WHERE updated_at < ? ''', (sync_timestamp,)) conn.execute(''' DELETE FROM incoming_sync WHERE updated_at < ? ''', (sync_timestamp,)) except sqlite3.OperationalError, err: # Old dbs didn't have updated_at in the _sync tables. if 'no such column: updated_at' not in str(err): raise DatabaseBroker._reclaim(self, conn, container_timestamp) conn.commit() def get_container_timestamp(self, container_name): """ Get the put_timestamp of a container. :param container_name: container name :returns: put_timestamp of the container """ try: self._commit_puts() except LockTimeout: if not self.stale_reads_ok: raise with self.get() as conn: ret = conn.execute(''' SELECT put_timestamp FROM container WHERE name = ? AND deleted != 1''', (container_name,)).fetchone() if ret: ret = ret[0] return ret def put_container(self, name, put_timestamp, delete_timestamp, object_count, bytes_used): """ Create a container with the given attributes. 
:param name: name of the container to create :param put_timestamp: put_timestamp of the container to create :param delete_timestamp: delete_timestamp of the container to create :param object_count: number of objects in the container :param bytes_used: number of bytes used by the container """ if delete_timestamp > put_timestamp and \ object_count in (None, '', 0, '0'): deleted = 1 else: deleted = 0 record = {'name': name, 'put_timestamp': put_timestamp, 'delete_timestamp': delete_timestamp, 'object_count': object_count, 'bytes_used': bytes_used, 'deleted': deleted} if self.db_file == ':memory:': self.merge_items([record]) return commit = False with lock_parent_directory(self.pending_file, self.pending_timeout): with open(self.pending_file, 'a+b') as fp: # Colons aren't used in base64 encoding; so they are our # delimiter fp.write(':') fp.write(pickle.dumps( (name, put_timestamp, delete_timestamp, object_count, bytes_used, deleted), protocol=PICKLE_PROTOCOL).encode('base64')) fp.flush() if fp.tell() > PENDING_CAP: commit = True if commit: self._commit_puts() def can_delete_db(self, cutoff): """ Check if the accont DB can be deleted. 
:returns: True if the account can be deleted, False otherwise """ self._commit_puts() with self.get() as conn: row = conn.execute(''' SELECT status, put_timestamp, delete_timestamp, container_count FROM account_stat''').fetchone() # The account is considered deleted if its status is marked # as 'DELETED" and the delete_timestamp is older than the supplied # cutoff date; or if the delete_timestamp value is greater than # the put_timestamp, and there are no containers for the account status_del = (row['status'] == 'DELETED') deltime = float(row['delete_timestamp']) past_cutoff = (deltime < cutoff) time_later = (row['delete_timestamp'] > row['put_timestamp']) no_containers = (row['container_count'] in (None, '', 0, '0')) return ( (status_del and past_cutoff) or (time_later and no_containers)) def is_deleted(self): """ Check if the account DB is considered to be deleted. :returns: True if the account DB is considered to be deleted, False otherwise """ if self.db_file != ':memory:' and not os.path.exists(self.db_file): return True try: self._commit_puts() except LockTimeout: if not self.stale_reads_ok: raise with self.get() as conn: row = conn.execute(''' SELECT put_timestamp, delete_timestamp, container_count, status FROM account_stat''').fetchone() return row['status'] == 'DELETED' or ( row['container_count'] in (None, '', 0, '0') and row['delete_timestamp'] > row['put_timestamp']) def is_status_deleted(self): """Only returns true if the status field is set to DELETED.""" with self.get() as conn: row = conn.execute(''' SELECT status FROM account_stat''').fetchone() return (row['status'] == "DELETED") def get_info(self): """ Get global data for the account. 
:returns: dict with keys: account, created_at, put_timestamp, delete_timestamp, container_count, object_count, bytes_used, hash, id """ try: self._commit_puts() except LockTimeout: if not self.stale_reads_ok: raise with self.get() as conn: return dict(conn.execute(''' SELECT account, created_at, put_timestamp, delete_timestamp, container_count, object_count, bytes_used, hash, id FROM account_stat ''').fetchone()) def list_containers_iter(self, limit, marker, end_marker, prefix, delimiter): """ Get a list of containerss sorted by name starting at marker onward, up to limit entries. Entries will begin with the prefix and will not have the delimiter after the prefix. :param limit: maximum number of entries to get :param marker: marker query :param end_marker: end marker query :param prefix: prefix query :param delimeter: delimeter for query :returns: list of tuples of (name, object_count, bytes_used, 0) """ try: self._commit_puts() except LockTimeout: if not self.stale_reads_ok: raise if delimiter and not prefix: prefix = '' orig_marker = marker with self.get() as conn: results = [] while len(results) < limit: query = """ SELECT name, object_count, bytes_used, 0 FROM container WHERE deleted = 0 AND """ query_args = [] if end_marker: query += ' name <= ? AND' query_args.append(end_marker) if marker and marker >= prefix: query += ' name > ? AND' query_args.append(marker) elif prefix: query += ' name >= ? AND' query_args.append(prefix) if self.get_db_version(conn) < 1: query += ' +deleted = 0' else: query += ' deleted = 0' query += ' ORDER BY name LIMIT ?' 
query_args.append(limit - len(results)) curs = conn.execute(query, query_args) curs.row_factory = None if prefix is None: return [r for r in curs] if not delimiter: return [r for r in curs if r[0].startswith(prefix)] rowcount = 0 for row in curs: rowcount += 1 marker = name = row[0] if len(results) >= limit or not name.startswith(prefix): curs.close() return results end = name.find(delimiter, len(prefix)) if end > 0: marker = name[:end] + chr(ord(delimiter) + 1) dir_name = name[:end + 1] if dir_name != orig_marker: results.append([dir_name, 0, 0, 1]) curs.close() break results.append(row) if not rowcount: break return results def merge_items(self, item_list, source=None): """ Merge items into the container table. :param item_list: list of dictionaries of {'name', 'put_timestamp', 'delete_timestamp', 'object_count', 'bytes_used', 'deleted'} :param source: if defined, update incoming_sync with the source """ with self.get() as conn: max_rowid = -1 for rec in item_list: record = [rec['name'], rec['put_timestamp'], rec['delete_timestamp'], rec['object_count'], rec['bytes_used'], rec['deleted']] query = ''' SELECT name, put_timestamp, delete_timestamp, object_count, bytes_used, deleted FROM container WHERE name = ? ''' if self.get_db_version(conn) >= 1: query += ' AND deleted IN (0, 1)' curs = conn.execute(query, (rec['name'],)) curs.row_factory = None row = curs.fetchone() if row: row = list(row) for i in xrange(5): if record[i] is None and row[i] is not None: record[i] = row[i] if row[1] > record[1]: # Keep newest put_timestamp record[1] = row[1] if row[2] > record[2]: # Keep newest delete_timestamp record[2] = row[2] # If deleted, mark as such if record[2] > record[1] and \ record[3] in (None, '', 0, '0'): record[5] = 1 else: record[5] = 0 conn.execute(''' DELETE FROM container WHERE name = ? 
AND deleted IN (0, 1) ''', (record[0],)) conn.execute(''' INSERT INTO container (name, put_timestamp, delete_timestamp, object_count, bytes_used, deleted) VALUES (?, ?, ?, ?, ?, ?) ''', record) if source: max_rowid = max(max_rowid, rec['ROWID']) if source: try: conn.execute(''' INSERT INTO incoming_sync (sync_point, remote_id) VALUES (?, ?) ''', (max_rowid, source)) except sqlite3.IntegrityError: conn.execute(''' UPDATE incoming_sync SET sync_point=max(?, sync_point) WHERE remote_id=? ''', (max_rowid, source)) conn.commit()
apache-2.0
mohamed--abdel-maksoud/chromium.src
tools/clang/scripts/update.py
9
10298
#!/usr/bin/env python # Copyright (c) 2012 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Windows can't run .sh files, so this is a Python implementation of update.sh. This script should replace update.sh on all platforms eventually.""" import os import re import shutil import subprocess import stat import sys import time # Do NOT CHANGE this if you don't know what you're doing -- see # https://code.google.com/p/chromium/wiki/UpdatingClang # Reverting problematic clang rolls is safe, though. # Note: this revision is only used for Windows. Other platforms use update.sh. LLVM_WIN_REVISION = 'HEAD' # ASan on Windows is useful enough to use it even while the clang/win is still # in bringup. Use a pinned revision to make it slightly more stable. if (re.search(r'\b(asan)=1', os.environ.get('GYP_DEFINES', '')) and not 'LLVM_FORCE_HEAD_REVISION' in os.environ): LLVM_WIN_REVISION = '217738' # Path constants. (All of these should be absolute paths.) 
THIS_DIR = os.path.abspath(os.path.dirname(__file__)) CHROMIUM_DIR = os.path.abspath(os.path.join(THIS_DIR, '..', '..', '..')) LLVM_DIR = os.path.join(CHROMIUM_DIR, 'third_party', 'llvm') LLVM_BUILD_DIR = os.path.join(CHROMIUM_DIR, 'third_party', 'llvm-build', 'Release+Asserts') COMPILER_RT_BUILD_DIR = os.path.join(LLVM_BUILD_DIR, '32bit-compiler-rt') CLANG_DIR = os.path.join(LLVM_DIR, 'tools', 'clang') LLD_DIR = os.path.join(LLVM_DIR, 'tools', 'lld') COMPILER_RT_DIR = os.path.join(LLVM_DIR, 'projects', 'compiler-rt') STAMP_FILE = os.path.join(LLVM_BUILD_DIR, 'cr_build_revision') LLVM_REPO_URL='https://llvm.org/svn/llvm-project' if 'LLVM_REPO_URL' in os.environ: LLVM_REPO_URL = os.environ['LLVM_REPO_URL'] def ReadStampFile(): """Return the contents of the stamp file, or '' if it doesn't exist.""" try: with open(STAMP_FILE, 'r') as f: return f.read(); except IOError: return '' def WriteStampFile(s): """Write s to the stamp file.""" if not os.path.exists(LLVM_BUILD_DIR): os.makedirs(LLVM_BUILD_DIR) with open(STAMP_FILE, 'w') as f: f.write(s) def RmTree(dir): """Delete dir.""" def ChmodAndRetry(func, path, _): # Subversion can leave read-only files around. if not os.access(path, os.W_OK): os.chmod(path, stat.S_IWUSR) return func(path) raise shutil.rmtree(dir, onerror=ChmodAndRetry) def ClobberChromiumBuildFiles(): """Clobber Chomium build files.""" print 'Clobbering Chromium build files...' out_dir = os.path.join(CHROMIUM_DIR, 'out') if os.path.isdir(out_dir): RmTree(out_dir) print 'Removed Chromium out dir: %s.' % (out_dir) def RunCommand(command, fail_hard=True): """Run command and return success (True) or failure; or if fail_hard is True, exit on failure.""" print 'Running %s' % (str(command)) if subprocess.call(command, shell=True) == 0: return True print 'Failed.' 
if fail_hard: sys.exit(1) return False def CopyFile(src, dst): """Copy a file from src to dst.""" shutil.copy(src, dst) print "Copying %s to %s" % (src, dst) def CopyDirectoryContents(src, dst, filename_filter=None): """Copy the files from directory src to dst with an optional filename filter.""" if not os.path.exists(dst): os.makedirs(dst) for root, _, files in os.walk(src): for f in files: if filename_filter and not re.match(filename_filter, f): continue CopyFile(os.path.join(root, f), dst) def Checkout(name, url, dir): """Checkout the SVN module at url into dir. Use name for the log message.""" print "Checking out %s r%s into '%s'" % (name, LLVM_WIN_REVISION, dir) command = ['svn', 'checkout', '--force', url + '@' + LLVM_WIN_REVISION, dir] if RunCommand(command, fail_hard=False): return if os.path.isdir(dir): print "Removing %s." % (dir) RmTree(dir) print "Retrying." RunCommand(command) def AddCMakeToPath(): """Look for CMake and add it to PATH if it's not there already.""" try: # First check if cmake is already on PATH. subprocess.call(['cmake', '--version']) return except OSError as e: if e.errno != os.errno.ENOENT: raise cmake_locations = ['C:\\Program Files (x86)\\CMake\\bin', 'C:\\Program Files (x86)\\CMake 2.8\\bin'] for d in cmake_locations: if os.path.isdir(d): os.environ['PATH'] = os.environ.get('PATH', '') + os.pathsep + d return print 'Failed to find CMake!' sys.exit(1) vs_version = None def GetVSVersion(): global vs_version if vs_version: return vs_version # Try using the toolchain in depot_tools. # This sets environment variables used by SelectVisualStudioVersion below. sys.path.append(os.path.join(CHROMIUM_DIR, 'build')) import vs_toolchain vs_toolchain.SetEnvironmentAndGetRuntimeDllDirs() # Use gyp to find the MSVS installation, either in depot_tools as per above, # or a system-wide installation otherwise. 
sys.path.append(os.path.join(CHROMIUM_DIR, 'tools', 'gyp', 'pylib')) import gyp.MSVSVersion vs_version = gyp.MSVSVersion.SelectVisualStudioVersion('2013') return vs_version def SubversionCmakeArg(): # Since cmake's find_program can only find .exe and .com, # svn.bat in depot_tools will be ignored. default_pathext = ('.com', '.exe', '.bat', '.cmd') for path in os.environ.get('PATH', '').split(os.pathsep): for ext in default_pathext: candidate = os.path.join(path, 'svn' + ext) if os.path.isfile(candidate): return '-DSubversion_SVN_EXECUTABLE=%s' % candidate return '' def UpdateClang(): print 'Updating Clang to %s...' % (LLVM_WIN_REVISION) if LLVM_WIN_REVISION != 'HEAD' and ReadStampFile() == LLVM_WIN_REVISION: print 'Already up to date.' return 0 AddCMakeToPath() ClobberChromiumBuildFiles() # Reset the stamp file in case the build is unsuccessful. WriteStampFile('') Checkout('LLVM', LLVM_REPO_URL + '/llvm/trunk', LLVM_DIR) Checkout('Clang', LLVM_REPO_URL + '/cfe/trunk', CLANG_DIR) Checkout('LLD', LLVM_REPO_URL + '/lld/trunk', LLD_DIR) Checkout('compiler-rt', LLVM_REPO_URL + '/compiler-rt/trunk', COMPILER_RT_DIR) if not os.path.exists(LLVM_BUILD_DIR): os.makedirs(LLVM_BUILD_DIR) os.chdir(LLVM_BUILD_DIR) RunCommand(GetVSVersion().SetupScript('x64') + ['&&', 'cmake', '-GNinja', '-DCMAKE_BUILD_TYPE=Release', '-DLLVM_ENABLE_ASSERTIONS=ON', SubversionCmakeArg(), LLVM_DIR]) RunCommand(GetVSVersion().SetupScript('x64') + ['&&', 'ninja', 'all']) # Do an x86 build of compiler-rt to get the 32-bit ASan run-time. # TODO(hans): Remove once the regular build above produces this. 
if not os.path.exists(COMPILER_RT_BUILD_DIR): os.makedirs(COMPILER_RT_BUILD_DIR) os.chdir(COMPILER_RT_BUILD_DIR) RunCommand(GetVSVersion().SetupScript('x86') + ['&&', 'cmake', '-GNinja', '-DCMAKE_BUILD_TYPE=Release', '-DLLVM_ENABLE_ASSERTIONS=ON', LLVM_DIR]) RunCommand(GetVSVersion().SetupScript('x86') + ['&&', 'ninja', 'compiler-rt']) # TODO(hans): Make this (and the .gypi file) version number independent. asan_rt_lib_src_dir = os.path.join(COMPILER_RT_BUILD_DIR, 'lib', 'clang', '3.6.0', 'lib', 'windows') asan_rt_lib_dst_dir = os.path.join(LLVM_BUILD_DIR, 'lib', 'clang', '3.6.0', 'lib', 'windows') CopyDirectoryContents(asan_rt_lib_src_dir, asan_rt_lib_dst_dir, r'^.*-i386\.lib$') # TODO(hans): Remove when LLVM_WIN_REVISION is updated. # Old versions of compiler-rt will leave the asan dll in bin/ asan_rt_bin_src_dir = os.path.join(COMPILER_RT_BUILD_DIR, 'bin') CopyDirectoryContents(asan_rt_bin_src_dir, asan_rt_lib_dst_dir, r'^.*-i386\.dll$') CopyDirectoryContents(asan_rt_lib_src_dir, asan_rt_lib_dst_dir, r'^.*-i386\.dll$') CopyFile(os.path.join(asan_rt_lib_src_dir, '..', '..', 'asan_blacklist.txt'), os.path.join(asan_rt_lib_dst_dir, '..', '..')) # Make an extra copy of the sanitizer headers, to be put on the include path # of the fallback compiler. sanitizer_include_dir = os.path.join(LLVM_BUILD_DIR, 'lib', 'clang', '3.6.0', 'include', 'sanitizer') aux_sanitizer_include_dir = os.path.join(LLVM_BUILD_DIR, 'lib', 'clang', '3.6.0', 'include_sanitizer', 'sanitizer') if not os.path.exists(aux_sanitizer_include_dir): os.makedirs(aux_sanitizer_include_dir) for _, _, files in os.walk(sanitizer_include_dir): for f in files: CopyFile(os.path.join(sanitizer_include_dir, f), aux_sanitizer_include_dir) WriteStampFile(LLVM_WIN_REVISION) print 'Clang update was successful.' return 0 def main(): if not sys.platform in ['win32', 'cygwin']: # For non-Windows, fall back to update.sh. # TODO(hans): Make update.py replace update.sh completely. # This script is called by gclient. 
gclient opens its hooks subprocesses # with (stdout=subprocess.PIPE, stderr=subprocess.STDOUT) and then does # custom output processing that breaks printing '\r' characters for # single-line updating status messages as printed by curl and wget. # Work around this by setting stderr of the update.sh process to stdin (!): # gclient doesn't redirect stdin, and while stdin itself is read-only, a # dup()ed sys.stdin is writable, try # fd2 = os.dup(sys.stdin.fileno()); os.write(fd2, 'hi') # TODO: Fix gclient instead, http://crbug.com/95350 return subprocess.call( [os.path.join(os.path.dirname(__file__), 'update.sh')] + sys.argv[1:], stderr=os.fdopen(os.dup(sys.stdin.fileno()))) if not re.search(r'\b(clang|asan)=1', os.environ.get('GYP_DEFINES', '')): print 'Skipping Clang update (clang=1 was not set in GYP_DEFINES).' return 0 if re.search(r'\b(make_clang_dir)=', os.environ.get('GYP_DEFINES', '')): print 'Skipping Clang update (make_clang_dir= was set in GYP_DEFINES).' return 0 return UpdateClang() if __name__ == '__main__': sys.exit(main())
bsd-3-clause
tylertian/Openstack
openstack F/python-keystoneclient/keystoneclient/v2_0/shell.py
3
19264
# Copyright 2010 Jacob Kaplan-Moss # Copyright 2011 OpenStack LLC. # Copyright 2011 Nebula, Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import argparse import getpass import sys import six from keystoneclient import utils from keystoneclient.v2_0 import client CLIENT_CLASS = client.Client def require_service_catalog(f): msg = ('Configuration error: Client configured to run without a service ' 'catalog. Run the client using --os-auth-url or OS_AUTH_URL, ' 'instead of --os-endpoint or OS_SERVICE_ENDPOINT, for example.') def wrapped(kc, args): if not kc.has_service_catalog(): raise Exception(msg) return f(kc, args) # Change __doc__ attribute back to origin function's __doc__ wrapped.__doc__ = f.__doc__ return wrapped @utils.arg('--tenant', '--tenant-id', metavar='<tenant>', help='Tenant; lists all users if not specified') @utils.arg('--tenant_id', help=argparse.SUPPRESS) def do_user_list(kc, args): """List users.""" if args.tenant: tenant_id = utils.find_resource(kc.tenants, args.tenant).id else: tenant_id = None users = kc.users.list(tenant_id=tenant_id) utils.print_list(users, ['id', 'name', 'enabled', 'email'], order_by='name') @utils.arg('user', metavar='<user>', help='Name or ID of user to display') def do_user_get(kc, args): """Display user details.""" user = utils.find_resource(kc.users, args.user) utils.print_dict(user._info) @utils.arg('--name', metavar='<user-name>', required=True, help='New user name (must be unique)') @utils.arg('--tenant', 
'--tenant-id', metavar='<tenant>', help='New user default tenant') @utils.arg('--tenant_id', help=argparse.SUPPRESS) @utils.arg('--pass', metavar='<pass>', dest='passwd', help='New user password') @utils.arg('--email', metavar='<email>', help='New user email address') @utils.arg('--enabled', metavar='<true|false>', default=True, help='Initial user enabled status (default true)') def do_user_create(kc, args): """Create new user""" if args.tenant: tenant_id = utils.find_resource(kc.tenants, args.tenant).id elif args.tenant_id: tenant_id = args.tenant_id else: tenant_id = None user = kc.users.create(args.name, args.passwd, args.email, tenant_id=tenant_id, enabled=utils.string_to_bool(args.enabled)) utils.print_dict(user._info) @utils.arg('--name', metavar='<user-name>', help='Desired new user name') @utils.arg('--email', metavar='<email>', help='Desired new email address') @utils.arg('--enabled', metavar='<true|false>', help='Enable or disable user') @utils.arg('user', metavar='<user>', help='Name or ID of user to update') def do_user_update(kc, args): """Update user's name, email, and enabled status.""" kwargs = {} if args.name: kwargs['name'] = args.name if args.email: kwargs['email'] = args.email if args.enabled: kwargs['enabled'] = utils.string_to_bool(args.enabled) if not len(kwargs): print("User not updated, no arguments present.") return user = utils.find_resource(kc.users, args.user) try: kc.users.update(user, **kwargs) print('User has been updated.') except Exception as e: print('Unable to update user: %s' % e) @utils.arg('--pass', metavar='<password>', dest='passwd', required=False, help='Desired new password') @utils.arg('user', metavar='<user>', help='Name or ID of user to update password') def do_user_password_update(kc, args): """Update user password.""" user = utils.find_resource(kc.users, args.user) new_passwd = args.passwd or utils.prompt_for_password() if new_passwd is None: msg = ("\nPlease specify password using the --pass option " "or using the 
prompt") sys.exit(msg) kc.users.update_password(user, new_passwd) @utils.arg('--current-password', metavar='<current-password>', dest='currentpasswd', required=False, help='Current password, ' 'Defaults to the password as set by --os-password or ' 'OS_PASSWORD') @utils.arg('--new-password ', metavar='<new-password>', dest='newpasswd', required=False, help='Desired new password') def do_password_update(kc, args): """Update own password.""" # we are prompting for these passwords if they are not passed in # this gives users the option not to have their password # appear in bash history etc.. currentpasswd = args.os_password if args.currentpasswd is not None: currentpasswd = args.currentpasswd if currentpasswd is None: currentpasswd = getpass.getpass('Current Password: ') newpasswd = args.newpasswd while newpasswd is None: passwd1 = getpass.getpass('New Password: ') passwd2 = getpass.getpass('Repeat New Password: ') if passwd1 == passwd2: newpasswd = passwd1 kc.users.update_own_password(currentpasswd, newpasswd) if args.os_password != newpasswd: print("You should update the password you are using to authenticate " "to match your new password") @utils.arg('user', metavar='<user>', help='Name or ID of user to delete') def do_user_delete(kc, args): """Delete user""" user = utils.find_resource(kc.users, args.user) kc.users.delete(user) def do_tenant_list(kc, args): """List all tenants.""" tenants = kc.tenants.list() utils.print_list(tenants, ['id', 'name', 'enabled'], order_by='name') @utils.arg('tenant', metavar='<tenant>', help='Name or ID of tenant to display') def do_tenant_get(kc, args): """Display tenant details.""" tenant = utils.find_resource(kc.tenants, args.tenant) utils.print_dict(tenant._info) @utils.arg('--name', metavar='<tenant-name>', required=True, help='New tenant name (must be unique)') @utils.arg('--description', metavar='<tenant-description>', default=None, help='Description of new tenant (default is none)') @utils.arg('--enabled', 
metavar='<true|false>', default=True, help='Initial tenant enabled status (default true)') def do_tenant_create(kc, args): """Create new tenant.""" tenant = kc.tenants.create(args.name, description=args.description, enabled=utils.string_to_bool(args.enabled)) utils.print_dict(tenant._info) @utils.arg('--name', metavar='<tenant_name>', help='Desired new name of tenant') @utils.arg('--description', metavar='<tenant-description>', default=None, help='Desired new description of tenant') @utils.arg('--enabled', metavar='<true|false>', help='Enable or disable tenant') @utils.arg('tenant', metavar='<tenant>', help='Name or ID of tenant to update') def do_tenant_update(kc, args): """Update tenant name, description, enabled status.""" tenant = utils.find_resource(kc.tenants, args.tenant) kwargs = {} if args.name: kwargs.update({'name': args.name}) if args.description is not None: kwargs.update({'description': args.description}) if args.enabled: kwargs.update({'enabled': utils.string_to_bool(args.enabled)}) if kwargs == {}: print("Tenant not updated, no arguments present.") return tenant.update(**kwargs) @utils.arg('tenant', metavar='<tenant>', help='Name or ID of tenant to delete') def do_tenant_delete(kc, args): """Delete tenant.""" tenant = utils.find_resource(kc.tenants, args.tenant) kc.tenants.delete(tenant) @utils.arg('--name', metavar='<name>', required=True, help='Name of new service (must be unique)') @utils.arg('--type', metavar='<type>', required=True, help='Service type (one of: identity, compute, network, ' 'image, or object-store)') @utils.arg('--description', metavar='<service-description>', help='Description of service') def do_service_create(kc, args): """Add service to Service Catalog.""" service = kc.services.create(args.name, args.type, args.description) utils.print_dict(service._info) def do_service_list(kc, args): """List all services in Service Catalog.""" services = kc.services.list() utils.print_list(services, ['id', 'name', 'type', 'description'], 
order_by='name') @utils.arg('service', metavar='<service>', help='Name or ID of service to display') def do_service_get(kc, args): """Display service from Service Catalog.""" service = utils.find_resource(kc.services, args.service) utils.print_dict(service._info) @utils.arg('service', metavar='<service>', help='Name or ID of service to delete') def do_service_delete(kc, args): """Delete service from Service Catalog.""" service = utils.find_resource(kc.services, args.service) kc.services.delete(service.id) def do_role_list(kc, args): """List all roles.""" roles = kc.roles.list() utils.print_list(roles, ['id', 'name'], order_by='name') @utils.arg('role', metavar='<role>', help='Name or ID of role to display') def do_role_get(kc, args): """Display role details.""" role = utils.find_resource(kc.roles, args.role) utils.print_dict(role._info) @utils.arg('--name', metavar='<role-name>', required=True, help='Name of new role') def do_role_create(kc, args): """Create new role.""" role = kc.roles.create(args.name) utils.print_dict(role._info) @utils.arg('role', metavar='<role>', help='Name or ID of role to delete') def do_role_delete(kc, args): """Delete role.""" role = utils.find_resource(kc.roles, args.role) kc.roles.delete(role) @utils.arg('--user', '--user-id', '--user_id', metavar='<user>', required=True, help='Name or ID of user') @utils.arg('--role', '--role-id', '--role_id', metavar='<role>', required=True, help='Name or ID of role') @utils.arg('--tenant', '--tenant-id', metavar='<tenant>', help='Name or ID of tenant') @utils.arg('--tenant_id', help=argparse.SUPPRESS) def do_user_role_add(kc, args): """Add role to user""" user = utils.find_resource(kc.users, args.user) role = utils.find_resource(kc.roles, args.role) if args.tenant: tenant = utils.find_resource(kc.tenants, args.tenant) elif args.tenant_id: tenant = args.tenant_id else: tenant = None kc.roles.add_user_role(user, role, tenant) @utils.arg('--user', '--user-id', '--user_id', metavar='<user>', 
required=True, help='Name or ID of user') @utils.arg('--role', '--role-id', '--role_id', metavar='<role>', required=True, help='Name or ID of role') @utils.arg('--tenant', '--tenant-id', metavar='<tenant>', help='Name or ID of tenant') @utils.arg('--tenant_id', help=argparse.SUPPRESS) def do_user_role_remove(kc, args): """Remove role from user""" user = utils.find_resource(kc.users, args.user) role = utils.find_resource(kc.roles, args.role) if args.tenant: tenant = utils.find_resource(kc.tenants, args.tenant) elif args.tenant_id: tenant = args.tenant_id else: tenant = None kc.roles.remove_user_role(user, role, tenant) @utils.arg('--user', '--user-id', metavar='<user>', help='List roles granted to a user') @utils.arg('--user_id', help=argparse.SUPPRESS) @utils.arg('--tenant', '--tenant-id', metavar='<tenant>', help='List roles granted on a tenant') @utils.arg('--tenant_id', help=argparse.SUPPRESS) def do_user_role_list(kc, args): """List roles granted to a user""" if args.tenant: tenant_id = utils.find_resource(kc.tenants, args.tenant).id elif args.tenant_id: tenant_id = args.tenant_id else: # use the authenticated tenant id as a default tenant_id = kc.auth_tenant_id if args.user: user_id = utils.find_resource(kc.users, args.user).id elif args.user_id: user_id = args.user_id else: # use the authenticated user id as a default user_id = kc.auth_user_id roles = kc.roles.roles_for_user(user=user_id, tenant=tenant_id) # this makes the command output a bit more intuitive for role in roles: role.user_id = user_id role.tenant_id = tenant_id utils.print_list(roles, ['id', 'name', 'user_id', 'tenant_id'], order_by='name') @utils.arg('--user-id', metavar='<user-id>', help='User ID') @utils.arg('--user_id', help=argparse.SUPPRESS) @utils.arg('--tenant-id', metavar='<tenant-id>', help='Tenant ID') @utils.arg('--tenant_id', help=argparse.SUPPRESS) def do_ec2_credentials_create(kc, args): """Create EC2-compatible credentials for user per tenant.""" if not args.tenant_id: # use the 
authenticated tenant id as a default args.tenant_id = kc.auth_tenant_id if not args.user_id: # use the authenticated user id as a default args.user_id = kc.auth_user_id credentials = kc.ec2.create(args.user_id, args.tenant_id) utils.print_dict(credentials._info) @utils.arg('--user-id', metavar='<user-id>', help='User ID') @utils.arg('--user_id', help=argparse.SUPPRESS) @utils.arg('--access', metavar='<access-key>', required=True, help='Access Key') def do_ec2_credentials_get(kc, args): """Display EC2-compatible credentials.""" if not args.user_id: # use the authenticated user id as a default args.user_id = kc.auth_user_id cred = kc.ec2.get(args.user_id, args.access) if cred: utils.print_dict(cred._info) @utils.arg('--user-id', metavar='<user-id>', help='User ID') @utils.arg('--user_id', help=argparse.SUPPRESS) def do_ec2_credentials_list(kc, args): """List EC2-compatible credentials for a user""" if not args.user_id: # use the authenticated user id as a default args.user_id = kc.auth_user_id credentials = kc.ec2.list(args.user_id) for cred in credentials: try: cred.tenant = getattr(kc.tenants.get(cred.tenant_id), 'name') except Exception: # FIXME(dtroyer): Retrieving the tenant name fails for normal # users; stuff in the tenant_id instead. 
cred.tenant = cred.tenant_id utils.print_list(credentials, ['tenant', 'access', 'secret']) @utils.arg('--user-id', metavar='<user-id>', help='User ID') @utils.arg('--user_id', help=argparse.SUPPRESS) @utils.arg('--access', metavar='<access-key>', required=True, help='Access Key') def do_ec2_credentials_delete(kc, args): """Delete EC2-compatible credentials.""" if not args.user_id: # use the authenticated user id as a default args.user_id = kc.auth_user_id try: kc.ec2.delete(args.user_id, args.access) print('Credential has been deleted.') except Exception as e: print('Unable to delete credential: %s' % e) @utils.arg('--service', metavar='<service-type>', default=None, help='Service type to return') @require_service_catalog def do_catalog(kc, args): """List service catalog, possibly filtered by service.""" endpoints = kc.service_catalog.get_endpoints(service_type=args.service) for (service, service_endpoints) in six.iteritems(endpoints): if len(service_endpoints) > 0: print("Service: %s" % service) for ep in service_endpoints: utils.print_dict(ep) @utils.arg('--service', metavar='<service-type>', required=True, help='Service type to select') @utils.arg('--endpoint-type', metavar='<endpoint-type>', default='publicURL', help='Endpoint type to select') @utils.arg('--endpoint_type', default='publicURL', help=argparse.SUPPRESS) @utils.arg('--attr', metavar='<service-attribute>', help='Service attribute to match for selection') @utils.arg('--value', metavar='<value>', help='Value of attribute to match') @require_service_catalog def do_endpoint_get(kc, args): """Find endpoint filtered by a specific attribute or service type.""" kwargs = { 'service_type': args.service, 'endpoint_type': args.endpoint_type, } if args.attr and args.value: kwargs.update({'attr': args.attr, 'filter_value': args.value}) elif args.attr or args.value: print('Both --attr and --value required.') return url = kc.service_catalog.url_for(**kwargs) utils.print_dict({'%s.%s' % (args.service, 
args.endpoint_type): url}) def do_endpoint_list(kc, args): """List configured service endpoints.""" endpoints = kc.endpoints.list() utils.print_list(endpoints, ['id', 'region', 'publicurl', 'internalurl', 'adminurl', 'service_id']) @utils.arg('--region', metavar='<endpoint-region>', help='Endpoint region', default='regionOne') @utils.arg('--service', '--service-id', '--service_id', metavar='<service>', required=True, help='Name or ID of service associated with Endpoint') @utils.arg('--publicurl', metavar='<public-url>', help='Public URL endpoint') @utils.arg('--adminurl', metavar='<admin-url>', help='Admin URL endpoint') @utils.arg('--internalurl', metavar='<internal-url>', help='Internal URL endpoint') def do_endpoint_create(kc, args): """Create a new endpoint associated with a service.""" service_id = utils.find_resource(kc.services, args.service).id endpoint = kc.endpoints.create(args.region, service_id, args.publicurl, args.adminurl, args.internalurl) utils.print_dict(endpoint._info) @utils.arg('id', metavar='<endpoint-id>', help='ID of endpoint to delete') def do_endpoint_delete(kc, args): """Delete a service endpoint.""" try: kc.endpoints.delete(args.id) print('Endpoint has been deleted.') except Exception: print('Unable to delete endpoint.') @utils.arg('--wrap', metavar='<integer>', default=0, help='wrap PKI tokens to a specified length, or 0 to disable') @require_service_catalog def do_token_get(kc, args): """Display the current user token.""" utils.print_dict(kc.service_catalog.get_token(), wrap=int(args.wrap))
apache-2.0
quantumlib/OpenFermion
src/openfermion/circuits/trotter/hubbard_trotter_error_test.py
1
10858
# Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Tests for hubbard_trotter_error.py.""" import unittest from openfermion.ops.operators import FermionOperator from openfermion.hamiltonians import fermi_hubbard from openfermion.circuits.trotter.hubbard_trotter_error import ( simulation_ordered_grouped_hubbard_terms_with_info) from openfermion.circuits.trotter.low_depth_trotter_error import ( low_depth_second_order_trotter_error_bound, low_depth_second_order_trotter_error_operator) from openfermion.transforms.opconversions import normal_ordered class ErrorOperatorTest(unittest.TestCase): def test_error_operator(self): FO = FermionOperator terms = [] for i in range(4): terms.append(FO(((i, 1), ((i + 1) % 4, 0)), -0.0123370055014)) terms.append(FO(((i, 1), ((i - 1) % 4, 0)), -0.0123370055014)) if i in [0, 2]: terms.append( normal_ordered( FO(((i + 1, 1), (i, 1), (i + 1, 0), (i, 0)), 3.18309886184))) if i < 2: terms.append( normal_ordered( FO(((i, 1), ((i + 2) % 4, 1), (i, 0), ((i + 2) % 4, 0)), 22.2816920329))) self.assertAlmostEqual( low_depth_second_order_trotter_error_operator(terms).terms[((3, 1), (2, 1), (1, 1), (2, 0), (1, 0), (0, 0))], 0.75) class ErrorBoundTest(unittest.TestCase): def test_error_bound_superset_hubbard(self): FO = FermionOperator terms = [] for i in [0, 2]: terms.append( FO(((i, 1), (i + 1, 0)), -0.01) + FO(((i + 1, 1), (i, 0)), -0.01)) for i in [0, 1]: terms.append( FO(((i, 1), (i + 2, 0)), -0.03) + FO(((i + 2, 1), (i, 0)), -0.03)) terms.append(FO(((i + 2, 1), 
(i, 1), (i + 2, 0), (i, 0)), 3.)) indices = [ set([0, 1]), set([2, 3]), set([0, 2]), set([0, 2]), set([1, 3]), set([1, 3]) ] is_hopping_operator = [True, True, True, False, True, False] self.assertAlmostEqual( low_depth_second_order_trotter_error_bound(terms, indices, is_hopping_operator), 0.0608) def test_error_bound_using_info_even_side_length(self): # Generate the Hamiltonian. hamiltonian = normal_ordered( fermi_hubbard(4, 4, 0.5, 0.2, periodic=False)) hamiltonian.compress() # Unpack result into terms, indices they act on, and whether they're # hopping operators. result = simulation_ordered_grouped_hubbard_terms_with_info(hamiltonian) terms, indices, is_hopping = result self.assertAlmostEqual( low_depth_second_order_trotter_error_bound(terms, indices, is_hopping), 13.59) def test_error_bound_using_info_odd_side_length_verbose(self): # Generate the Hamiltonian. hamiltonian = normal_ordered( fermi_hubbard(5, 5, -0.5, 0.3, periodic=False)) hamiltonian.compress() # Unpack result into terms, indices they act on, and whether they're # hopping operators. result = simulation_ordered_grouped_hubbard_terms_with_info(hamiltonian) terms, indices, is_hopping = result self.assertAlmostEqual( 32.025, low_depth_second_order_trotter_error_bound(terms, indices, is_hopping, verbose=True)) class OrderedHubbardTermsMoreInfoTest(unittest.TestCase): def test_sum_of_ordered_terms_equals_full_side_length_2_hopping_only(self): hamiltonian = normal_ordered( fermi_hubbard(2, 2, 1., 0.0, periodic=False)) hamiltonian.compress() # Unpack result into terms, indices they act on, and whether they're # hopping operators. 
result = simulation_ordered_grouped_hubbard_terms_with_info(hamiltonian) terms, _, _ = result terms_total = sum(terms, FermionOperator.zero()) self.assertTrue(terms_total == hamiltonian) def test_sum_of_ordered_terms_equals_full_hamiltonian_even_side_len(self): hamiltonian = normal_ordered( fermi_hubbard(4, 4, 10.0, 0.3, periodic=False)) hamiltonian.compress() terms = simulation_ordered_grouped_hubbard_terms_with_info( hamiltonian)[0] terms_total = sum(terms, FermionOperator.zero()) self.assertTrue(terms_total == hamiltonian) def test_sum_of_ordered_terms_equals_full_hamiltonian_odd_side_len(self): hamiltonian = normal_ordered( fermi_hubbard(5, 5, 1.0, -0.3, periodic=False)) hamiltonian.compress() terms = simulation_ordered_grouped_hubbard_terms_with_info( hamiltonian)[0] terms_total = sum(terms, FermionOperator.zero()) self.assertTrue(terms_total == hamiltonian) def test_correct_indices_terms_with_info(self): hamiltonian = normal_ordered( fermi_hubbard(5, 5, 1., -1., periodic=False)) hamiltonian.compress() # Unpack result into terms, indices they act on, and whether they're # hopping operators. result = simulation_ordered_grouped_hubbard_terms_with_info(hamiltonian) terms, indices, _ = result for i in range(len(terms)): term = list(terms[i].terms) term_indices = set() for single_term in term: term_indices = term_indices.union( [single_term[j][0] for j in range(len(single_term))]) self.assertEqual(term_indices, indices[i]) def test_is_hopping_operator_terms_with_info(self): hamiltonian = normal_ordered( fermi_hubbard(5, 5, 1., -1., periodic=False)) hamiltonian.compress() # Unpack result into terms, indices they act on, and whether they're # hopping operators. 
result = simulation_ordered_grouped_hubbard_terms_with_info(hamiltonian) terms, _, is_hopping = result for i in range(len(terms)): single_term = list(terms[i].terms)[0] is_hopping_term = not (single_term[1][1] or single_term[0][0] == single_term[1][0]) self.assertEqual(is_hopping_term, is_hopping[i]) def test_total_length_side_length_2_hopping_only(self): hamiltonian = normal_ordered( fermi_hubbard(2, 2, 1., 0.0, periodic=False)) hamiltonian.compress() # Unpack result into terms, indices they act on, and whether they're # hopping operators. result = simulation_ordered_grouped_hubbard_terms_with_info(hamiltonian) terms, _, _ = result self.assertEqual(len(terms), 8) def test_total_length_odd_side_length_hopping_only(self): hamiltonian = normal_ordered( fermi_hubbard(3, 3, 1., 0.0, periodic=False)) hamiltonian.compress() # Unpack result into terms, indices they act on, and whether they're # hopping operators. result = simulation_ordered_grouped_hubbard_terms_with_info(hamiltonian) terms, _, _ = result self.assertEqual(len(terms), 24) def test_total_length_even_side_length_hopping_only(self): hamiltonian = normal_ordered( fermi_hubbard(4, 4, 1., 0.0, periodic=False)) hamiltonian.compress() # Unpack result into terms, indices they act on, and whether they're # hopping operators. result = simulation_ordered_grouped_hubbard_terms_with_info(hamiltonian) terms, _, _ = result self.assertEqual(len(terms), 48) def test_total_length_side_length_2_onsite_only(self): hamiltonian = normal_ordered( fermi_hubbard(2, 2, 0.0, 1., periodic=False)) hamiltonian.compress() # Unpack result into terms, indices they act on, and whether they're # hopping operators. 
result = simulation_ordered_grouped_hubbard_terms_with_info(hamiltonian) terms, _, _ = result self.assertEqual(len(terms), 4) def test_total_length_odd_side_length_onsite_only(self): hamiltonian = normal_ordered( fermi_hubbard(3, 3, 0.0, 1., periodic=False)) hamiltonian.compress() # Unpack result into terms, indices they act on, and whether they're # hopping operators. result = simulation_ordered_grouped_hubbard_terms_with_info(hamiltonian) terms, _, _ = result self.assertEqual(len(terms), 9) def test_total_length_even_side_length_onsite_only(self): hamiltonian = normal_ordered( fermi_hubbard(4, 4, 0., -0.3, periodic=False)) hamiltonian.compress() # Unpack result into terms, indices they act on, and whether they're # hopping operators. result = simulation_ordered_grouped_hubbard_terms_with_info(hamiltonian) terms, _, _ = result self.assertEqual(len(terms), 16) def test_total_length_odd_side_length_full_hubbard(self): hamiltonian = normal_ordered( fermi_hubbard(5, 5, -1., -0.3, periodic=False)) hamiltonian.compress() # Unpack result into terms, indices they act on, and whether they're # hopping operators. result = simulation_ordered_grouped_hubbard_terms_with_info(hamiltonian) terms, _, _ = result self.assertEqual(len(terms), 105)
apache-2.0
Infinidat/pyvmomi
tests/test_container_view.py
12
1730
# VMware vSphere Python SDK # Copyright (c) 2008-2015 VMware, Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import tests import vcr from pyVim import connect from pyVmomi import vim class ContainerViewTests(tests.VCRTestBase): @vcr.use_cassette('basic_container_view.yaml', cassette_library_dir=tests.fixtures_path, record_mode='once') def test_basic_container_view(self): # see: http://python3porting.com/noconv.html si = connect.SmartConnect(host='vcsa', user='my_user', pwd='my_password') content = si.RetrieveContent() datacenter_object_view = content.viewManager.CreateContainerView( content.rootFolder, [vim.Datacenter], True) for datacenter in datacenter_object_view.view: datastores = datacenter.datastore # NOTE (hartsocks): the object handle here is a managed object # reference, until we ask for more details, no other detail is # transmitted. Our sample fixture is quite small. self.assertEqual(1, len(datastores)) datacenter_object_view.Destroy()
apache-2.0
gnychis/grforwarder
gnuradio-core/src/python/gnuradio/gr/qa_wavefile.py
10
1860
#!/usr/bin/env python # # Copyright 2008,2010 Free Software Foundation, Inc. # # This file is part of GNU Radio # # GNU Radio is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 3, or (at your option) # any later version. # # GNU Radio is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with GNU Radio; see the file COPYING. If not, write to # the Free Software Foundation, Inc., 51 Franklin Street, # Boston, MA 02110-1301, USA. # from gnuradio import gr, gr_unittest import os from os.path import getsize g_in_file = os.path.join (os.getenv ("srcdir"), "test_16bit_1chunk.wav") class test_wavefile(gr_unittest.TestCase): def setUp (self): self.tb = gr.top_block () def tearDown (self): self.tb = None def test_001_checkwavread (self): wf = gr.wavfile_source(g_in_file) self.assertEqual(wf.sample_rate(), 8000) def test_002_checkwavcopy (self): infile = g_in_file outfile = "test_out.wav" wf_in = gr.wavfile_source(infile) wf_out = gr.wavfile_sink(outfile, wf_in.channels(), wf_in.sample_rate(), wf_in.bits_per_sample()) self.tb.connect(wf_in, wf_out) self.tb.run() wf_out.close() self.assertEqual(getsize(infile), getsize(outfile)) in_f = file(infile, 'rb') out_f = file(outfile, 'rb') in_data = in_f.read() out_data = out_f.read() out_f.close() os.remove(outfile) self.assertEqual(in_data, out_data) if __name__ == '__main__': gr_unittest.run(test_wavefile, "test_wavefile.xml")
gpl-3.0
olapaola/olapaola-android-scripting
python/src/Lib/shelve.py
59
7866
"""Manage shelves of pickled objects. A "shelf" is a persistent, dictionary-like object. The difference with dbm databases is that the values (not the keys!) in a shelf can be essentially arbitrary Python objects -- anything that the "pickle" module can handle. This includes most class instances, recursive data types, and objects containing lots of shared sub-objects. The keys are ordinary strings. To summarize the interface (key is a string, data is an arbitrary object): import shelve d = shelve.open(filename) # open, with (g)dbm filename -- no suffix d[key] = data # store data at key (overwrites old data if # using an existing key) data = d[key] # retrieve a COPY of the data at key (raise # KeyError if no such key) -- NOTE that this # access returns a *copy* of the entry! del d[key] # delete data stored at key (raises KeyError # if no such key) flag = d.has_key(key) # true if the key exists; same as "key in d" list = d.keys() # a list of all existing keys (slow!) d.close() # close it Dependent on the implementation, closing a persistent dictionary may or may not be necessary to flush changes to disk. Normally, d[key] returns a COPY of the entry. This needs care when mutable entries are mutated: for example, if d[key] is a list, d[key].append(anitem) does NOT modify the entry d[key] itself, as stored in the persistent mapping -- it only modifies the copy, which is then immediately discarded, so that the append has NO effect whatsoever. To append an item to d[key] in a way that will affect the persistent mapping, use: data = d[key] data.append(anitem) d[key] = data To avoid the problem with mutable entries, you may pass the keyword argument writeback=True in the call to shelve.open. When you use: d = shelve.open(filename, writeback=True) then d keeps a cache of all entries you access, and writes them all back to the persistent mapping when you call d.close(). This ensures that such usage as d[key].append(anitem) works as intended. 
However, using keyword argument writeback=True may consume vast amount of memory for the cache, and it may make d.close() very slow, if you access many of d's entries after opening it in this way: d has no way to check which of the entries you access are mutable and/or which ones you actually mutate, so it must cache, and write back at close, all of the entries that you access. You can call d.sync() to write back all the entries in the cache, and empty the cache (d.sync() also synchronizes the persistent dictionary on disk, if feasible). """ # Try using cPickle and cStringIO if available. try: from cPickle import Pickler, Unpickler except ImportError: from pickle import Pickler, Unpickler try: from cStringIO import StringIO except ImportError: from StringIO import StringIO import UserDict __all__ = ["Shelf","BsdDbShelf","DbfilenameShelf","open"] class _ClosedDict(UserDict.DictMixin): 'Marker for a closed dict. Access attempts raise a ValueError.' def closed(self, *args): raise ValueError('invalid operation on closed shelf') __getitem__ = __setitem__ = __delitem__ = keys = closed def __repr__(self): return '<Closed Dictionary>' class Shelf(UserDict.DictMixin): """Base class for shelf implementations. This is initialized with a dictionary-like object. See the module's __doc__ string for an overview of the interface. 
""" def __init__(self, dict, protocol=None, writeback=False): self.dict = dict if protocol is None: protocol = 0 self._protocol = protocol self.writeback = writeback self.cache = {} def keys(self): return self.dict.keys() def __len__(self): return len(self.dict) def has_key(self, key): return key in self.dict def __contains__(self, key): return key in self.dict def get(self, key, default=None): if key in self.dict: return self[key] return default def __getitem__(self, key): try: value = self.cache[key] except KeyError: f = StringIO(self.dict[key]) value = Unpickler(f).load() if self.writeback: self.cache[key] = value return value def __setitem__(self, key, value): if self.writeback: self.cache[key] = value f = StringIO() p = Pickler(f, self._protocol) p.dump(value) self.dict[key] = f.getvalue() def __delitem__(self, key): del self.dict[key] try: del self.cache[key] except KeyError: pass def close(self): self.sync() try: self.dict.close() except AttributeError: pass self.dict = _ClosedDict() def __del__(self): if not hasattr(self, 'writeback'): # __init__ didn't succeed, so don't bother closing return self.close() def sync(self): if self.writeback and self.cache: self.writeback = False for key, entry in self.cache.iteritems(): self[key] = entry self.writeback = True self.cache = {} if hasattr(self.dict, 'sync'): self.dict.sync() class BsdDbShelf(Shelf): """Shelf implementation using the "BSD" db interface. This adds methods first(), next(), previous(), last() and set_location() that have no counterpart in [g]dbm databases. The actual database must be opened using one of the "bsddb" modules "open" routines (i.e. bsddb.hashopen, bsddb.btopen or bsddb.rnopen) and passed to the constructor. See the module's __doc__ string for an overview of the interface. 
""" def __init__(self, dict, protocol=None, writeback=False): Shelf.__init__(self, dict, protocol, writeback) def set_location(self, key): (key, value) = self.dict.set_location(key) f = StringIO(value) return (key, Unpickler(f).load()) def next(self): (key, value) = self.dict.next() f = StringIO(value) return (key, Unpickler(f).load()) def previous(self): (key, value) = self.dict.previous() f = StringIO(value) return (key, Unpickler(f).load()) def first(self): (key, value) = self.dict.first() f = StringIO(value) return (key, Unpickler(f).load()) def last(self): (key, value) = self.dict.last() f = StringIO(value) return (key, Unpickler(f).load()) class DbfilenameShelf(Shelf): """Shelf implementation using the "anydbm" generic dbm interface. This is initialized with the filename for the dbm database. See the module's __doc__ string for an overview of the interface. """ def __init__(self, filename, flag='c', protocol=None, writeback=False): import anydbm Shelf.__init__(self, anydbm.open(filename, flag), protocol, writeback) def open(filename, flag='c', protocol=None, writeback=False): """Open a persistent dictionary for reading and writing. The filename parameter is the base filename for the underlying database. As a side-effect, an extension may be added to the filename and more than one file may be created. The optional flag parameter has the same interpretation as the flag parameter of anydbm.open(). The optional protocol parameter specifies the version of the pickle protocol (0, 1, or 2). See the module's __doc__ string for an overview of the interface. """ return DbfilenameShelf(filename, flag, protocol, writeback)
apache-2.0
normanjaeckel/OpenSlides
server/openslides/core/signals.py
7
4449
import sys from collections import defaultdict from typing import Dict, List from django.apps import apps from django.contrib.auth.models import Permission from django.contrib.contenttypes.models import ContentType from django.db.models import Q from django.dispatch import Signal from ..utils import logging from ..utils.autoupdate import AutoupdateElement, inform_elements # This signal is send when the migrate command is done. That means it is sent # after post_migrate sending and creating all Permission objects. Don't use it # for other things than dealing with Permission objects. post_permission_creation = Signal() # This signal is sent if a permission is changed (e. g. a group gets a new # permission). Connected receivers may yield Collections. permission_change = Signal() def delete_django_app_permissions(sender, **kwargs): """ Deletes the permissions, Django creates by default. Only required for auth, contenttypes and sessions. """ contenttypes = ContentType.objects.filter( Q(app_label="auth") | Q(app_label="contenttypes") | Q(app_label="sessions") ) Permission.objects.filter(content_type__in=contenttypes).delete() def cleanup_unused_permissions(sender, **kwargs): """ Deletes all permissions, that are not defined in any model meta class """ # Maps the content type id to codenames of perms for this content type. content_type_codename_mapping: Dict[int, List[str]] = defaultdict(list) # Maps content type ids to the content type. content_type_id_mapping = {} # Collect all perms from all apps. for model in apps.get_models(): content_type = ContentType.objects.get_for_model( model, for_concrete_model=False ) content_type_id_mapping[content_type.id] = content_type for perm in model._meta.permissions: content_type_codename_mapping[content_type.id].append(perm[0]) # Cleanup perms per content type. 
logger = logging.getLogger("openslides.core.migrations") for content_type_id, codenames in content_type_codename_mapping.items(): app_label = content_type_id_mapping[content_type_id].app_label unused_perms = Permission.objects.filter( content_type__pk=content_type_id ).exclude(codename__in=codenames) if unused_perms.exists(): verbose_permissions = ", ".join( [f"{app_label}.{perm.codename}" for perm in unused_perms.all()] ) logger.info(f"cleaning unused permissions: {verbose_permissions}") unused_perms.delete() def get_permission_change_data(sender, permissions, **kwargs): """ Yields all necessary Cachables if the respective permissions change. """ core_app = apps.get_app_config(app_label="core") for permission in permissions: if permission.content_type.app_label == core_app.label: if permission.codename == "can_see_projector": yield core_app.get_model("Projector") elif permission.codename == "can_manage_projector": yield core_app.get_model("ProjectorMessage") yield core_app.get_model("Countdown") yield core_app.get_model("ProjectionDefault") def autoupdate_for_many_to_many_relations(sender, instance, **kwargs): """ Send autoupdate for many-to-many related objects if the other side is deleted. """ # Hotfix for #4501: Skip autoupdate for many-to-many related objects # during migrations. if "migrate" in sys.argv: return m2m_fields = ( field for field in instance._meta.get_fields(include_hidden=True) if field.many_to_many and field.auto_created ) for field in m2m_fields: queryset = getattr(instance, field.get_accessor_name()).all() elements = [] for related_instance in queryset: if hasattr(related_instance, "get_root_rest_element"): # The related instance is or has a root rest element. # So lets send it via autoupdate. root_rest_element = related_instance.get_root_rest_element() elements.append( AutoupdateElement( collection_string=root_rest_element.get_collection_string(), id=root_rest_element.pk, ) ) inform_elements(elements)
mit
PLopezD/boilerplate-node
node_modules/node-sass/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings_test.py
395
65937
#!/usr/bin/env python # Copyright (c) 2012 Google Inc. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Unit tests for the MSVSSettings.py file.""" import StringIO import unittest import gyp.MSVSSettings as MSVSSettings class TestSequenceFunctions(unittest.TestCase): def setUp(self): self.stderr = StringIO.StringIO() def _ExpectedWarnings(self, expected): """Compares recorded lines to expected warnings.""" self.stderr.seek(0) actual = self.stderr.read().split('\n') actual = [line for line in actual if line] self.assertEqual(sorted(expected), sorted(actual)) def testValidateMSVSSettings_tool_names(self): """Tests that only MSVS tool names are allowed.""" MSVSSettings.ValidateMSVSSettings( {'VCCLCompilerTool': {}, 'VCLinkerTool': {}, 'VCMIDLTool': {}, 'foo': {}, 'VCResourceCompilerTool': {}, 'VCLibrarianTool': {}, 'VCManifestTool': {}, 'ClCompile': {}}, self.stderr) self._ExpectedWarnings([ 'Warning: unrecognized tool foo', 'Warning: unrecognized tool ClCompile']) def testValidateMSVSSettings_settings(self): """Tests that for invalid MSVS settings.""" MSVSSettings.ValidateMSVSSettings( {'VCCLCompilerTool': { 'AdditionalIncludeDirectories': 'folder1;folder2', 'AdditionalOptions': ['string1', 'string2'], 'AdditionalUsingDirectories': 'folder1;folder2', 'AssemblerListingLocation': 'a_file_name', 'AssemblerOutput': '0', 'BasicRuntimeChecks': '5', 'BrowseInformation': 'fdkslj', 'BrowseInformationFile': 'a_file_name', 'BufferSecurityCheck': 'true', 'CallingConvention': '-1', 'CompileAs': '1', 'DebugInformationFormat': '2', 'DefaultCharIsUnsigned': 'true', 'Detect64BitPortabilityProblems': 'true', 'DisableLanguageExtensions': 'true', 'DisableSpecificWarnings': 'string1;string2', 'EnableEnhancedInstructionSet': '1', 'EnableFiberSafeOptimizations': 'true', 'EnableFunctionLevelLinking': 'true', 'EnableIntrinsicFunctions': 'true', 'EnablePREfast': 'true', 'Enableprefast': 'bogus', 'ErrorReporting': 
'1', 'ExceptionHandling': '1', 'ExpandAttributedSource': 'true', 'FavorSizeOrSpeed': '1', 'FloatingPointExceptions': 'true', 'FloatingPointModel': '1', 'ForceConformanceInForLoopScope': 'true', 'ForcedIncludeFiles': 'file1;file2', 'ForcedUsingFiles': 'file1;file2', 'GeneratePreprocessedFile': '1', 'GenerateXMLDocumentationFiles': 'true', 'IgnoreStandardIncludePath': 'true', 'InlineFunctionExpansion': '1', 'KeepComments': 'true', 'MinimalRebuild': 'true', 'ObjectFile': 'a_file_name', 'OmitDefaultLibName': 'true', 'OmitFramePointers': 'true', 'OpenMP': 'true', 'Optimization': '1', 'PrecompiledHeaderFile': 'a_file_name', 'PrecompiledHeaderThrough': 'a_file_name', 'PreprocessorDefinitions': 'string1;string2', 'ProgramDataBaseFileName': 'a_file_name', 'RuntimeLibrary': '1', 'RuntimeTypeInfo': 'true', 'ShowIncludes': 'true', 'SmallerTypeCheck': 'true', 'StringPooling': 'true', 'StructMemberAlignment': '1', 'SuppressStartupBanner': 'true', 'TreatWChar_tAsBuiltInType': 'true', 'UndefineAllPreprocessorDefinitions': 'true', 'UndefinePreprocessorDefinitions': 'string1;string2', 'UseFullPaths': 'true', 'UsePrecompiledHeader': '1', 'UseUnicodeResponseFiles': 'true', 'WarnAsError': 'true', 'WarningLevel': '1', 'WholeProgramOptimization': 'true', 'XMLDocumentationFileName': 'a_file_name', 'ZZXYZ': 'bogus'}, 'VCLinkerTool': { 'AdditionalDependencies': 'file1;file2', 'AdditionalDependencies_excluded': 'file3', 'AdditionalLibraryDirectories': 'folder1;folder2', 'AdditionalManifestDependencies': 'file1;file2', 'AdditionalOptions': 'a string1', 'AddModuleNamesToAssembly': 'file1;file2', 'AllowIsolation': 'true', 'AssemblyDebug': '2', 'AssemblyLinkResource': 'file1;file2', 'BaseAddress': 'a string1', 'CLRImageType': '2', 'CLRThreadAttribute': '2', 'CLRUnmanagedCodeCheck': 'true', 'DataExecutionPrevention': '2', 'DelayLoadDLLs': 'file1;file2', 'DelaySign': 'true', 'Driver': '2', 'EmbedManagedResourceFile': 'file1;file2', 'EnableCOMDATFolding': '2', 'EnableUAC': 'true', 
'EntryPointSymbol': 'a string1', 'ErrorReporting': '2', 'FixedBaseAddress': '2', 'ForceSymbolReferences': 'file1;file2', 'FunctionOrder': 'a_file_name', 'GenerateDebugInformation': 'true', 'GenerateManifest': 'true', 'GenerateMapFile': 'true', 'HeapCommitSize': 'a string1', 'HeapReserveSize': 'a string1', 'IgnoreAllDefaultLibraries': 'true', 'IgnoreDefaultLibraryNames': 'file1;file2', 'IgnoreEmbeddedIDL': 'true', 'IgnoreImportLibrary': 'true', 'ImportLibrary': 'a_file_name', 'KeyContainer': 'a_file_name', 'KeyFile': 'a_file_name', 'LargeAddressAware': '2', 'LinkIncremental': '2', 'LinkLibraryDependencies': 'true', 'LinkTimeCodeGeneration': '2', 'ManifestFile': 'a_file_name', 'MapExports': 'true', 'MapFileName': 'a_file_name', 'MergedIDLBaseFileName': 'a_file_name', 'MergeSections': 'a string1', 'MidlCommandFile': 'a_file_name', 'ModuleDefinitionFile': 'a_file_name', 'OptimizeForWindows98': '1', 'OptimizeReferences': '2', 'OutputFile': 'a_file_name', 'PerUserRedirection': 'true', 'Profile': 'true', 'ProfileGuidedDatabase': 'a_file_name', 'ProgramDatabaseFile': 'a_file_name', 'RandomizedBaseAddress': '2', 'RegisterOutput': 'true', 'ResourceOnlyDLL': 'true', 'SetChecksum': 'true', 'ShowProgress': '2', 'StackCommitSize': 'a string1', 'StackReserveSize': 'a string1', 'StripPrivateSymbols': 'a_file_name', 'SubSystem': '2', 'SupportUnloadOfDelayLoadedDLL': 'true', 'SuppressStartupBanner': 'true', 'SwapRunFromCD': 'true', 'SwapRunFromNet': 'true', 'TargetMachine': '2', 'TerminalServerAware': '2', 'TurnOffAssemblyGeneration': 'true', 'TypeLibraryFile': 'a_file_name', 'TypeLibraryResourceID': '33', 'UACExecutionLevel': '2', 'UACUIAccess': 'true', 'UseLibraryDependencyInputs': 'true', 'UseUnicodeResponseFiles': 'true', 'Version': 'a string1'}, 'VCMIDLTool': { 'AdditionalIncludeDirectories': 'folder1;folder2', 'AdditionalOptions': 'a string1', 'CPreprocessOptions': 'a string1', 'DefaultCharType': '1', 'DLLDataFileName': 'a_file_name', 'EnableErrorChecks': '1', 
'ErrorCheckAllocations': 'true', 'ErrorCheckBounds': 'true', 'ErrorCheckEnumRange': 'true', 'ErrorCheckRefPointers': 'true', 'ErrorCheckStubData': 'true', 'GenerateStublessProxies': 'true', 'GenerateTypeLibrary': 'true', 'HeaderFileName': 'a_file_name', 'IgnoreStandardIncludePath': 'true', 'InterfaceIdentifierFileName': 'a_file_name', 'MkTypLibCompatible': 'true', 'notgood': 'bogus', 'OutputDirectory': 'a string1', 'PreprocessorDefinitions': 'string1;string2', 'ProxyFileName': 'a_file_name', 'RedirectOutputAndErrors': 'a_file_name', 'StructMemberAlignment': '1', 'SuppressStartupBanner': 'true', 'TargetEnvironment': '1', 'TypeLibraryName': 'a_file_name', 'UndefinePreprocessorDefinitions': 'string1;string2', 'ValidateParameters': 'true', 'WarnAsError': 'true', 'WarningLevel': '1'}, 'VCResourceCompilerTool': { 'AdditionalOptions': 'a string1', 'AdditionalIncludeDirectories': 'folder1;folder2', 'Culture': '1003', 'IgnoreStandardIncludePath': 'true', 'notgood2': 'bogus', 'PreprocessorDefinitions': 'string1;string2', 'ResourceOutputFileName': 'a string1', 'ShowProgress': 'true', 'SuppressStartupBanner': 'true', 'UndefinePreprocessorDefinitions': 'string1;string2'}, 'VCLibrarianTool': { 'AdditionalDependencies': 'file1;file2', 'AdditionalLibraryDirectories': 'folder1;folder2', 'AdditionalOptions': 'a string1', 'ExportNamedFunctions': 'string1;string2', 'ForceSymbolReferences': 'a string1', 'IgnoreAllDefaultLibraries': 'true', 'IgnoreSpecificDefaultLibraries': 'file1;file2', 'LinkLibraryDependencies': 'true', 'ModuleDefinitionFile': 'a_file_name', 'OutputFile': 'a_file_name', 'SuppressStartupBanner': 'true', 'UseUnicodeResponseFiles': 'true'}, 'VCManifestTool': { 'AdditionalManifestFiles': 'file1;file2', 'AdditionalOptions': 'a string1', 'AssemblyIdentity': 'a string1', 'ComponentFileName': 'a_file_name', 'DependencyInformationFile': 'a_file_name', 'GenerateCatalogFiles': 'true', 'InputResourceManifests': 'a string1', 'ManifestResourceFile': 'a_file_name', 
'OutputManifestFile': 'a_file_name', 'RegistrarScriptFile': 'a_file_name', 'ReplacementsFile': 'a_file_name', 'SuppressStartupBanner': 'true', 'TypeLibraryFile': 'a_file_name', 'UpdateFileHashes': 'truel', 'UpdateFileHashesSearchPath': 'a_file_name', 'UseFAT32Workaround': 'true', 'UseUnicodeResponseFiles': 'true', 'VerboseOutput': 'true'}}, self.stderr) self._ExpectedWarnings([ 'Warning: for VCCLCompilerTool/BasicRuntimeChecks, ' 'index value (5) not in expected range [0, 4)', 'Warning: for VCCLCompilerTool/BrowseInformation, ' "invalid literal for int() with base 10: 'fdkslj'", 'Warning: for VCCLCompilerTool/CallingConvention, ' 'index value (-1) not in expected range [0, 4)', 'Warning: for VCCLCompilerTool/DebugInformationFormat, ' 'converted value for 2 not specified.', 'Warning: unrecognized setting VCCLCompilerTool/Enableprefast', 'Warning: unrecognized setting VCCLCompilerTool/ZZXYZ', 'Warning: for VCLinkerTool/TargetMachine, ' 'converted value for 2 not specified.', 'Warning: unrecognized setting VCMIDLTool/notgood', 'Warning: unrecognized setting VCResourceCompilerTool/notgood2', 'Warning: for VCManifestTool/UpdateFileHashes, ' "expected bool; got 'truel'" '']) def testValidateMSBuildSettings_settings(self): """Tests that for invalid MSBuild settings.""" MSVSSettings.ValidateMSBuildSettings( {'ClCompile': { 'AdditionalIncludeDirectories': 'folder1;folder2', 'AdditionalOptions': ['string1', 'string2'], 'AdditionalUsingDirectories': 'folder1;folder2', 'AssemblerListingLocation': 'a_file_name', 'AssemblerOutput': 'NoListing', 'BasicRuntimeChecks': 'StackFrameRuntimeCheck', 'BrowseInformation': 'false', 'BrowseInformationFile': 'a_file_name', 'BufferSecurityCheck': 'true', 'BuildingInIDE': 'true', 'CallingConvention': 'Cdecl', 'CompileAs': 'CompileAsC', 'CompileAsManaged': 'Pure', 'CreateHotpatchableImage': 'true', 'DebugInformationFormat': 'ProgramDatabase', 'DisableLanguageExtensions': 'true', 'DisableSpecificWarnings': 'string1;string2', 
'EnableEnhancedInstructionSet': 'StreamingSIMDExtensions', 'EnableFiberSafeOptimizations': 'true', 'EnablePREfast': 'true', 'Enableprefast': 'bogus', 'ErrorReporting': 'Prompt', 'ExceptionHandling': 'SyncCThrow', 'ExpandAttributedSource': 'true', 'FavorSizeOrSpeed': 'Neither', 'FloatingPointExceptions': 'true', 'FloatingPointModel': 'Precise', 'ForceConformanceInForLoopScope': 'true', 'ForcedIncludeFiles': 'file1;file2', 'ForcedUsingFiles': 'file1;file2', 'FunctionLevelLinking': 'false', 'GenerateXMLDocumentationFiles': 'true', 'IgnoreStandardIncludePath': 'true', 'InlineFunctionExpansion': 'OnlyExplicitInline', 'IntrinsicFunctions': 'false', 'MinimalRebuild': 'true', 'MultiProcessorCompilation': 'true', 'ObjectFileName': 'a_file_name', 'OmitDefaultLibName': 'true', 'OmitFramePointers': 'true', 'OpenMPSupport': 'true', 'Optimization': 'Disabled', 'PrecompiledHeader': 'NotUsing', 'PrecompiledHeaderFile': 'a_file_name', 'PrecompiledHeaderOutputFile': 'a_file_name', 'PreprocessKeepComments': 'true', 'PreprocessorDefinitions': 'string1;string2', 'PreprocessOutputPath': 'a string1', 'PreprocessSuppressLineNumbers': 'false', 'PreprocessToFile': 'false', 'ProcessorNumber': '33', 'ProgramDataBaseFileName': 'a_file_name', 'RuntimeLibrary': 'MultiThreaded', 'RuntimeTypeInfo': 'true', 'ShowIncludes': 'true', 'SmallerTypeCheck': 'true', 'StringPooling': 'true', 'StructMemberAlignment': '1Byte', 'SuppressStartupBanner': 'true', 'TrackerLogDirectory': 'a_folder', 'TreatSpecificWarningsAsErrors': 'string1;string2', 'TreatWarningAsError': 'true', 'TreatWChar_tAsBuiltInType': 'true', 'UndefineAllPreprocessorDefinitions': 'true', 'UndefinePreprocessorDefinitions': 'string1;string2', 'UseFullPaths': 'true', 'UseUnicodeForAssemblerListing': 'true', 'WarningLevel': 'TurnOffAllWarnings', 'WholeProgramOptimization': 'true', 'XMLDocumentationFileName': 'a_file_name', 'ZZXYZ': 'bogus'}, 'Link': { 'AdditionalDependencies': 'file1;file2', 'AdditionalLibraryDirectories': 'folder1;folder2', 
'AdditionalManifestDependencies': 'file1;file2', 'AdditionalOptions': 'a string1', 'AddModuleNamesToAssembly': 'file1;file2', 'AllowIsolation': 'true', 'AssemblyDebug': '', 'AssemblyLinkResource': 'file1;file2', 'BaseAddress': 'a string1', 'BuildingInIDE': 'true', 'CLRImageType': 'ForceIJWImage', 'CLRSupportLastError': 'Enabled', 'CLRThreadAttribute': 'MTAThreadingAttribute', 'CLRUnmanagedCodeCheck': 'true', 'CreateHotPatchableImage': 'X86Image', 'DataExecutionPrevention': 'false', 'DelayLoadDLLs': 'file1;file2', 'DelaySign': 'true', 'Driver': 'NotSet', 'EmbedManagedResourceFile': 'file1;file2', 'EnableCOMDATFolding': 'false', 'EnableUAC': 'true', 'EntryPointSymbol': 'a string1', 'FixedBaseAddress': 'false', 'ForceFileOutput': 'Enabled', 'ForceSymbolReferences': 'file1;file2', 'FunctionOrder': 'a_file_name', 'GenerateDebugInformation': 'true', 'GenerateMapFile': 'true', 'HeapCommitSize': 'a string1', 'HeapReserveSize': 'a string1', 'IgnoreAllDefaultLibraries': 'true', 'IgnoreEmbeddedIDL': 'true', 'IgnoreSpecificDefaultLibraries': 'a_file_list', 'ImageHasSafeExceptionHandlers': 'true', 'ImportLibrary': 'a_file_name', 'KeyContainer': 'a_file_name', 'KeyFile': 'a_file_name', 'LargeAddressAware': 'false', 'LinkDLL': 'true', 'LinkErrorReporting': 'SendErrorReport', 'LinkStatus': 'true', 'LinkTimeCodeGeneration': 'UseLinkTimeCodeGeneration', 'ManifestFile': 'a_file_name', 'MapExports': 'true', 'MapFileName': 'a_file_name', 'MergedIDLBaseFileName': 'a_file_name', 'MergeSections': 'a string1', 'MidlCommandFile': 'a_file_name', 'MinimumRequiredVersion': 'a string1', 'ModuleDefinitionFile': 'a_file_name', 'MSDOSStubFileName': 'a_file_name', 'NoEntryPoint': 'true', 'OptimizeReferences': 'false', 'OutputFile': 'a_file_name', 'PerUserRedirection': 'true', 'PreventDllBinding': 'true', 'Profile': 'true', 'ProfileGuidedDatabase': 'a_file_name', 'ProgramDatabaseFile': 'a_file_name', 'RandomizedBaseAddress': 'false', 'RegisterOutput': 'true', 'SectionAlignment': '33', 'SetChecksum': 
'true', 'ShowProgress': 'LinkVerboseREF', 'SpecifySectionAttributes': 'a string1', 'StackCommitSize': 'a string1', 'StackReserveSize': 'a string1', 'StripPrivateSymbols': 'a_file_name', 'SubSystem': 'Console', 'SupportNobindOfDelayLoadedDLL': 'true', 'SupportUnloadOfDelayLoadedDLL': 'true', 'SuppressStartupBanner': 'true', 'SwapRunFromCD': 'true', 'SwapRunFromNET': 'true', 'TargetMachine': 'MachineX86', 'TerminalServerAware': 'false', 'TrackerLogDirectory': 'a_folder', 'TreatLinkerWarningAsErrors': 'true', 'TurnOffAssemblyGeneration': 'true', 'TypeLibraryFile': 'a_file_name', 'TypeLibraryResourceID': '33', 'UACExecutionLevel': 'AsInvoker', 'UACUIAccess': 'true', 'Version': 'a string1'}, 'ResourceCompile': { 'AdditionalIncludeDirectories': 'folder1;folder2', 'AdditionalOptions': 'a string1', 'Culture': '0x236', 'IgnoreStandardIncludePath': 'true', 'NullTerminateStrings': 'true', 'PreprocessorDefinitions': 'string1;string2', 'ResourceOutputFileName': 'a string1', 'ShowProgress': 'true', 'SuppressStartupBanner': 'true', 'TrackerLogDirectory': 'a_folder', 'UndefinePreprocessorDefinitions': 'string1;string2'}, 'Midl': { 'AdditionalIncludeDirectories': 'folder1;folder2', 'AdditionalOptions': 'a string1', 'ApplicationConfigurationMode': 'true', 'ClientStubFile': 'a_file_name', 'CPreprocessOptions': 'a string1', 'DefaultCharType': 'Signed', 'DllDataFileName': 'a_file_name', 'EnableErrorChecks': 'EnableCustom', 'ErrorCheckAllocations': 'true', 'ErrorCheckBounds': 'true', 'ErrorCheckEnumRange': 'true', 'ErrorCheckRefPointers': 'true', 'ErrorCheckStubData': 'true', 'GenerateClientFiles': 'Stub', 'GenerateServerFiles': 'None', 'GenerateStublessProxies': 'true', 'GenerateTypeLibrary': 'true', 'HeaderFileName': 'a_file_name', 'IgnoreStandardIncludePath': 'true', 'InterfaceIdentifierFileName': 'a_file_name', 'LocaleID': '33', 'MkTypLibCompatible': 'true', 'OutputDirectory': 'a string1', 'PreprocessorDefinitions': 'string1;string2', 'ProxyFileName': 'a_file_name', 
'RedirectOutputAndErrors': 'a_file_name', 'ServerStubFile': 'a_file_name', 'StructMemberAlignment': 'NotSet', 'SuppressCompilerWarnings': 'true', 'SuppressStartupBanner': 'true', 'TargetEnvironment': 'Itanium', 'TrackerLogDirectory': 'a_folder', 'TypeLibFormat': 'NewFormat', 'TypeLibraryName': 'a_file_name', 'UndefinePreprocessorDefinitions': 'string1;string2', 'ValidateAllParameters': 'true', 'WarnAsError': 'true', 'WarningLevel': '1'}, 'Lib': { 'AdditionalDependencies': 'file1;file2', 'AdditionalLibraryDirectories': 'folder1;folder2', 'AdditionalOptions': 'a string1', 'DisplayLibrary': 'a string1', 'ErrorReporting': 'PromptImmediately', 'ExportNamedFunctions': 'string1;string2', 'ForceSymbolReferences': 'a string1', 'IgnoreAllDefaultLibraries': 'true', 'IgnoreSpecificDefaultLibraries': 'file1;file2', 'LinkTimeCodeGeneration': 'true', 'MinimumRequiredVersion': 'a string1', 'ModuleDefinitionFile': 'a_file_name', 'Name': 'a_file_name', 'OutputFile': 'a_file_name', 'RemoveObjects': 'file1;file2', 'SubSystem': 'Console', 'SuppressStartupBanner': 'true', 'TargetMachine': 'MachineX86i', 'TrackerLogDirectory': 'a_folder', 'TreatLibWarningAsErrors': 'true', 'UseUnicodeResponseFiles': 'true', 'Verbose': 'true'}, 'Manifest': { 'AdditionalManifestFiles': 'file1;file2', 'AdditionalOptions': 'a string1', 'AssemblyIdentity': 'a string1', 'ComponentFileName': 'a_file_name', 'EnableDPIAwareness': 'fal', 'GenerateCatalogFiles': 'truel', 'GenerateCategoryTags': 'true', 'InputResourceManifests': 'a string1', 'ManifestFromManagedAssembly': 'a_file_name', 'notgood3': 'bogus', 'OutputManifestFile': 'a_file_name', 'OutputResourceManifests': 'a string1', 'RegistrarScriptFile': 'a_file_name', 'ReplacementsFile': 'a_file_name', 'SuppressDependencyElement': 'true', 'SuppressStartupBanner': 'true', 'TrackerLogDirectory': 'a_folder', 'TypeLibraryFile': 'a_file_name', 'UpdateFileHashes': 'true', 'UpdateFileHashesSearchPath': 'a_file_name', 'VerboseOutput': 'true'}, 'ProjectReference': { 
'LinkLibraryDependencies': 'true', 'UseLibraryDependencyInputs': 'true'}, 'ManifestResourceCompile': { 'ResourceOutputFileName': 'a_file_name'}, '': { 'EmbedManifest': 'true', 'GenerateManifest': 'true', 'IgnoreImportLibrary': 'true', 'LinkIncremental': 'false'}}, self.stderr) self._ExpectedWarnings([ 'Warning: unrecognized setting ClCompile/Enableprefast', 'Warning: unrecognized setting ClCompile/ZZXYZ', 'Warning: unrecognized setting Manifest/notgood3', 'Warning: for Manifest/GenerateCatalogFiles, ' "expected bool; got 'truel'", 'Warning: for Lib/TargetMachine, unrecognized enumerated value ' 'MachineX86i', "Warning: for Manifest/EnableDPIAwareness, expected bool; got 'fal'"]) def testConvertToMSBuildSettings_empty(self): """Tests an empty conversion.""" msvs_settings = {} expected_msbuild_settings = {} actual_msbuild_settings = MSVSSettings.ConvertToMSBuildSettings( msvs_settings, self.stderr) self.assertEqual(expected_msbuild_settings, actual_msbuild_settings) self._ExpectedWarnings([]) def testConvertToMSBuildSettings_minimal(self): """Tests a minimal conversion.""" msvs_settings = { 'VCCLCompilerTool': { 'AdditionalIncludeDirectories': 'dir1', 'AdditionalOptions': '/foo', 'BasicRuntimeChecks': '0', }, 'VCLinkerTool': { 'LinkTimeCodeGeneration': '1', 'ErrorReporting': '1', 'DataExecutionPrevention': '2', }, } expected_msbuild_settings = { 'ClCompile': { 'AdditionalIncludeDirectories': 'dir1', 'AdditionalOptions': '/foo', 'BasicRuntimeChecks': 'Default', }, 'Link': { 'LinkTimeCodeGeneration': 'UseLinkTimeCodeGeneration', 'LinkErrorReporting': 'PromptImmediately', 'DataExecutionPrevention': 'true', }, } actual_msbuild_settings = MSVSSettings.ConvertToMSBuildSettings( msvs_settings, self.stderr) self.assertEqual(expected_msbuild_settings, actual_msbuild_settings) self._ExpectedWarnings([]) def testConvertToMSBuildSettings_warnings(self): """Tests conversion that generates warnings.""" msvs_settings = { 'VCCLCompilerTool': { 'AdditionalIncludeDirectories': '1', 
'AdditionalOptions': '2', # These are incorrect values: 'BasicRuntimeChecks': '12', 'BrowseInformation': '21', 'UsePrecompiledHeader': '13', 'GeneratePreprocessedFile': '14'}, 'VCLinkerTool': { # These are incorrect values: 'Driver': '10', 'LinkTimeCodeGeneration': '31', 'ErrorReporting': '21', 'FixedBaseAddress': '6'}, 'VCResourceCompilerTool': { # Custom 'Culture': '1003'}} expected_msbuild_settings = { 'ClCompile': { 'AdditionalIncludeDirectories': '1', 'AdditionalOptions': '2'}, 'Link': {}, 'ResourceCompile': { # Custom 'Culture': '0x03eb'}} actual_msbuild_settings = MSVSSettings.ConvertToMSBuildSettings( msvs_settings, self.stderr) self.assertEqual(expected_msbuild_settings, actual_msbuild_settings) self._ExpectedWarnings([ 'Warning: while converting VCCLCompilerTool/BasicRuntimeChecks to ' 'MSBuild, index value (12) not in expected range [0, 4)', 'Warning: while converting VCCLCompilerTool/BrowseInformation to ' 'MSBuild, index value (21) not in expected range [0, 3)', 'Warning: while converting VCCLCompilerTool/UsePrecompiledHeader to ' 'MSBuild, index value (13) not in expected range [0, 3)', 'Warning: while converting VCCLCompilerTool/GeneratePreprocessedFile to ' 'MSBuild, value must be one of [0, 1, 2]; got 14', 'Warning: while converting VCLinkerTool/Driver to ' 'MSBuild, index value (10) not in expected range [0, 4)', 'Warning: while converting VCLinkerTool/LinkTimeCodeGeneration to ' 'MSBuild, index value (31) not in expected range [0, 5)', 'Warning: while converting VCLinkerTool/ErrorReporting to ' 'MSBuild, index value (21) not in expected range [0, 3)', 'Warning: while converting VCLinkerTool/FixedBaseAddress to ' 'MSBuild, index value (6) not in expected range [0, 3)', ]) def testConvertToMSBuildSettings_full_synthetic(self): """Tests conversion of all the MSBuild settings.""" msvs_settings = { 'VCCLCompilerTool': { 'AdditionalIncludeDirectories': 'folder1;folder2;folder3', 'AdditionalOptions': 'a_string', 'AdditionalUsingDirectories': 
'folder1;folder2;folder3', 'AssemblerListingLocation': 'a_file_name', 'AssemblerOutput': '0', 'BasicRuntimeChecks': '1', 'BrowseInformation': '2', 'BrowseInformationFile': 'a_file_name', 'BufferSecurityCheck': 'true', 'CallingConvention': '0', 'CompileAs': '1', 'DebugInformationFormat': '4', 'DefaultCharIsUnsigned': 'true', 'Detect64BitPortabilityProblems': 'true', 'DisableLanguageExtensions': 'true', 'DisableSpecificWarnings': 'd1;d2;d3', 'EnableEnhancedInstructionSet': '0', 'EnableFiberSafeOptimizations': 'true', 'EnableFunctionLevelLinking': 'true', 'EnableIntrinsicFunctions': 'true', 'EnablePREfast': 'true', 'ErrorReporting': '1', 'ExceptionHandling': '2', 'ExpandAttributedSource': 'true', 'FavorSizeOrSpeed': '0', 'FloatingPointExceptions': 'true', 'FloatingPointModel': '1', 'ForceConformanceInForLoopScope': 'true', 'ForcedIncludeFiles': 'file1;file2;file3', 'ForcedUsingFiles': 'file1;file2;file3', 'GeneratePreprocessedFile': '1', 'GenerateXMLDocumentationFiles': 'true', 'IgnoreStandardIncludePath': 'true', 'InlineFunctionExpansion': '2', 'KeepComments': 'true', 'MinimalRebuild': 'true', 'ObjectFile': 'a_file_name', 'OmitDefaultLibName': 'true', 'OmitFramePointers': 'true', 'OpenMP': 'true', 'Optimization': '3', 'PrecompiledHeaderFile': 'a_file_name', 'PrecompiledHeaderThrough': 'a_file_name', 'PreprocessorDefinitions': 'd1;d2;d3', 'ProgramDataBaseFileName': 'a_file_name', 'RuntimeLibrary': '0', 'RuntimeTypeInfo': 'true', 'ShowIncludes': 'true', 'SmallerTypeCheck': 'true', 'StringPooling': 'true', 'StructMemberAlignment': '1', 'SuppressStartupBanner': 'true', 'TreatWChar_tAsBuiltInType': 'true', 'UndefineAllPreprocessorDefinitions': 'true', 'UndefinePreprocessorDefinitions': 'd1;d2;d3', 'UseFullPaths': 'true', 'UsePrecompiledHeader': '1', 'UseUnicodeResponseFiles': 'true', 'WarnAsError': 'true', 'WarningLevel': '2', 'WholeProgramOptimization': 'true', 'XMLDocumentationFileName': 'a_file_name'}, 'VCLinkerTool': { 'AdditionalDependencies': 'file1;file2;file3', 
'AdditionalLibraryDirectories': 'folder1;folder2;folder3', 'AdditionalLibraryDirectories_excluded': 'folder1;folder2;folder3', 'AdditionalManifestDependencies': 'file1;file2;file3', 'AdditionalOptions': 'a_string', 'AddModuleNamesToAssembly': 'file1;file2;file3', 'AllowIsolation': 'true', 'AssemblyDebug': '0', 'AssemblyLinkResource': 'file1;file2;file3', 'BaseAddress': 'a_string', 'CLRImageType': '1', 'CLRThreadAttribute': '2', 'CLRUnmanagedCodeCheck': 'true', 'DataExecutionPrevention': '0', 'DelayLoadDLLs': 'file1;file2;file3', 'DelaySign': 'true', 'Driver': '1', 'EmbedManagedResourceFile': 'file1;file2;file3', 'EnableCOMDATFolding': '0', 'EnableUAC': 'true', 'EntryPointSymbol': 'a_string', 'ErrorReporting': '0', 'FixedBaseAddress': '1', 'ForceSymbolReferences': 'file1;file2;file3', 'FunctionOrder': 'a_file_name', 'GenerateDebugInformation': 'true', 'GenerateManifest': 'true', 'GenerateMapFile': 'true', 'HeapCommitSize': 'a_string', 'HeapReserveSize': 'a_string', 'IgnoreAllDefaultLibraries': 'true', 'IgnoreDefaultLibraryNames': 'file1;file2;file3', 'IgnoreEmbeddedIDL': 'true', 'IgnoreImportLibrary': 'true', 'ImportLibrary': 'a_file_name', 'KeyContainer': 'a_file_name', 'KeyFile': 'a_file_name', 'LargeAddressAware': '2', 'LinkIncremental': '1', 'LinkLibraryDependencies': 'true', 'LinkTimeCodeGeneration': '2', 'ManifestFile': 'a_file_name', 'MapExports': 'true', 'MapFileName': 'a_file_name', 'MergedIDLBaseFileName': 'a_file_name', 'MergeSections': 'a_string', 'MidlCommandFile': 'a_file_name', 'ModuleDefinitionFile': 'a_file_name', 'OptimizeForWindows98': '1', 'OptimizeReferences': '0', 'OutputFile': 'a_file_name', 'PerUserRedirection': 'true', 'Profile': 'true', 'ProfileGuidedDatabase': 'a_file_name', 'ProgramDatabaseFile': 'a_file_name', 'RandomizedBaseAddress': '1', 'RegisterOutput': 'true', 'ResourceOnlyDLL': 'true', 'SetChecksum': 'true', 'ShowProgress': '0', 'StackCommitSize': 'a_string', 'StackReserveSize': 'a_string', 'StripPrivateSymbols': 'a_file_name', 
'SubSystem': '2', 'SupportUnloadOfDelayLoadedDLL': 'true', 'SuppressStartupBanner': 'true', 'SwapRunFromCD': 'true', 'SwapRunFromNet': 'true', 'TargetMachine': '3', 'TerminalServerAware': '2', 'TurnOffAssemblyGeneration': 'true', 'TypeLibraryFile': 'a_file_name', 'TypeLibraryResourceID': '33', 'UACExecutionLevel': '1', 'UACUIAccess': 'true', 'UseLibraryDependencyInputs': 'false', 'UseUnicodeResponseFiles': 'true', 'Version': 'a_string'}, 'VCResourceCompilerTool': { 'AdditionalIncludeDirectories': 'folder1;folder2;folder3', 'AdditionalOptions': 'a_string', 'Culture': '1003', 'IgnoreStandardIncludePath': 'true', 'PreprocessorDefinitions': 'd1;d2;d3', 'ResourceOutputFileName': 'a_string', 'ShowProgress': 'true', 'SuppressStartupBanner': 'true', 'UndefinePreprocessorDefinitions': 'd1;d2;d3'}, 'VCMIDLTool': { 'AdditionalIncludeDirectories': 'folder1;folder2;folder3', 'AdditionalOptions': 'a_string', 'CPreprocessOptions': 'a_string', 'DefaultCharType': '0', 'DLLDataFileName': 'a_file_name', 'EnableErrorChecks': '2', 'ErrorCheckAllocations': 'true', 'ErrorCheckBounds': 'true', 'ErrorCheckEnumRange': 'true', 'ErrorCheckRefPointers': 'true', 'ErrorCheckStubData': 'true', 'GenerateStublessProxies': 'true', 'GenerateTypeLibrary': 'true', 'HeaderFileName': 'a_file_name', 'IgnoreStandardIncludePath': 'true', 'InterfaceIdentifierFileName': 'a_file_name', 'MkTypLibCompatible': 'true', 'OutputDirectory': 'a_string', 'PreprocessorDefinitions': 'd1;d2;d3', 'ProxyFileName': 'a_file_name', 'RedirectOutputAndErrors': 'a_file_name', 'StructMemberAlignment': '3', 'SuppressStartupBanner': 'true', 'TargetEnvironment': '1', 'TypeLibraryName': 'a_file_name', 'UndefinePreprocessorDefinitions': 'd1;d2;d3', 'ValidateParameters': 'true', 'WarnAsError': 'true', 'WarningLevel': '4'}, 'VCLibrarianTool': { 'AdditionalDependencies': 'file1;file2;file3', 'AdditionalLibraryDirectories': 'folder1;folder2;folder3', 'AdditionalLibraryDirectories_excluded': 'folder1;folder2;folder3', 'AdditionalOptions': 
'a_string', 'ExportNamedFunctions': 'd1;d2;d3', 'ForceSymbolReferences': 'a_string', 'IgnoreAllDefaultLibraries': 'true', 'IgnoreSpecificDefaultLibraries': 'file1;file2;file3', 'LinkLibraryDependencies': 'true', 'ModuleDefinitionFile': 'a_file_name', 'OutputFile': 'a_file_name', 'SuppressStartupBanner': 'true', 'UseUnicodeResponseFiles': 'true'}, 'VCManifestTool': { 'AdditionalManifestFiles': 'file1;file2;file3', 'AdditionalOptions': 'a_string', 'AssemblyIdentity': 'a_string', 'ComponentFileName': 'a_file_name', 'DependencyInformationFile': 'a_file_name', 'EmbedManifest': 'true', 'GenerateCatalogFiles': 'true', 'InputResourceManifests': 'a_string', 'ManifestResourceFile': 'my_name', 'OutputManifestFile': 'a_file_name', 'RegistrarScriptFile': 'a_file_name', 'ReplacementsFile': 'a_file_name', 'SuppressStartupBanner': 'true', 'TypeLibraryFile': 'a_file_name', 'UpdateFileHashes': 'true', 'UpdateFileHashesSearchPath': 'a_file_name', 'UseFAT32Workaround': 'true', 'UseUnicodeResponseFiles': 'true', 'VerboseOutput': 'true'}} expected_msbuild_settings = { 'ClCompile': { 'AdditionalIncludeDirectories': 'folder1;folder2;folder3', 'AdditionalOptions': 'a_string /J', 'AdditionalUsingDirectories': 'folder1;folder2;folder3', 'AssemblerListingLocation': 'a_file_name', 'AssemblerOutput': 'NoListing', 'BasicRuntimeChecks': 'StackFrameRuntimeCheck', 'BrowseInformation': 'true', 'BrowseInformationFile': 'a_file_name', 'BufferSecurityCheck': 'true', 'CallingConvention': 'Cdecl', 'CompileAs': 'CompileAsC', 'DebugInformationFormat': 'EditAndContinue', 'DisableLanguageExtensions': 'true', 'DisableSpecificWarnings': 'd1;d2;d3', 'EnableEnhancedInstructionSet': 'NotSet', 'EnableFiberSafeOptimizations': 'true', 'EnablePREfast': 'true', 'ErrorReporting': 'Prompt', 'ExceptionHandling': 'Async', 'ExpandAttributedSource': 'true', 'FavorSizeOrSpeed': 'Neither', 'FloatingPointExceptions': 'true', 'FloatingPointModel': 'Strict', 'ForceConformanceInForLoopScope': 'true', 'ForcedIncludeFiles': 
'file1;file2;file3', 'ForcedUsingFiles': 'file1;file2;file3', 'FunctionLevelLinking': 'true', 'GenerateXMLDocumentationFiles': 'true', 'IgnoreStandardIncludePath': 'true', 'InlineFunctionExpansion': 'AnySuitable', 'IntrinsicFunctions': 'true', 'MinimalRebuild': 'true', 'ObjectFileName': 'a_file_name', 'OmitDefaultLibName': 'true', 'OmitFramePointers': 'true', 'OpenMPSupport': 'true', 'Optimization': 'Full', 'PrecompiledHeader': 'Create', 'PrecompiledHeaderFile': 'a_file_name', 'PrecompiledHeaderOutputFile': 'a_file_name', 'PreprocessKeepComments': 'true', 'PreprocessorDefinitions': 'd1;d2;d3', 'PreprocessSuppressLineNumbers': 'false', 'PreprocessToFile': 'true', 'ProgramDataBaseFileName': 'a_file_name', 'RuntimeLibrary': 'MultiThreaded', 'RuntimeTypeInfo': 'true', 'ShowIncludes': 'true', 'SmallerTypeCheck': 'true', 'StringPooling': 'true', 'StructMemberAlignment': '1Byte', 'SuppressStartupBanner': 'true', 'TreatWarningAsError': 'true', 'TreatWChar_tAsBuiltInType': 'true', 'UndefineAllPreprocessorDefinitions': 'true', 'UndefinePreprocessorDefinitions': 'd1;d2;d3', 'UseFullPaths': 'true', 'WarningLevel': 'Level2', 'WholeProgramOptimization': 'true', 'XMLDocumentationFileName': 'a_file_name'}, 'Link': { 'AdditionalDependencies': 'file1;file2;file3', 'AdditionalLibraryDirectories': 'folder1;folder2;folder3', 'AdditionalManifestDependencies': 'file1;file2;file3', 'AdditionalOptions': 'a_string', 'AddModuleNamesToAssembly': 'file1;file2;file3', 'AllowIsolation': 'true', 'AssemblyDebug': '', 'AssemblyLinkResource': 'file1;file2;file3', 'BaseAddress': 'a_string', 'CLRImageType': 'ForceIJWImage', 'CLRThreadAttribute': 'STAThreadingAttribute', 'CLRUnmanagedCodeCheck': 'true', 'DataExecutionPrevention': '', 'DelayLoadDLLs': 'file1;file2;file3', 'DelaySign': 'true', 'Driver': 'Driver', 'EmbedManagedResourceFile': 'file1;file2;file3', 'EnableCOMDATFolding': '', 'EnableUAC': 'true', 'EntryPointSymbol': 'a_string', 'FixedBaseAddress': 'false', 'ForceSymbolReferences': 
'file1;file2;file3', 'FunctionOrder': 'a_file_name', 'GenerateDebugInformation': 'true', 'GenerateMapFile': 'true', 'HeapCommitSize': 'a_string', 'HeapReserveSize': 'a_string', 'IgnoreAllDefaultLibraries': 'true', 'IgnoreEmbeddedIDL': 'true', 'IgnoreSpecificDefaultLibraries': 'file1;file2;file3', 'ImportLibrary': 'a_file_name', 'KeyContainer': 'a_file_name', 'KeyFile': 'a_file_name', 'LargeAddressAware': 'true', 'LinkErrorReporting': 'NoErrorReport', 'LinkTimeCodeGeneration': 'PGInstrument', 'ManifestFile': 'a_file_name', 'MapExports': 'true', 'MapFileName': 'a_file_name', 'MergedIDLBaseFileName': 'a_file_name', 'MergeSections': 'a_string', 'MidlCommandFile': 'a_file_name', 'ModuleDefinitionFile': 'a_file_name', 'NoEntryPoint': 'true', 'OptimizeReferences': '', 'OutputFile': 'a_file_name', 'PerUserRedirection': 'true', 'Profile': 'true', 'ProfileGuidedDatabase': 'a_file_name', 'ProgramDatabaseFile': 'a_file_name', 'RandomizedBaseAddress': 'false', 'RegisterOutput': 'true', 'SetChecksum': 'true', 'ShowProgress': 'NotSet', 'StackCommitSize': 'a_string', 'StackReserveSize': 'a_string', 'StripPrivateSymbols': 'a_file_name', 'SubSystem': 'Windows', 'SupportUnloadOfDelayLoadedDLL': 'true', 'SuppressStartupBanner': 'true', 'SwapRunFromCD': 'true', 'SwapRunFromNET': 'true', 'TargetMachine': 'MachineARM', 'TerminalServerAware': 'true', 'TurnOffAssemblyGeneration': 'true', 'TypeLibraryFile': 'a_file_name', 'TypeLibraryResourceID': '33', 'UACExecutionLevel': 'HighestAvailable', 'UACUIAccess': 'true', 'Version': 'a_string'}, 'ResourceCompile': { 'AdditionalIncludeDirectories': 'folder1;folder2;folder3', 'AdditionalOptions': 'a_string', 'Culture': '0x03eb', 'IgnoreStandardIncludePath': 'true', 'PreprocessorDefinitions': 'd1;d2;d3', 'ResourceOutputFileName': 'a_string', 'ShowProgress': 'true', 'SuppressStartupBanner': 'true', 'UndefinePreprocessorDefinitions': 'd1;d2;d3'}, 'Midl': { 'AdditionalIncludeDirectories': 'folder1;folder2;folder3', 'AdditionalOptions': 'a_string', 
'CPreprocessOptions': 'a_string', 'DefaultCharType': 'Unsigned', 'DllDataFileName': 'a_file_name', 'EnableErrorChecks': 'All', 'ErrorCheckAllocations': 'true', 'ErrorCheckBounds': 'true', 'ErrorCheckEnumRange': 'true', 'ErrorCheckRefPointers': 'true', 'ErrorCheckStubData': 'true', 'GenerateStublessProxies': 'true', 'GenerateTypeLibrary': 'true', 'HeaderFileName': 'a_file_name', 'IgnoreStandardIncludePath': 'true', 'InterfaceIdentifierFileName': 'a_file_name', 'MkTypLibCompatible': 'true', 'OutputDirectory': 'a_string', 'PreprocessorDefinitions': 'd1;d2;d3', 'ProxyFileName': 'a_file_name', 'RedirectOutputAndErrors': 'a_file_name', 'StructMemberAlignment': '4', 'SuppressStartupBanner': 'true', 'TargetEnvironment': 'Win32', 'TypeLibraryName': 'a_file_name', 'UndefinePreprocessorDefinitions': 'd1;d2;d3', 'ValidateAllParameters': 'true', 'WarnAsError': 'true', 'WarningLevel': '4'}, 'Lib': { 'AdditionalDependencies': 'file1;file2;file3', 'AdditionalLibraryDirectories': 'folder1;folder2;folder3', 'AdditionalOptions': 'a_string', 'ExportNamedFunctions': 'd1;d2;d3', 'ForceSymbolReferences': 'a_string', 'IgnoreAllDefaultLibraries': 'true', 'IgnoreSpecificDefaultLibraries': 'file1;file2;file3', 'ModuleDefinitionFile': 'a_file_name', 'OutputFile': 'a_file_name', 'SuppressStartupBanner': 'true', 'UseUnicodeResponseFiles': 'true'}, 'Manifest': { 'AdditionalManifestFiles': 'file1;file2;file3', 'AdditionalOptions': 'a_string', 'AssemblyIdentity': 'a_string', 'ComponentFileName': 'a_file_name', 'GenerateCatalogFiles': 'true', 'InputResourceManifests': 'a_string', 'OutputManifestFile': 'a_file_name', 'RegistrarScriptFile': 'a_file_name', 'ReplacementsFile': 'a_file_name', 'SuppressStartupBanner': 'true', 'TypeLibraryFile': 'a_file_name', 'UpdateFileHashes': 'true', 'UpdateFileHashesSearchPath': 'a_file_name', 'VerboseOutput': 'true'}, 'ManifestResourceCompile': { 'ResourceOutputFileName': 'my_name'}, 'ProjectReference': { 'LinkLibraryDependencies': 'true', 
'UseLibraryDependencyInputs': 'false'}, '': { 'EmbedManifest': 'true', 'GenerateManifest': 'true', 'IgnoreImportLibrary': 'true', 'LinkIncremental': 'false'}} actual_msbuild_settings = MSVSSettings.ConvertToMSBuildSettings( msvs_settings, self.stderr) self.assertEqual(expected_msbuild_settings, actual_msbuild_settings) self._ExpectedWarnings([]) def testConvertToMSBuildSettings_actual(self): """Tests the conversion of an actual project. A VS2008 project with most of the options defined was created through the VS2008 IDE. It was then converted to VS2010. The tool settings found in the .vcproj and .vcxproj files were converted to the two dictionaries msvs_settings and expected_msbuild_settings. Note that for many settings, the VS2010 converter adds macros like %(AdditionalIncludeDirectories) to make sure than inherited values are included. Since the Gyp projects we generate do not use inheritance, we removed these macros. They were: ClCompile: AdditionalIncludeDirectories: ';%(AdditionalIncludeDirectories)' AdditionalOptions: ' %(AdditionalOptions)' AdditionalUsingDirectories: ';%(AdditionalUsingDirectories)' DisableSpecificWarnings: ';%(DisableSpecificWarnings)', ForcedIncludeFiles: ';%(ForcedIncludeFiles)', ForcedUsingFiles: ';%(ForcedUsingFiles)', PreprocessorDefinitions: ';%(PreprocessorDefinitions)', UndefinePreprocessorDefinitions: ';%(UndefinePreprocessorDefinitions)', Link: AdditionalDependencies: ';%(AdditionalDependencies)', AdditionalLibraryDirectories: ';%(AdditionalLibraryDirectories)', AdditionalManifestDependencies: ';%(AdditionalManifestDependencies)', AdditionalOptions: ' %(AdditionalOptions)', AddModuleNamesToAssembly: ';%(AddModuleNamesToAssembly)', AssemblyLinkResource: ';%(AssemblyLinkResource)', DelayLoadDLLs: ';%(DelayLoadDLLs)', EmbedManagedResourceFile: ';%(EmbedManagedResourceFile)', ForceSymbolReferences: ';%(ForceSymbolReferences)', IgnoreSpecificDefaultLibraries: ';%(IgnoreSpecificDefaultLibraries)', ResourceCompile: 
AdditionalIncludeDirectories: ';%(AdditionalIncludeDirectories)', AdditionalOptions: ' %(AdditionalOptions)', PreprocessorDefinitions: ';%(PreprocessorDefinitions)', Manifest: AdditionalManifestFiles: ';%(AdditionalManifestFiles)', AdditionalOptions: ' %(AdditionalOptions)', InputResourceManifests: ';%(InputResourceManifests)', """ msvs_settings = { 'VCCLCompilerTool': { 'AdditionalIncludeDirectories': 'dir1', 'AdditionalOptions': '/more', 'AdditionalUsingDirectories': 'test', 'AssemblerListingLocation': '$(IntDir)\\a', 'AssemblerOutput': '1', 'BasicRuntimeChecks': '3', 'BrowseInformation': '1', 'BrowseInformationFile': '$(IntDir)\\e', 'BufferSecurityCheck': 'false', 'CallingConvention': '1', 'CompileAs': '1', 'DebugInformationFormat': '4', 'DefaultCharIsUnsigned': 'true', 'Detect64BitPortabilityProblems': 'true', 'DisableLanguageExtensions': 'true', 'DisableSpecificWarnings': 'abc', 'EnableEnhancedInstructionSet': '1', 'EnableFiberSafeOptimizations': 'true', 'EnableFunctionLevelLinking': 'true', 'EnableIntrinsicFunctions': 'true', 'EnablePREfast': 'true', 'ErrorReporting': '2', 'ExceptionHandling': '2', 'ExpandAttributedSource': 'true', 'FavorSizeOrSpeed': '2', 'FloatingPointExceptions': 'true', 'FloatingPointModel': '1', 'ForceConformanceInForLoopScope': 'false', 'ForcedIncludeFiles': 'def', 'ForcedUsingFiles': 'ge', 'GeneratePreprocessedFile': '2', 'GenerateXMLDocumentationFiles': 'true', 'IgnoreStandardIncludePath': 'true', 'InlineFunctionExpansion': '1', 'KeepComments': 'true', 'MinimalRebuild': 'true', 'ObjectFile': '$(IntDir)\\b', 'OmitDefaultLibName': 'true', 'OmitFramePointers': 'true', 'OpenMP': 'true', 'Optimization': '3', 'PrecompiledHeaderFile': '$(IntDir)\\$(TargetName).pche', 'PrecompiledHeaderThrough': 'StdAfx.hd', 'PreprocessorDefinitions': 'WIN32;_DEBUG;_CONSOLE', 'ProgramDataBaseFileName': '$(IntDir)\\vc90b.pdb', 'RuntimeLibrary': '3', 'RuntimeTypeInfo': 'false', 'ShowIncludes': 'true', 'SmallerTypeCheck': 'true', 'StringPooling': 'true', 
'StructMemberAlignment': '3', 'SuppressStartupBanner': 'false', 'TreatWChar_tAsBuiltInType': 'false', 'UndefineAllPreprocessorDefinitions': 'true', 'UndefinePreprocessorDefinitions': 'wer', 'UseFullPaths': 'true', 'UsePrecompiledHeader': '0', 'UseUnicodeResponseFiles': 'false', 'WarnAsError': 'true', 'WarningLevel': '3', 'WholeProgramOptimization': 'true', 'XMLDocumentationFileName': '$(IntDir)\\c'}, 'VCLinkerTool': { 'AdditionalDependencies': 'zx', 'AdditionalLibraryDirectories': 'asd', 'AdditionalManifestDependencies': 's2', 'AdditionalOptions': '/mor2', 'AddModuleNamesToAssembly': 'd1', 'AllowIsolation': 'false', 'AssemblyDebug': '1', 'AssemblyLinkResource': 'd5', 'BaseAddress': '23423', 'CLRImageType': '3', 'CLRThreadAttribute': '1', 'CLRUnmanagedCodeCheck': 'true', 'DataExecutionPrevention': '0', 'DelayLoadDLLs': 'd4', 'DelaySign': 'true', 'Driver': '2', 'EmbedManagedResourceFile': 'd2', 'EnableCOMDATFolding': '1', 'EnableUAC': 'false', 'EntryPointSymbol': 'f5', 'ErrorReporting': '2', 'FixedBaseAddress': '1', 'ForceSymbolReferences': 'd3', 'FunctionOrder': 'fssdfsd', 'GenerateDebugInformation': 'true', 'GenerateManifest': 'false', 'GenerateMapFile': 'true', 'HeapCommitSize': '13', 'HeapReserveSize': '12', 'IgnoreAllDefaultLibraries': 'true', 'IgnoreDefaultLibraryNames': 'flob;flok', 'IgnoreEmbeddedIDL': 'true', 'IgnoreImportLibrary': 'true', 'ImportLibrary': 'f4', 'KeyContainer': 'f7', 'KeyFile': 'f6', 'LargeAddressAware': '2', 'LinkIncremental': '0', 'LinkLibraryDependencies': 'false', 'LinkTimeCodeGeneration': '1', 'ManifestFile': '$(IntDir)\\$(TargetFileName).2intermediate.manifest', 'MapExports': 'true', 'MapFileName': 'd5', 'MergedIDLBaseFileName': 'f2', 'MergeSections': 'f5', 'MidlCommandFile': 'f1', 'ModuleDefinitionFile': 'sdsd', 'OptimizeForWindows98': '2', 'OptimizeReferences': '2', 'OutputFile': '$(OutDir)\\$(ProjectName)2.exe', 'PerUserRedirection': 'true', 'Profile': 'true', 'ProfileGuidedDatabase': '$(TargetDir)$(TargetName).pgdd', 
'ProgramDatabaseFile': 'Flob.pdb', 'RandomizedBaseAddress': '1', 'RegisterOutput': 'true', 'ResourceOnlyDLL': 'true', 'SetChecksum': 'false', 'ShowProgress': '1', 'StackCommitSize': '15', 'StackReserveSize': '14', 'StripPrivateSymbols': 'd3', 'SubSystem': '1', 'SupportUnloadOfDelayLoadedDLL': 'true', 'SuppressStartupBanner': 'false', 'SwapRunFromCD': 'true', 'SwapRunFromNet': 'true', 'TargetMachine': '1', 'TerminalServerAware': '1', 'TurnOffAssemblyGeneration': 'true', 'TypeLibraryFile': 'f3', 'TypeLibraryResourceID': '12', 'UACExecutionLevel': '2', 'UACUIAccess': 'true', 'UseLibraryDependencyInputs': 'true', 'UseUnicodeResponseFiles': 'false', 'Version': '333'}, 'VCResourceCompilerTool': { 'AdditionalIncludeDirectories': 'f3', 'AdditionalOptions': '/more3', 'Culture': '3084', 'IgnoreStandardIncludePath': 'true', 'PreprocessorDefinitions': '_UNICODE;UNICODE2', 'ResourceOutputFileName': '$(IntDir)/$(InputName)3.res', 'ShowProgress': 'true'}, 'VCManifestTool': { 'AdditionalManifestFiles': 'sfsdfsd', 'AdditionalOptions': 'afdsdafsd', 'AssemblyIdentity': 'sddfdsadfsa', 'ComponentFileName': 'fsdfds', 'DependencyInformationFile': '$(IntDir)\\mt.depdfd', 'EmbedManifest': 'false', 'GenerateCatalogFiles': 'true', 'InputResourceManifests': 'asfsfdafs', 'ManifestResourceFile': '$(IntDir)\\$(TargetFileName).embed.manifest.resfdsf', 'OutputManifestFile': '$(TargetPath).manifestdfs', 'RegistrarScriptFile': 'sdfsfd', 'ReplacementsFile': 'sdffsd', 'SuppressStartupBanner': 'false', 'TypeLibraryFile': 'sfsd', 'UpdateFileHashes': 'true', 'UpdateFileHashesSearchPath': 'sfsd', 'UseFAT32Workaround': 'true', 'UseUnicodeResponseFiles': 'false', 'VerboseOutput': 'true'}} expected_msbuild_settings = { 'ClCompile': { 'AdditionalIncludeDirectories': 'dir1', 'AdditionalOptions': '/more /J', 'AdditionalUsingDirectories': 'test', 'AssemblerListingLocation': '$(IntDir)a', 'AssemblerOutput': 'AssemblyCode', 'BasicRuntimeChecks': 'EnableFastChecks', 'BrowseInformation': 'true', 
'BrowseInformationFile': '$(IntDir)e', 'BufferSecurityCheck': 'false', 'CallingConvention': 'FastCall', 'CompileAs': 'CompileAsC', 'DebugInformationFormat': 'EditAndContinue', 'DisableLanguageExtensions': 'true', 'DisableSpecificWarnings': 'abc', 'EnableEnhancedInstructionSet': 'StreamingSIMDExtensions', 'EnableFiberSafeOptimizations': 'true', 'EnablePREfast': 'true', 'ErrorReporting': 'Queue', 'ExceptionHandling': 'Async', 'ExpandAttributedSource': 'true', 'FavorSizeOrSpeed': 'Size', 'FloatingPointExceptions': 'true', 'FloatingPointModel': 'Strict', 'ForceConformanceInForLoopScope': 'false', 'ForcedIncludeFiles': 'def', 'ForcedUsingFiles': 'ge', 'FunctionLevelLinking': 'true', 'GenerateXMLDocumentationFiles': 'true', 'IgnoreStandardIncludePath': 'true', 'InlineFunctionExpansion': 'OnlyExplicitInline', 'IntrinsicFunctions': 'true', 'MinimalRebuild': 'true', 'ObjectFileName': '$(IntDir)b', 'OmitDefaultLibName': 'true', 'OmitFramePointers': 'true', 'OpenMPSupport': 'true', 'Optimization': 'Full', 'PrecompiledHeader': 'NotUsing', # Actual conversion gives '' 'PrecompiledHeaderFile': 'StdAfx.hd', 'PrecompiledHeaderOutputFile': '$(IntDir)$(TargetName).pche', 'PreprocessKeepComments': 'true', 'PreprocessorDefinitions': 'WIN32;_DEBUG;_CONSOLE', 'PreprocessSuppressLineNumbers': 'true', 'PreprocessToFile': 'true', 'ProgramDataBaseFileName': '$(IntDir)vc90b.pdb', 'RuntimeLibrary': 'MultiThreadedDebugDLL', 'RuntimeTypeInfo': 'false', 'ShowIncludes': 'true', 'SmallerTypeCheck': 'true', 'StringPooling': 'true', 'StructMemberAlignment': '4Bytes', 'SuppressStartupBanner': 'false', 'TreatWarningAsError': 'true', 'TreatWChar_tAsBuiltInType': 'false', 'UndefineAllPreprocessorDefinitions': 'true', 'UndefinePreprocessorDefinitions': 'wer', 'UseFullPaths': 'true', 'WarningLevel': 'Level3', 'WholeProgramOptimization': 'true', 'XMLDocumentationFileName': '$(IntDir)c'}, 'Link': { 'AdditionalDependencies': 'zx', 'AdditionalLibraryDirectories': 'asd', 'AdditionalManifestDependencies': 's2', 
'AdditionalOptions': '/mor2', 'AddModuleNamesToAssembly': 'd1', 'AllowIsolation': 'false', 'AssemblyDebug': 'true', 'AssemblyLinkResource': 'd5', 'BaseAddress': '23423', 'CLRImageType': 'ForceSafeILImage', 'CLRThreadAttribute': 'MTAThreadingAttribute', 'CLRUnmanagedCodeCheck': 'true', 'DataExecutionPrevention': '', 'DelayLoadDLLs': 'd4', 'DelaySign': 'true', 'Driver': 'UpOnly', 'EmbedManagedResourceFile': 'd2', 'EnableCOMDATFolding': 'false', 'EnableUAC': 'false', 'EntryPointSymbol': 'f5', 'FixedBaseAddress': 'false', 'ForceSymbolReferences': 'd3', 'FunctionOrder': 'fssdfsd', 'GenerateDebugInformation': 'true', 'GenerateMapFile': 'true', 'HeapCommitSize': '13', 'HeapReserveSize': '12', 'IgnoreAllDefaultLibraries': 'true', 'IgnoreEmbeddedIDL': 'true', 'IgnoreSpecificDefaultLibraries': 'flob;flok', 'ImportLibrary': 'f4', 'KeyContainer': 'f7', 'KeyFile': 'f6', 'LargeAddressAware': 'true', 'LinkErrorReporting': 'QueueForNextLogin', 'LinkTimeCodeGeneration': 'UseLinkTimeCodeGeneration', 'ManifestFile': '$(IntDir)$(TargetFileName).2intermediate.manifest', 'MapExports': 'true', 'MapFileName': 'd5', 'MergedIDLBaseFileName': 'f2', 'MergeSections': 'f5', 'MidlCommandFile': 'f1', 'ModuleDefinitionFile': 'sdsd', 'NoEntryPoint': 'true', 'OptimizeReferences': 'true', 'OutputFile': '$(OutDir)$(ProjectName)2.exe', 'PerUserRedirection': 'true', 'Profile': 'true', 'ProfileGuidedDatabase': '$(TargetDir)$(TargetName).pgdd', 'ProgramDatabaseFile': 'Flob.pdb', 'RandomizedBaseAddress': 'false', 'RegisterOutput': 'true', 'SetChecksum': 'false', 'ShowProgress': 'LinkVerbose', 'StackCommitSize': '15', 'StackReserveSize': '14', 'StripPrivateSymbols': 'd3', 'SubSystem': 'Console', 'SupportUnloadOfDelayLoadedDLL': 'true', 'SuppressStartupBanner': 'false', 'SwapRunFromCD': 'true', 'SwapRunFromNET': 'true', 'TargetMachine': 'MachineX86', 'TerminalServerAware': 'false', 'TurnOffAssemblyGeneration': 'true', 'TypeLibraryFile': 'f3', 'TypeLibraryResourceID': '12', 'UACExecutionLevel': 
'RequireAdministrator', 'UACUIAccess': 'true', 'Version': '333'}, 'ResourceCompile': { 'AdditionalIncludeDirectories': 'f3', 'AdditionalOptions': '/more3', 'Culture': '0x0c0c', 'IgnoreStandardIncludePath': 'true', 'PreprocessorDefinitions': '_UNICODE;UNICODE2', 'ResourceOutputFileName': '$(IntDir)%(Filename)3.res', 'ShowProgress': 'true'}, 'Manifest': { 'AdditionalManifestFiles': 'sfsdfsd', 'AdditionalOptions': 'afdsdafsd', 'AssemblyIdentity': 'sddfdsadfsa', 'ComponentFileName': 'fsdfds', 'GenerateCatalogFiles': 'true', 'InputResourceManifests': 'asfsfdafs', 'OutputManifestFile': '$(TargetPath).manifestdfs', 'RegistrarScriptFile': 'sdfsfd', 'ReplacementsFile': 'sdffsd', 'SuppressStartupBanner': 'false', 'TypeLibraryFile': 'sfsd', 'UpdateFileHashes': 'true', 'UpdateFileHashesSearchPath': 'sfsd', 'VerboseOutput': 'true'}, 'ProjectReference': { 'LinkLibraryDependencies': 'false', 'UseLibraryDependencyInputs': 'true'}, '': { 'EmbedManifest': 'false', 'GenerateManifest': 'false', 'IgnoreImportLibrary': 'true', 'LinkIncremental': '' }, 'ManifestResourceCompile': { 'ResourceOutputFileName': '$(IntDir)$(TargetFileName).embed.manifest.resfdsf'} } actual_msbuild_settings = MSVSSettings.ConvertToMSBuildSettings( msvs_settings, self.stderr) self.assertEqual(expected_msbuild_settings, actual_msbuild_settings) self._ExpectedWarnings([]) if __name__ == '__main__': unittest.main()
mit
pepetreshere/odoo
addons/account/models/account_cash_rounding.py
4
2921
# -*- coding: utf-8 -*-
from odoo import models, fields, api, _
from odoo.tools import float_round
from odoo.exceptions import ValidationError


class AccountCashRounding(models.Model):
    """Cash rounding configuration.

    Some currencies no longer circulate their smallest coins — in
    Switzerland, for instance, the 0.01 and 0.02 CHF coins were withdrawn,
    so invoice totals must land on a multiple of 0.05 CHF. A record of this
    model describes one such rounding: the precision, the tie-breaking
    method, and how the rounding difference is booked.
    See https://en.wikipedia.org/wiki/Cash_rounding for more details.
    """
    _name = 'account.cash.rounding'
    _description = 'Account Cash Rounding'

    name = fields.Char(string='Name', translate=True, required=True)
    # Smallest coinage still in circulation, e.g. 0.05.
    rounding = fields.Float(
        string='Rounding Precision', required=True, default=0.01,
        help='Represent the non-zero value smallest coinage (for example, 0.05).')
    # Either tweak the biggest tax line or append a dedicated rounding line.
    strategy = fields.Selection(
        [('biggest_tax', 'Modify tax amount'), ('add_invoice_line', 'Add a rounding line')],
        string='Rounding Strategy', default='add_invoice_line', required=True,
        help='Specify which way will be used to round the invoice amount to the rounding precision')
    profit_account_id = fields.Many2one(
        'account.account', string='Profit Account', company_dependent=True,
        domain="[('deprecated', '=', False), ('company_id', '=', current_company_id)]")
    loss_account_id = fields.Many2one(
        'account.account', string='Loss Account', company_dependent=True,
        domain="[('deprecated', '=', False), ('company_id', '=', current_company_id)]")
    rounding_method = fields.Selection(
        string='Rounding Method', required=True,
        selection=[('UP', 'UP'), ('DOWN', 'DOWN'), ('HALF-UP', 'HALF-UP')],
        default='HALF-UP',
        help='The tie-breaking rule used for float rounding operations')
    company_id = fields.Many2one('res.company', related='profit_account_id.company_id')

    @api.constrains('rounding')
    def validate_rounding(self):
        """Reject non-positive rounding precisions."""
        for rec in self:
            if rec.rounding <= 0:
                raise ValidationError(_("Please set a strictly positive rounding value."))

    def round(self, amount):
        """Round ``amount`` with this record's precision and tie-breaking rule.

        :param amount: the amount to round
        :return: the rounded amount depending the rounding value and the rounding method
        """
        return float_round(
            amount,
            precision_rounding=self.rounding,
            rounding_method=self.rounding_method,
        )

    def compute_difference(self, currency, amount):
        """Return the delta introduced by the rounding, in ``currency``.

        For example, with amount=23.91 rounded to 24.00 the result is 0.09.

        :param currency: The currency.
        :param amount: The amount
        :return: round(difference)
        """
        return currency.round(self.round(amount) - amount)
agpl-3.0
michaelBenin/geonode
geonode/upload/upload.py
1
21917
#########################################################################
#
# Copyright (C) 2012 OpenPlans
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#########################################################################
"""
Provide views and business logic of doing an upload.

The upload process may be multi step so views are all handled internally here
by the view function.

The pattern to support separation of view/logic is each step in the upload
process is suffixed with "_step". The view for that step is suffixed with
"_step_view". The goal of separation of view/logic is to support various
programmatic uses of this API. The logic steps should not accept request
objects or return response objects.

State is stored in a UploaderSession object stored in the user's session.
This needs to be made more stateful by adding a model.

NOTE(review): this module uses Python 2 syntax throughout
(``except Exc, e:``, ``basestring``, ``async`` as an identifier) and will
not run on Python 3 without porting.
"""
from geonode.geoserver.helpers import get_sld_for
from geonode.layers.utils import get_valid_layer_name
from geonode.layers.utils import layer_type
from geonode.layers.metadata import set_metadata
from geonode.layers.models import Layer
from geonode.layers.utils import layer_set_permissions
from geonode.people.models import Profile
from geonode import GeoNodeException
from geonode.people.utils import get_default_user
from geonode.upload.models import Upload
from geonode.upload import signals
from geonode.upload.utils import create_geoserver_db_featurestore
from geonode.upload.utils import find_file_re
from geonode.upload.utils import gs_uploader
import geoserver
from geoserver.resource import Coverage
from geoserver.resource import FeatureType
from geonode.geoserver.uploader.uploader import BadRequest
from django.conf import settings
from django.contrib.auth.models import User
from django.db.models import Max
import shutil
import os.path
import logging
import uuid

logger = logging.getLogger(__name__)


class UploadException(Exception):
    '''A handled exception meant to be presented to the user'''

    @staticmethod
    def from_exc(msg, ex):
        # Prepend our message to the wrapped exception's args so the
        # original detail is preserved in the new exception.
        args = [msg]
        args.extend(ex.args)
        return UploadException(*args)


class UploaderSession(object):
    """All objects held must be able to survive a good pickling.

    Instances are stored in the Django session between upload steps, so
    every attribute here must be picklable (hence ``import_target`` is a
    plain dict rather than a gsconfig object).
    """

    # the uploader session object
    import_session = None

    # if provided, this file will be uploaded to geoserver and set as
    # the default
    import_sld_file = None

    # location of any temporary uploaded files
    tempdir = None

    # the main uploaded file, zip, shp, tif, etc.
    base_file = None

    # the name to try to give the layer
    name = None

    # blob of permissions JSON
    permissions = None

    form = None  # @todo - needed?

    # defaults to REPLACE if not provided. Accepts APPEND, too
    update_mode = None

    # Import to GeoGit repository
    geogit = None

    # Configure Time for this Layer
    time = None

    # the title given to the layer
    layer_title = None

    # the abstract
    layer_abstract = None

    # computed target (dict since gsconfig objects do not pickle, but
    # attributes matching a datastore) of the import
    import_target = None

    # track the most recently completed upload step
    completed_step = None

    # the upload type - see the _pages dict in views
    upload_type = None

    def set_target(self, target):
        """Record the import target as a picklable dict of its attributes."""
        self.import_target = {
            'name': target.name,
            'workspace_name': target.workspace.name,
            'resource_type': target.resource_type
        }

    def __init__(self, **kw):
        # Only attributes declared on the class may be set; anything else
        # is a programming error, not user input.
        for k, v in kw.items():
            if hasattr(self, k):
                setattr(self, k, v)
            else:
                raise Exception('not handled : %s' % k)

    def cleanup(self):
        """do what we should at the given state of the upload"""
        pass


def upload(name, base_file, user=None,
           time_attribute=None, time_transform_type=None,
           end_time_attribute=None, end_time_transform_type=None,
           presentation_strategy=None, precision_value=None,
           precision_step=None, use_big_date=False,
           overwrite=False):
    """Run the whole upload pipeline synchronously: save, time config,
    import and finalization. Programmatic (non-view) entry point.
    """
    if user is None:
        user = get_default_user()
    if isinstance(user, basestring):
        user = User.objects.get(username=user)

    import_session = save_step(user, name, base_file, overwrite)

    upload_session = UploaderSession(
        base_file=base_file,
        name=name,
        import_session=import_session,
        layer_abstract="",
        layer_title=name,
        permissions=None
    )

    # NOTE(review): time_step() has no 'srs' parameter, so this call as
    # written would raise TypeError when reached — verify before use.
    time_step(upload_session,
              time_attribute, time_transform_type,
              presentation_strategy, precision_value, precision_step,
              end_time_attribute=end_time_attribute,
              end_time_transform_type=end_time_transform_type,
              time_format=None, srs=None, use_big_date=use_big_date)

    target = run_import(upload_session, async=False)
    upload_session.set_target(target)
    final_step(upload_session, user)


def _log(msg, *args):
    # Thin convenience wrapper; keeps call sites short.
    logger.info(msg, *args)


def save_step(user, layer, base_file, overwrite=True):
    """Validate the upload against existing GeoServer state and create a
    gsimporter import session for it.

    Returns the created import session; raises on name/type conflicts or
    uploader errors.
    """
    _log('Uploading layer: [%s], base file [%s]', layer, base_file)
    # TODO Add better error handling
    assert os.path.exists(base_file), 'invalid base_file - does not exist'

    name = get_valid_layer_name(layer, overwrite)
    _log('Name for layer: [%s]', name)

    # Step 2. Check that it is uploading to the same resource type as
    # the existing resource
    the_layer_type = layer_type(base_file)

    # Check if the store exists in geoserver
    try:
        store = Layer.objects.gs_catalog.get_store(name)
    except geoserver.catalog.FailedRequestError, e:
        # There is no store, ergo the road is clear
        pass
    else:
        # If we get a store, we do the following:
        resources = store.get_resources()
        # Is it empty?
        if len(resources) == 0:
            # What should we do about that empty store?
            if overwrite:
                # We can just delete it and recreate it later.
                store.delete()
            else:
                msg = ('The layer exists and the overwrite parameter is %s' % overwrite)
                raise GeoNodeException(msg)
        else:
            # If our resource is already configured in the store it
            # needs to have the right resource type
            for resource in resources:
                if resource.name == name:
                    assert overwrite, "Name already in use and overwrite is False"
                    existing_type = resource.resource_type
                    if existing_type != the_layer_type:
                        msg = ('Type of uploaded file %s (%s) does not match type '
                               'of existing resource type %s' %
                               (name, the_layer_type, existing_type))
                        _log(msg)
                        raise GeoNodeException(msg)

    if the_layer_type not in (FeatureType.resource_type, Coverage.resource_type):
        raise Exception('Expected the layer type to be a FeatureType or Coverage, not %s' % the_layer_type)
    _log('Uploading %s', the_layer_type)

    error_msg = None
    try:
        # importer tracks ids by autoincrement but is prone to corruption
        # which potentially may reset the id - hopefully prevent this...
        next_id = Upload.objects.all().aggregate(Max('import_id')).values()[0]
        next_id = next_id + 1 if next_id else 1
        # @todo settings for use_url or auto detection if geoserver is
        # on same host
        import_session = gs_uploader().upload(
            base_file, use_url=False, import_id=next_id)
        # save record of this whether valid or not - will help w/ debugging
        upload = Upload.objects.create_from_session(user, import_session)
        if not import_session.tasks:
            error_msg = 'No upload tasks were created'
        elif not import_session.tasks[0].items:
            error_msg = 'No upload items found for task'
        if error_msg:
            upload.state = upload.STATE_INVALID
            upload.save()
        # @todo once the random tmp9723481758915 type of name is not
        # around, need to track the name computed above, for now, the
        # target store name can be used
    except Exception, e:
        logger.exception('Error creating import session')
        error_msg = str(e)

    if error_msg:
        raise Exception('Could not save the layer %s, there was an upload error: %s' % (name, error_msg))
    else:
        _log("Finished upload of [%s] to GeoServer without errors.", name)

    return import_session


def run_import(upload_session, async):
    """Run the import, possibly asynchronously.

    Returns the target datastore.
    """
    import_session = upload_session.import_session
    # Refresh the session from the uploader to see current state.
    import_session = gs_uploader().get_session(import_session.id)
    if import_session.state == 'INCOMPLETE':
        item = upload_session.import_session.tasks[0].items[0]
        if item.state == 'NO_CRS':
            err = 'No projection found'
        else:
            err = item.state or 'Session not ready for import.'
        if err:
            raise Exception(err)

    # if a target datastore is configured, ensure the datastore exists
    # in geoserver and set the uploader target appropriately
    # NOTE(review): vector data is redirected to a geogit or postgis store;
    # rasters keep the importer's default target.
    if (hasattr(settings, 'GEOGIT_DATASTORE') and
            settings.GEOGIT_DATASTORE and
            upload_session.geogit == True and
            import_session.tasks[0].items[0].layer.layer_type != 'RASTER'):
        target = create_geoserver_db_featurestore(store_type='geogit')
        _log('setting target datastore %s %s',
             target.name, target.workspace.name)
        import_session.tasks[0].set_target(
            target.name, target.workspace.name)
    elif (settings.DB_DATASTORE and
            import_session.tasks[0].items[0].layer.layer_type != 'RASTER'):
        target = create_geoserver_db_featurestore(store_type='postgis')
        _log('setting target datastore %s %s',
             target.name, target.workspace.name)
        import_session.tasks[0].set_target(
            target.name, target.workspace.name)
    else:
        target = import_session.tasks[0].target

    if upload_session.update_mode:
        _log('setting updateMode to %s', upload_session.update_mode)
        import_session.tasks[0].set_update_mode(upload_session.update_mode)

    _log('running import session')
    # run async if using a database
    import_session.commit(async)

    # @todo check status of import session - it may fail, but due to protocol,
    # this will not be reported during the commit
    return target


def time_step(upload_session, time_attribute, time_transform_type,
              presentation_strategy, precision_value, precision_step,
              end_time_attribute=None,
              end_time_transform_type=None,
              end_time_format=None,
              time_format=None,
              use_big_date=None):
    '''Configure time dimension handling on the import item.

    time_attribute - name of attribute to use as time
    time_transform_type - name of transform. either
        DateFormatTransform or IntegerFieldToDateTransform
    time_format - optional string format
    end_time_attribute - optional name of attribute to use as end time
    end_time_transform_type - optional name of transform. either
        DateFormatTransform or IntegerFieldToDateTransform
    end_time_format - optional string format
    presentation_strategy - LIST, DISCRETE_INTERVAL, CONTINUOUS_INTERVAL
    precision_value - number
    precision_step - year, month, day, week, etc.
    '''
    resource = upload_session.import_session.tasks[0].items[0].resource

    transforms = []

    def build_time_transform(att, type, format):
        # format is only attached when provided, as an optional key.
        trans = {'type': type, 'field': att}
        if format:
            trans['format'] = format
        return trans

    def build_att_remap_transform(att):
        # @todo the target is so ugly it should be obvious
        return {'type': 'AttributeRemapTransform', 'field': att,
                'target': 'org.geotools.data.postgis.PostGISDialect$XDate'}

    if use_big_date is None:
        try:
            use_big_date = settings.USE_BIG_DATE
        # NOTE(review): bare except — presumably meant AttributeError for a
        # missing setting; as written it swallows everything.
        except:
            use_big_date = False

    if time_attribute:
        if time_transform_type:
            transforms.append(
                build_time_transform(
                    time_attribute,
                    time_transform_type,
                    time_format
                )
            )

        if end_time_attribute and end_time_transform_type:
            transforms.append(
                build_time_transform(
                    end_time_attribute,
                    end_time_transform_type,
                    end_time_format
                )
            )

        # this must go after the remapping transform to ensure the
        # type change is applied
        if use_big_date:
            transforms.append(build_att_remap_transform(time_attribute))
            if end_time_attribute:
                transforms.append(
                    build_att_remap_transform(end_time_attribute)
                )

        transforms.append({
            'type': 'CreateIndexTransform',
            'field': time_attribute
        })
        resource.add_time_dimension_info(
            time_attribute,
            end_time_attribute,
            presentation_strategy,
            precision_value,
            precision_step,
        )
        logger.info('Setting time dimension info')
        resource.save()

    if transforms:
        logger.info('Setting transforms %s' % transforms)
        upload_session.import_session.tasks[0].items[0].add_transforms(transforms)
        try:
            upload_session.import_session.tasks[0].items[0].save()
        except BadRequest, br:
            raise UploadException.from_exc('Error configuring time:', br)


def csv_step(upload_session, lat_field, lng_field):
    """Configure a lat/lng-to-point-geometry transform for a CSV upload
    and pin the SRS to EPSG:4326."""
    import_session = upload_session.import_session
    item = import_session.tasks[0].items[0]
    feature_type = item.resource
    transform = {'type': 'AttributesToPointGeometryTransform',
                 'latField': lat_field,
                 'lngField': lng_field,
                 }
    feature_type.set_srs('EPSG:4326')
    item.add_transforms([transform])
    item.save()


def srs_step(upload_session, srs):
    """Set the SRS on the import resource; bare codes get an EPSG: prefix."""
    resource = upload_session.import_session.tasks[0].items[0].resource
    srs = srs.strip().upper()
    if not srs.startswith("EPSG:"):
        srs = "EPSG:%s" % srs
    logger.info('Setting SRS to %s', srs)
    resource.set_srs(srs)


def final_step(upload_session, user):
    """Finalize the upload: verify the layer exists in GeoServer, set its
    style, create/refresh the Django Layer record, apply metadata and
    permissions, and clean up temporary files.

    Returns the saved Layer.
    """
    import_session = upload_session.import_session
    _log('Reloading session %s to check validity', import_session.id)
    import_session = import_session.reload()
    upload_session.import_session = import_session

    # the importer chooses an available featuretype name late in the game need
    # to verify the resource.name otherwise things will fail. This happens
    # when the same data is uploaded a second time and the default name is
    # chosen
    cat = Layer.objects.gs_catalog
    cat._cache.clear()

    # Create the style and assign it to the created resource
    # FIXME: Put this in gsconfig.py
    # @todo see above in save_step, regarding computed unique name
    name = import_session.tasks[0].items[0].layer.name

    # keep checking to see if geoserver has the layer.
    # it can take significantly longer than a few of seconds
    # to upload data to geoserver in some cases: slower/busy server, larger
    # data, etc
    # NOTE(review): polls up to 30 times with a 2s sleep (~60s worst case).
    import time
    wait_counter = 0
    while wait_counter < 30:
        wait_counter += 1
        time.sleep(2)
        publishing = cat.get_layer(name)
        if publishing is not None:
            break

    _log('Creating style for [%s]', name)
    publishing = cat.get_layer(name)
    if publishing is None:
        raise Exception("Expected to find layer named '%s' in geoserver", name)

    # get_files will not find the sld if it doesn't match the base name
    # so we've worked around that in the view - if provided, it will be here
    if upload_session.import_sld_file:
        _log('using provided sld file')
        base_file = upload_session.base_file
        sld_file = os.path.join(
            os.path.dirname(base_file), upload_session.import_sld_file
        )
        f = open(sld_file, 'r')
        sld = f.read()
        f.close()
    else:
        sld = get_sld_for(publishing)

    if sld is not None:
        try:
            cat.create_style(name, sld)
        except geoserver.catalog.ConflictingDataError, e:
            msg = 'There was already a style named %s in GeoServer, cannot overwrite: "%s"' % (name, str(e))
            # what are we doing with this var?
            # style = cat.get_style(name)
            logger.warn(msg)
            e.args = (msg,)

        #FIXME: Should we use the fully qualified typename?
        publishing.default_style = cat.get_style(name)
        _log('default style set to %s', name)
        cat.save(publishing)

    _log('Creating Django record for [%s]', name)
    resource = import_session.tasks[0].items[0].resource
    target = import_session.tasks[0].target
    upload_session.set_target(target)
    typename = "%s:%s" % (target.workspace.name, resource.name)
    layer_uuid = str(uuid.uuid1())

    title = upload_session.layer_title
    abstract = upload_session.layer_abstract

    # @todo hacking - any cached layers might cause problems (maybe
    # delete hook on layer should fix this?)
    cat._cache.clear()

    storeType = 'dataStore' if target.resource_type == 'featureType' else 'coverageStore'
    saved_layer, created = Layer.objects.get_or_create(
        name=resource.name,
        defaults=dict(
            store=target.name,
            storeType=storeType,
            typename=typename,
            workspace=target.workspace.name,
            title=title or resource.title,
            uuid=layer_uuid,
            abstract=abstract or '',
            owner=user,
        )
    )

    # Should we throw a clearer error here?
    assert saved_layer is not None

    # @todo if layer was not created, need to ensure upload target is
    # same as existing target
    _log('layer was created : %s', created)

    if created:
        saved_layer.set_default_permissions()

    # Create the points of contact records for the layer
    # A user without a profile might be uploading this
    _log('Creating points of contact records for [%s]', name)
    poc_contact, __ = Profile.objects.get_or_create(
        user=user, defaults={"name": user.username})
    author_contact, __ = Profile.objects.get_or_create(
        user=user, defaults={"name": user.username})
    saved_layer.poc = poc_contact
    saved_layer.metadata_author = author_contact

    # look for xml
    xml_file = find_file_re(upload_session.base_file, '.*\.xml')
    if xml_file:
        saved_layer.metadata_uploaded = True
        # get model properties from XML
        vals, keywords = set_metadata(open(xml_file[0]).read())

        # set taggit keywords
        saved_layer.keywords.add(*keywords)

        # set model properties
        for (key, value) in vals.items():
            setattr(saved_layer, key, value)

        saved_layer.save()

    # Set default permissions on the newly created layer
    # FIXME: Do this as part of the post_save hook
    permissions = upload_session.permissions
    _log('Setting default permissions for [%s]', name)
    if permissions is not None:
        layer_set_permissions(saved_layer, permissions)

    _log('Verifying the layer [%s] was created correctly' % name)

    # Verify the object was saved to the Django database
    # @revisit - this should always work since we just created it above and the
    # message is confusing
    try:
        Layer.objects.get(name=name)
    except Layer.DoesNotExist, e:
        msg = ('There was a problem saving the layer %s to Django. Error is: %s' % (name, str(e)))
        logger.exception(msg)
        logger.debug('Attempting to clean up after failed save for layer [%s]', name)
        # Since the layer creation was not successful, we need to clean up
        # @todo implement/test cleanup
        # cleanup(name, layer_uuid)
        raise GeoNodeException(msg)

    # Verify it is correctly linked to GeoServer
    _log('Verifying the layer [%s] was created in GeoServer correctly' % name)
    try:
        saved_layer.verify()
    except GeoNodeException, e:
        msg = ('The layer [%s] was not correctly saved to GeoServer. Error is: %s' % (name, str(e)))
        logger.exception(msg)
        e.args = (msg,)
        # Deleting the layer
        saved_layer.delete()
        raise

    if upload_session.tempdir and os.path.exists(upload_session.tempdir):
        shutil.rmtree(upload_session.tempdir)

    signals.upload_complete.send(sender=final_step, layer=saved_layer)

    upload = Upload.objects.get(import_id=upload_session.import_session.id)
    _log('Reloading Import session for [%s] one last time' % name)
    import_session = import_session.reload()
    upload.import_session = import_session
    upload.layer = saved_layer
    upload.save()

    return saved_layer
gpl-3.0
robinro/ansible
lib/ansible/modules/system/java_cert.py
55
10054
#!/usr/bin/python
#
# (c) 2013, RSD Services S.A
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.

ANSIBLE_METADATA = {'metadata_version': '1.0',
                    'status': ['preview'],
                    'supported_by': 'community'}

DOCUMENTATION = '''
---
module: java_cert
version_added: '2.3'
short_description: Uses keytool to import/remove key from java keystore(cacerts)
description:
  - This is a wrapper module around keytool. Which can be used to import/remove
    certificates from a given java keystore.
options:
  cert_url:
    description:
      - Basic URL to fetch SSL certificate from. One of cert_url or cert_path is required to load certificate.
  cert_port:
    description:
      - Port to connect to URL. This will be used to create server URL:PORT
    default: 443
  cert_path:
    description:
      - Local path to load certificate from. One of cert_url or cert_path is required to load certificate.
  cert_alias:
    description:
      - Imported certificate alias.
  keystore_path:
    description:
      - Path to keystore.
  keystore_pass:
    description:
      - Keystore password.
    required: true
  keystore_create:
    description:
      - Create keystore if it doesn't exist
  executable:
    description:
      - Path to keytool binary if not used we search in PATH for it.
    default: keytool
  state:
    description:
      - Defines action which can be either certificate import or removal.
    choices: [ 'present', 'absent' ]
    default: present
author: Adam Hamsik @haad
'''

EXAMPLES = '''
# Import SSL certificate from google.com to a given cacerts keystore
java_cert:
  cert_url: google.com
  cert_port: 443
  keystore_path: /usr/lib/jvm/jre7/lib/security/cacerts
  keystore_pass: changeit
  state: present

# Remove certificate with given alias from a keystore
java_cert:
  cert_url: google.com
  keystore_path: /usr/lib/jvm/jre7/lib/security/cacerts
  keystore_pass: changeit
  executable: /usr/lib/jvm/jre7/bin/keytool
  state: absent

# Import SSL certificate from google.com to a keystore,
# create it if it doesn't exist
java_cert:
  cert_url: google.com
  keystore_path: /tmp/cacerts
  keystore_pass: changeit
  keystore_create: yes
  state: present
'''

RETURN = '''
msg:
  description: Output from stdout of keytool command after execution of given command.
  returned: success
  type: string
  sample: "Module require existing keystore at keystore_path '/tmp/test/cacerts'"

rc:
  description: Keytool command execution return value
  returned: success
  type: int
  sample: "0"

cmd:
  description: Executed command to get action done
  returned: success
  type: string
  sample: "keytool -importcert -noprompt -keystore"
'''

import os

# import module snippets
from ansible.module_utils.basic import AnsibleModule


def check_cert_present(module, executable, keystore_path, keystore_pass, alias):
    ''' Check if certificate with alias is present in keystore
        located at keystore_path '''
    test_cmd = ("%s -noprompt -list -keystore '%s' -storepass '%s' "
                "-alias '%s'") % (executable, keystore_path, keystore_pass, alias)

    (check_rc, _, _) = module.run_command(test_cmd)
    # keytool -list exits 0 only when the alias exists in the keystore.
    return check_rc == 0


def import_cert_url(module, executable, url, port, keystore_path, keystore_pass, alias):
    ''' Import certificate from URL into keystore located at keystore_path '''
    fetch_cmd = ("%s -printcert -rfc -sslserver %s:%d") % (executable, url, port)
    import_cmd = ("%s -importcert -noprompt -keystore '%s' "
                  "-storepass '%s' -alias '%s'") % (executable, keystore_path,
                                                    keystore_pass, alias)

    if module.check_mode:
        module.exit_json(changed=True)

    # Fetch SSL certificate from remote host.
    (_, fetch_out, _) = module.run_command(fetch_cmd, check_rc=True)

    # Use remote certificate from remote host and import it to a java keystore
    (import_rc, import_out, import_err) = module.run_command(import_cmd,
                                                             data=fetch_out,
                                                             check_rc=False)
    diff = {'before': '\n', 'after': '%s\n' % alias}
    if import_rc == 0:
        return module.exit_json(changed=True, msg=import_out,
                                rc=import_rc, cmd=import_cmd, stdout=import_out,
                                diff=diff)
    else:
        return module.fail_json(msg=import_out, rc=import_rc, cmd=import_cmd,
                                error=import_err)


def import_cert_path(module, executable, path, keystore_path, keystore_pass, alias):
    ''' Import certificate from path into keystore located on
        keystore_path as alias '''
    import_cmd = ("%s -importcert -noprompt -keystore '%s' "
                  "-storepass '%s' -file '%s' -alias '%s'") % (executable,
                                                               keystore_path,
                                                               keystore_pass,
                                                               path, alias)

    if module.check_mode:
        module.exit_json(changed=True)

    # Use local certificate from local path and import it to a java keystore
    (import_rc, import_out, import_err) = module.run_command(import_cmd,
                                                             check_rc=False)

    diff = {'before': '\n', 'after': '%s\n' % alias}
    if import_rc == 0:
        # Bug fix: stderr ('error') used to be reported on the success path
        # and omitted from the failure path; now mirrors import_cert_url().
        return module.exit_json(changed=True, msg=import_out,
                                rc=import_rc, cmd=import_cmd, stdout=import_out,
                                diff=diff)
    else:
        return module.fail_json(msg=import_out, rc=import_rc, cmd=import_cmd,
                                error=import_err)


def delete_cert(module, executable, keystore_path, keystore_pass, alias):
    ''' Delete certificate identified with alias from keystore on keystore_path '''
    del_cmd = ("%s -delete -keystore '%s' -storepass '%s' "
               "-alias '%s'") % (executable, keystore_path, keystore_pass, alias)

    if module.check_mode:
        module.exit_json(changed=True)

    # Delete SSL certificate from keystore
    (del_rc, del_out, del_err) = module.run_command(del_cmd, check_rc=True)

    diff = {'before': '%s\n' % alias, 'after': None}

    return module.exit_json(changed=True, msg=del_out,
                            rc=del_rc, cmd=del_cmd, stdout=del_out,
                            error=del_err, diff=diff)


def test_keytool(module, executable):
    ''' Test if keytool is actually executable or not '''
    test_cmd = "%s" % (executable)

    # check_rc=True makes the module fail with keytool's output if it
    # cannot be executed.
    module.run_command(test_cmd, check_rc=True)


def test_keystore(module, keystore_path):
    ''' Check if we can access keystore as file or not '''
    if keystore_path is None:
        keystore_path = ''

    # isfile() is False for both missing paths and directories, so this
    # single check replaces the redundant exists()+isfile() pair and also
    # rejects directories, which keytool cannot use as a keystore.
    if not os.path.isfile(keystore_path):
        ## Keystore doesn't exist we want to create it
        return module.fail_json(changed=False,
                                msg="Module require existing keystore at keystore_path '%s'"
                                    % (keystore_path))


def main():
    ''' Parse module arguments and dispatch to the import/delete helpers. '''
    argument_spec = dict(
        cert_url=dict(type='str'),
        cert_path=dict(type='str'),
        cert_alias=dict(type='str'),
        cert_port=dict(default='443', type='int'),
        keystore_path=dict(type='str'),
        keystore_pass=dict(required=True, type='str', no_log=True),
        keystore_create=dict(default=False, type='bool'),
        executable=dict(default='keytool', type='str'),
        state=dict(default='present', choices=['present', 'absent'])
    )

    module = AnsibleModule(
        argument_spec=argument_spec,
        required_one_of=[['cert_path', 'cert_url']],
        required_together=[['keystore_path', 'keystore_pass']],
        mutually_exclusive=[
            ['cert_url', 'cert_path']
        ],
        supports_check_mode=True,
    )

    url = module.params.get('cert_url')
    path = module.params.get('cert_path')
    port = module.params.get('cert_port')

    cert_alias = module.params.get('cert_alias') or url

    keystore_path = module.params.get('keystore_path')
    keystore_pass = module.params.get('keystore_pass')
    keystore_create = module.params.get('keystore_create')
    executable = module.params.get('executable')
    state = module.params.get('state')

    if path and not cert_alias:
        # Bug fix: the message used to interpolate keystore_path although it
        # is about the local certificate path.
        module.fail_json(changed=False,
                         msg="Using local path import from %s requires alias argument."
                             % (path))

    test_keytool(module, executable)

    if not keystore_create:
        test_keystore(module, keystore_path)

    cert_present = check_cert_present(module, executable, keystore_path,
                                      keystore_pass, cert_alias)

    if state == 'absent':
        if cert_present:
            delete_cert(module, executable, keystore_path, keystore_pass, cert_alias)

    elif state == 'present':
        if not cert_present:
            if path:
                import_cert_path(module, executable, path, keystore_path,
                                 keystore_pass, cert_alias)

            if url:
                import_cert_url(module, executable, url, port, keystore_path,
                                keystore_pass, cert_alias)

    module.exit_json(changed=False)


if __name__ == "__main__":
    main()
gpl-3.0
ojengwa/oh-mainline
vendor/packages/gdata/samples/oauth/2_legged_oauth.py
128
2463
#!/usr/bin/python
#
# Copyright (C) 2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Sample script (Python 2): demonstrates 2-legged OAuth access to the
# Contacts Data API and the Documents List Data API on behalf of domain
# users. Performs live network calls when run; the credentials and the
# requestor ids below are placeholders to be replaced with real values.

__author__ = 'e.bidelman (Eric Bidelman)'

import gdata.contacts
import gdata.contacts.service
import gdata.docs
import gdata.docs.service

# 2-legged OAuth consumer credentials for the Google Apps domain.
CONSUMER_KEY = 'yourdomain.com'
CONSUMER_SECRET = 'YOUR_CONSUMER_KEY'
SIG_METHOD = gdata.auth.OAuthSignatureMethod.HMAC_SHA1

# Domain user the requests are made on behalf of.
requestor_id = 'any.user@yourdomain.com'

# Contacts Data API ============================================================
contacts = gdata.contacts.service.ContactsService()
contacts.SetOAuthInputParameters(
    SIG_METHOD, CONSUMER_KEY, consumer_secret=CONSUMER_SECRET,
    two_legged_oauth=True, requestor_id=requestor_id)

# GET - fetch user's contact list
print "\nList of contacts for %s:" % (requestor_id,)
feed = contacts.GetContactsFeed()
for entry in feed.entry:
  print entry.title.text

# GET - fetch another user's contact list
# (2-legged OAuth: switching requestor_id changes whose data is fetched,
# no per-user token is needed)
requestor_id = 'another_user@yourdomain.com'
print "\nList of contacts for %s:" % (requestor_id,)
contacts.GetOAuthInputParameters().requestor_id = requestor_id
feed = contacts.GetContactsFeed()
for entry in feed.entry:
  print entry.title.text

# Google Documents List Data API ===============================================
docs = gdata.docs.service.DocsService()
docs.SetOAuthInputParameters(
    SIG_METHOD, CONSUMER_KEY, consumer_secret=CONSUMER_SECRET,
    two_legged_oauth=True, requestor_id=requestor_id)

# POST - upload a document
print "\nUploading document to %s's Google Documents account:" % (requestor_id,)
ms = gdata.MediaSource(
    file_path='/path/to/test.txt',
    content_type=gdata.docs.service.SUPPORTED_FILETYPES['TXT'])
entry = docs.UploadDocument(ms, 'Company Perks')
print 'Document now accessible online at:', entry.GetAlternateLink().href

# GET - fetch user's document list
# (comment relocated: in the original it sat above the upload call)
print "\nList of Google Documents for %s" % (requestor_id,)
feed = docs.GetDocumentListFeed()
for entry in feed.entry:
  print entry.title.text
agpl-3.0
garrettcap/Bulletproof-Backup
wx/lib/agw/foldpanelbar.py
3
79179
# --------------------------------------------------------------------------- # # FOLDPANELBAR wxPython IMPLEMENTATION # Ported From Jorgen Bodde & Julian Smart (Extended Demo) C++ Code By: # # Andrea Gavana, @ 23 Mar 2005 # Latest Revision: 14 Mar 2012, 21.00 GMT # # # TODO List # # All The C++ TODOs Are Still Alive. I Am Not Able to Read Jorges's Mind # So I Don't Really Know What Will Be The New Features/Additions He Will # Make On His Code. At The Moment They Are: # # 1. OnPaint Function In CaptionBar Class: # TODO: Maybe First A Memory Dc Should Draw All, And Then Paint It On The # Caption. This Way A Flickering Arrow During Resize Is Not Visible. # # 2. OnChar Function In CaptionBar Class: # TODO: This Is Easy To Do But I Don't Have Any Useful Idea On Which Kind # Of Features To Add. Does Anyone Have An Intelligent Idea? # # 3. AddFoldPanelWindow Function In FoldPanelBar Class: # TODO: Take Old And New Heights, And If Difference, Reposition All The # Lower Panels. This Is Because The User Can Add New wxWindow Controls # Somewhere In Between When Other Panels Are Already Present. # Don't Know What It Means. Probably Is My Poor English... # # 4. OnSizePanel Function In FoldPanelBar Class: # TODO: A Smart Way To Check Wether The Old - New Width Of The # Panel Changed, If So No Need To Resize The Fold Panel Items # # # DONE List: # # 1. Implemented Styles Like FPB_SINGLE_FOLD and FPB_EXCLUSIVE_FOLD # Thanks To E. A. Tacao For His Nice Suggestions. # # 2. Added Some Maquillage To FoldPanelBar: When The Mouse Enters The Icon # Region, It Is Changed To wx.CURSOR_HAND. # # # For The Original TODO List From Jorgen, Please Refer To: # http://www.solidsteel.nl/jorg/components/foldpanel/wxFoldPanelBar.php#todo_list # # # # For All Kind Of Problems, Requests Of Enhancements And Bug Reports, Please # Write To Me At: # # andrea.gavana@gmail.com # andrea.gavana@maerskoil.com # # Or, Obviously, To The wxPython Mailing List!!! 
# # # End Of Comments # --------------------------------------------------------------------------- # """ :class:`FoldPanelBar` is a control that contains multiple panels, which can be expanded or collapsed. Description =========== The :class:`FoldPanelBar` is a control that contains multiple panels (of type :class:`FoldPanelItem`) that can be expanded or collapsed. The captionbar of the :class:`FoldPanelBar` can be customized by setting it to a horizontal gradient style, vertical gradient style, a single colour, a rectangle or filled rectangle. The `FoldPanel` items can be collapsed in place or to the bottom of the control. :class:`Window` derived controls can be added dynamically, and separated by separator lines. How does it work ---------------- The internals of the :class:`FoldPanelBar` is a list of :class:`FoldPanelItem` objects. Through the reference of `FoldPanel` these panels can be controlled by adding new controls to a FoldPanel or adding new FoldPanels to the :class:`FoldPanelBar`. The :class:`CaptionBar` fires events to the parent (container of all panel items) when a sub-panel needs resizing (either folding or expanding). The fold or expand process is simply a resize of the panel so it looks like all controls on it are gone. All controls are still child of the `FoldPanel` they are located on. If they don't handle the event (and they won't) then the owner of the :class:`FoldPanelBar` gets the events. This is what you need to handle the controls. There isn't much to it just a lot of calculations to see what panel belongs where. There are no sizers involved in the panels, everything is purely x-y positioning. What can it do and what not? 
---------------------------- a) What it can do: * Run-time addition of panels (no deletion just yet); * Run time addition of controls to the panel (it will be resized accordingly); * Creating panels in collapsed mode or expanded mode; * Various modes of caption behaviour and filling to make it more appealing; * Panels can be folded and collapsed (or all of them) to allow more space. b) What it cannot do: * Selection of a panel like in a listctrl; * Dragging and dropping the panels; * Re-ordering the panels (not yet). Usage ===== Usage example:: import wx import wx.lib.agw.foldpanelbar as fpb class MyFrame(wx.Frame): def __init__(self, parent): wx.Frame.__init__(self, parent, -1, "FoldPanelBar Demo") text_ctrl = wx.TextCtrl(self, -1, size=(400, 100), style=wx.TE_MULTILINE) panel_bar = fpb.FoldPanelBar(self, -1, agwStyle=fpb.FPB_HORIZONTAL|fpb.FPB_DEFAULT_STYLE) fold_panel = panel_bar.AddFoldPanel("Thing") thing = wx.TextCtrl(fold_panel, -1, size=(400, -1), style=wx.TE_MULTILINE) panel_bar.AddFoldPanelWindow(fold_panel, thing) main_sizer = wx.BoxSizer(wx.HORIZONTAL) main_sizer.Add(text_ctrl, 1, wx.EXPAND) main_sizer.Add(panel_bar, 0, wx.EXPAND) self.SetSizer(main_sizer) # our normal wxApp-derived class, as usual app = wx.App(0) frame = MyFrame(None) app.SetTopWindow(frame) frame.Show() app.MainLoop() Supported Platforms =================== :class:`FoldPanelBar` is supported on the following platforms: * Windows (Verified on Windows XP, 2000) * Linux/Unix (GTK2) (Thanks to Toni Brkic and Robin Dunn) * Mac OSX (Thanks to Robin Dunn for the :class:`CaptionBar` size patch) Window Styles ============= This class supports the following window styles: ========================== =========== ================================================== Window Styles Hex Value Description ========================== =========== ================================================== ``FPB_SINGLE_FOLD`` 0x1 Single fold forces other panels to close when they are open, and only opens the current 
panel. This will allow the open panel to gain the full size left in the client area. ``FPB_COLLAPSE_TO_BOTTOM`` 0x2 All panels are stacked to the bottom. When they are expanded again they show up at the top. ``FPB_EXCLUSIVE_FOLD`` 0x4 ``FPB_SINGLE_FOLD`` style plus the panels will be stacked at the bottom. ``FPB_HORIZONTAL`` 0x8 :class:`FoldPanelBar` will be horizontal. ``FPB_VERTICAL`` 0x10 :class:`FoldPanelBar` will be vertical. ========================== =========== ================================================== Events Processing ================= This class processes the following events: ================== ================================================== Event Name Description ================== ================================================== ``EVT_CAPTIONBAR`` The user has pressed the caption bar: :class:`FoldPanelBar` will either expand or collapse the underlying panel. ================== ================================================== License And Version =================== :class:`FoldPanelBar` is distributed under the wxPython license. 
Latest Revision: Andrea Gavana @ 14 Mar 2012, 21.00 GMT Version 0.5 """ import wx #---------------------------------------------------------------------- # Collapsed And Expanded Bitmap Images # Created With img2py.py #---------------------------------------------------------------------- from wx.lib.embeddedimage import PyEmbeddedImage CollapsedIcon = PyEmbeddedImage( "iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAABHNCSVQICAgIfAhkiAAAADdJ" "REFUOI1jZGRiZqAEMFGke/Ab8P/f3/8D5wKY7YRcQRsXoNuKzxXUdwEu23CJU+wCxtG8wAAA" "mvUb+vltJD8AAAAASUVORK5CYII=") ExpandedIcon = PyEmbeddedImage( "iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAABHNCSVQICAgIfAhkiAAAAEJJ" "REFUOI1jZGRiZqAEMFGke1AYwIIu8P/f3/+4FDMyMTNS3QUYBmCzBZ84bQIR3TZcttPOBci2" "4rOdKi5gHM0LDACevARXGc9htQAAAABJRU5ErkJggg==") #---------------------------------------------------------------------- # FOLDPANELBAR Starts Here #---------------------------------------------------------------------- # CAPTIONBAR STYLES # #- CAPTIONBAR_GRADIENT_V: Draws a vertical gradient from top to bottom #- CAPTIONBAR_GRADIENT_H: Draws a horizontal gradient from left to right #- CAPTIONBAR_SINGLE: Draws a single filled rectangle to draw the caption #- CAPTIONBAR_RECTANGLE: Draws a single colour with a rectangle around the caption #- CAPTIONBAR_FILLED_RECTANGLE: Draws a filled rectangle and a border around it CAPTIONBAR_NOSTYLE = 0 """ The :class:`CaptionBar` has no style bit set. """ CAPTIONBAR_GRADIENT_V = 1 """ Draws a vertical gradient from top to bottom. """ CAPTIONBAR_GRADIENT_H = 2 """ Draws a vertical gradient from left to right. """ CAPTIONBAR_SINGLE = 3 """ Draws a single filled rectangle to draw the caption. """ CAPTIONBAR_RECTANGLE = 4 """ Draws a single colour with a rectangle around the caption. """ CAPTIONBAR_FILLED_RECTANGLE = 5 """ Draws a filled rectangle and a border around it. """ FPB_EXTRA_X = 10 """ Extra horizontal padding, in pixels. """ FPB_EXTRA_Y = 4 """ Extra vertical padding, in pixels. 
""" # pixels of the bmp to be aligned from the right filled with space FPB_BMP_RIGHTSPACE = 2 """ Pixels of the bmp to be aligned from the right filled with space. """ # Now supported! Single fold forces # other panels to close when they are open, and only opens the current panel. # This will allow the open panel to gain the full size left in the client area FPB_SINGLE_FOLD = 0x0001 """ Single fold forces other panels to close when they are open, and only opens the current panel. This will allow the open panel to gain the full size left in the client area.""" # All panels are stacked to the bottom. When they are expanded again they # show up at the top FPB_COLLAPSE_TO_BOTTOM = 0x0002 """ All panels are stacked to the bottom. When they are expanded again they show up at the top. """ # Now supported! Single fold plus panels # will be stacked at the bottom FPB_EXCLUSIVE_FOLD = 0x0004 """ ``FPB_SINGLE_FOLD`` style plus the panels will be stacked at the bottom. """ # Orientation Flag FPB_HORIZONTAL = 0x0008 """ :class:`FoldPanelBar` will be horizontal. """ FPB_VERTICAL = 0x0010 """ :class:`FoldPanelBar` will be vertical. """ # FoldPanelItem default settings FPB_ALIGN_LEFT = 0 """ Aligns left instead of fitting the width of the child window to be added. Use either this one or ``FPB_ALIGN_WIDTH``. """ FPB_ALIGN_WIDTH = 1 """ The :class:`Window` to be added will be aligned to fit the width of the FoldPanel when it is resized. Very handy for sizer items, buttons and text boxes. 
""" FPB_DEFAULT_LEFTSPACING = 5 FPB_DEFAULT_RIGHTSPACING = 10 FPB_DEFAULT_SPACING = 8 FPB_DEFAULT_LEFTLINESPACING = 2 FPB_DEFAULT_RIGHTLINESPACING = 2 # ------------------------------------------------------------------------------ # # class CaptionBarStyle # ------------------------------------------------------------------------------ # class CaptionBarStyle(object): """ This class encapsulates the styles you wish to set for the :class:`CaptionBar` (this is the part of the `FoldPanel` where the caption is displayed). It can either be applied at creation time be reapplied when styles need to be changed. At construction time, all styles are set to their default transparency. This means none of the styles will be applied to the :class:`CaptionBar` in question, meaning it will be created using the default internals. When setting i.e the colour, font or panel style, these styles become active to be used. """ def __init__(self): """ Default constructor for this class. """ self.ResetDefaults() def ResetDefaults(self): """ Resets default :class:`CaptionBarStyle`. """ self._firstColourUsed = False self._secondColourUsed = False self._textColourUsed = False self._captionFontUsed = False self._captionStyleUsed = False self._captionStyle = CAPTIONBAR_GRADIENT_V # ------- CaptionBar Font ------- def SetCaptionFont(self, font): """ Sets font for the caption bar. :param `font`: a valid :class:`Font` object. :note: If this is not set, the font property is undefined and will not be used. Use :meth:`~CaptionBarStyle.CaptionFontUsed` to check if this style is used. """ self._captionFont = font self._captionFontUsed = True def CaptionFontUsed(self): """ Checks if the caption bar font is set. """ return self._captionFontUsed def GetCaptionFont(self): """ Returns the font for the caption bar. :note: Please be warned this will result in an assertion failure when this property is not previously set. 
:see: :meth:`~CaptionBarStyle.SetCaptionFont`, :meth:`~CaptionBarStyle.CaptionFontUsed` """ return self._captionFont # ------- First Colour ------- def SetFirstColour(self, colour): """ Sets first colour for the caption bar. :param `colour`: a valid :class:`Colour` object. :note: If this is not set, the colour property is undefined and will not be used. Use :meth:`~CaptionBarStyle.FirstColourUsed` to check if this style is used. """ self._firstColour = colour self._firstColourUsed = True def FirstColourUsed(self): """ Checks if the first colour of the caption bar is set.""" return self._firstColourUsed def GetFirstColour(self): """ Returns the first colour for the caption bar. :note: Please be warned this will result in an assertion failure when this property is not previously set. :see: :meth:`~CaptionBarStyle.SetFirstColour`, :meth:`~CaptionBarStyle.FirstColourUsed` """ return self._firstColour # ------- Second Colour ------- def SetSecondColour(self, colour): """ Sets second colour for the caption bar. :param `colour`: a valid :class:`Colour` object. :note: If this is not set, the colour property is undefined and will not be used. Use :meth:`~CaptionBarStyle.SecondColourUsed` to check if this style is used. """ self._secondColour = colour self._secondColourUsed = True def SecondColourUsed(self): """ Checks if the second colour of the caption bar is set.""" return self._secondColourUsed def GetSecondColour(self): """ Returns the second colour for the caption bar. :note: Please be warned this will result in an assertion failure when this property is not previously set. :see: :meth:`~CaptionBarStyle.SetSecondColour`, :meth:`~CaptionBarStyle.SecondColourUsed` """ return self._secondColour # ------- Caption Text Colour ------- def SetCaptionColour(self, colour): """ Sets caption colour for the caption bar. :param `colour`: a valid :class:`Colour` object. :note: If this is not set, the colour property is undefined and will not be used. 
Use :meth:`~CaptionBarStyle.CaptionColourUsed` to check if this style is used. """ self._textColour = colour self._textColourUsed = True def CaptionColourUsed(self): """ Checks if the caption colour of the caption bar is set.""" return self._textColourUsed def GetCaptionColour(self): """ Returns the caption colour for the caption bar. :note: Please be warned this will result in an assertion failure when this property is not previously set. :see: :meth:`~CaptionBarStyle.SetCaptionColour`, :meth:`~CaptionBarStyle.CaptionColourUsed` """ return self._textColour # ------- CaptionStyle ------- def SetCaptionStyle(self, style): """ Sets caption style for the caption bar. :param `style`: can be one of the following bits: =============================== ======= ============================= Caption Style Value Description =============================== ======= ============================= ``CAPTIONBAR_GRADIENT_V`` 1 Draws a vertical gradient from top to bottom ``CAPTIONBAR_GRADIENT_H`` 2 Draws a horizontal gradient from left to right ``CAPTIONBAR_SINGLE`` 3 Draws a single filled rectangle to draw the caption ``CAPTIONBAR_RECTANGLE`` 4 Draws a single colour with a rectangle around the caption ``CAPTIONBAR_FILLED_RECTANGLE`` 5 Draws a filled rectangle and a border around it =============================== ======= ============================= :note: If this is not set, the property is undefined and will not be used. Use :meth:`~CaptionBarStyle.CaptionStyleUsed` to check if this style is used. """ self._captionStyle = style self._captionStyleUsed = True def CaptionStyleUsed(self): """ Checks if the caption style of the caption bar is set.""" return self._captionStyleUsed def GetCaptionStyle(self): """ Returns the caption style for the caption bar. :note: Please be warned this will result in an assertion failure when this property is not previously set. 
:see: :meth:`~CaptionBarStyle.SetCaptionStyle`, :meth:`~CaptionBarStyle.CaptionStyleUsed` """ return self._captionStyle #-----------------------------------# # CaptionBarEvent #-----------------------------------# wxEVT_CAPTIONBAR = wx.NewEventType() EVT_CAPTIONBAR = wx.PyEventBinder(wxEVT_CAPTIONBAR, 0) """ The user has pressed the caption bar: :class:`FoldPanelBar` will either expand or""" \ """ collapse the underlying panel. """ # Define Empty CaptionBar Style EmptyCaptionBarStyle = CaptionBarStyle() # ---------------------------------------------------------------------------- # # class CaptionBarEvent # ---------------------------------------------------------------------------- # class CaptionBarEvent(wx.PyCommandEvent): """ This event will be sent when a ``EVT_CAPTIONBAR`` is mapped in the parent. It is to notify the parent that the bar is now in collapsed or expanded state. The parent should re-arrange the associated windows accordingly """ def __init__(self, evtType): """ Default class constructor. :param `evtType`: the event type. """ wx.PyCommandEvent.__init__(self, evtType) def GetFoldStatus(self): """ Returns whether the bar is expanded or collapsed. ``True`` means expanded. """ return not self._bar.IsCollapsed() def GetBar(self): """ Returns the selected :class:`CaptionBar`. """ return self._bar def SetTag(self, tag): """ Assigns a tag to the selected :class:`CaptionBar`. :param `tag`: an instance of :class:`FoldPanelBar`. """ self._tag = tag def GetTag(self): """ Returns the tag assigned to the selected :class:`CaptionBar`. """ return self._tag def SetBar(self, bar): """ Sets the bar associated with this event. :param `bar`: an instance of :class:`CaptionBar`. :note: Should not be used by any other than the originator of the event. 
""" self._bar = bar # -------------------------------------------------------------------------------- # # class CaptionBar # -------------------------------------------------------------------------------- # class CaptionBar(wx.Window): """ This class is a graphical caption component that consists of a caption and a clickable arrow. The :class:`CaptionBar` fires an event ``EVT_CAPTIONBAR`` which is a :class:`CaptionBarEvent`. This event can be caught and the parent window can act upon the collapsed or expanded state of the bar (which is actually just the icon which changed). The parent panel can reduce size or expand again. """ def __init__(self, parent, id, pos, size, caption="", foldIcons=None, cbstyle=None, rightIndent=FPB_BMP_RIGHTSPACE, iconWidth=16, iconHeight=16, collapsed=False): """ Default class constructor. :param `parent`: the :class:`CaptionBar` parent window; :param `id`: an identifier for the control: a value of -1 is taken to mean a default; :param `pos`: the control position. A value of (-1, -1) indicates a default position, chosen by either the windowing system or wxPython, depending on platform; :param `size`: the control size. A value of (-1, -1) indicates a default size, chosen by either the windowing system or wxPython, depending on platform; :param `caption`: the string to be displayed in :class:`CaptionBar`; :param `foldIcons`: an instance of :class:`ImageList` containing the icons to display next to the caption text; :param `cbstyle`: the :class:`CaptionBar` window style. Must be an instance of :class:`CaptionBarStyle`; :param `rightIndent`: number of pixels of the bmp to be aligned from the right filled with space; :param `iconWidth`: the :class:`CaptionBar` icon width; :param `iconHeight`: the :class:`CaptionBar` icon height; :param `collapsed`: ``True`` if the :class:`CaptionBar` should start in the collapsed state, ``False`` otherwise. 
""" wx.Window.__init__(self, parent, wx.ID_ANY, pos=pos, size=(20,20), style=wx.NO_BORDER) self._controlCreated = False self._collapsed = collapsed self.ApplyCaptionStyle(cbstyle, True) if foldIcons is None: foldIcons = wx.ImageList(16, 16) bmp = ExpandedIcon.GetBitmap() foldIcons.Add(bmp) bmp = CollapsedIcon.GetBitmap() foldIcons.Add(bmp) # set initial size if foldIcons: assert foldIcons.GetImageCount() > 1 iconWidth, iconHeight = foldIcons.GetSize(0) self._caption = caption self._foldIcons = foldIcons self._style = cbstyle self._rightIndent = rightIndent self._iconWidth = iconWidth self._iconHeight = iconHeight self._oldSize = wx.Size(20,20) self._controlCreated = True self.Bind(wx.EVT_PAINT, self.OnPaint) self.Bind(wx.EVT_SIZE, self.OnSize) self.Bind(wx.EVT_MOUSE_EVENTS, self.OnMouseEvent) self.Bind(wx.EVT_CHAR, self.OnChar) def ApplyCaptionStyle(self, cbstyle=None, applyDefault=True): """ Applies the style defined in `cbstyle` to the :class:`CaptionBar`. :param `cbstyle`: an instance of :class:`CaptionBarStyle`; :param `applyDefault`: if ``True``, the colours used in the :class:`CaptionBarStyle` will be reset to their default values. 
""" if cbstyle is None: cbstyle = EmptyCaptionBarStyle newstyle = cbstyle if applyDefault: # get first colour from style or make it default if not newstyle.FirstColourUsed(): newstyle.SetFirstColour(wx.WHITE) # get second colour from style or make it default if not newstyle.SecondColourUsed(): # make the second colour slightly darker then the background colour = self.GetParent().GetBackgroundColour() r, g, b = int(colour.Red()), int(colour.Green()), int(colour.Blue()) colour = ((r >> 1) + 20, (g >> 1) + 20, (b >> 1) + 20) newstyle.SetSecondColour(wx.Colour(colour[0], colour[1], colour[2])) # get text colour if not newstyle.CaptionColourUsed(): newstyle.SetCaptionColour(wx.BLACK) # get font colour if not newstyle.CaptionFontUsed(): newstyle.SetCaptionFont(self.GetParent().GetFont()) # apply caption style if not newstyle.CaptionStyleUsed(): newstyle.SetCaptionStyle(CAPTIONBAR_GRADIENT_V) self._style = newstyle def SetCaptionStyle(self, cbstyle=None, applyDefault=True): """ Sets :class:`CaptionBar` styles with :class:`CaptionBarStyle` class. :param `cbstyle`: an instance of :class:`CaptionBarStyle`; :param `applyDefault`: if ``True``, the colours used in the :class:`CaptionBarStyle` will be reset to their default values. :note: All styles that are actually set, are applied. If you set `applyDefault` to ``True``, all other (not defined) styles will be set to default. If it is ``False``, the styles which are not set in the :class:`CaptionBarStyle` will be ignored. """ if cbstyle is None: cbstyle = EmptyCaptionBarStyle self.ApplyCaptionStyle(cbstyle, applyDefault) self.Refresh() def GetCaptionStyle(self): """ Returns the current style of the captionbar in a :class:`CaptionBarStyle` class. :note: This can be used to change and set back the changes. """ return self._style def IsCollapsed(self): """ Returns wether the status of the bar is expanded or collapsed. 
""" return self._collapsed def SetRightIndent(self, pixels): """ Sets the amount of pixels on the right from which the bitmap is trailing. :param `pixels`: the number of pixels on the right from which the bitmap is trailing. If this is 0, it will be drawn all the way to the right, default is equal to ``FPB_BMP_RIGHTSPACE``. Assign this before assigning an image list to prevent a redraw. """ assert pixels >= 0 self._rightIndent = pixels if self._foldIcons: self.Refresh() def Collapse(self): """ This sets the internal state/representation to collapsed. :note: This does not trigger a :class:`CaptionBarEvent` to be sent to the parent. """ self._collapsed = True self.RedrawIconBitmap() def Expand(self): """ This sets the internal state/representation to expanded. :note: This does not trigger a :class:`CaptionBarEvent` to be sent to the parent. """ self._collapsed = False self.RedrawIconBitmap() def SetBoldFont(self): """ Sets the :class:`CaptionBar` font weight to bold.""" self.GetFont().SetWeight(wx.BOLD) def SetNormalFont(self): """ Sets the :class:`CaptionBar` font weight to normal.""" self.GetFont().SetWeight(wx.NORMAL) def IsVertical(self): """ Returns wether the :class:`CaptionBar` has a default orientation or not. Default is vertical. """ fld = self.GetParent().GetGrandParent() if isinstance(fld, FoldPanelBar): return self.GetParent().GetGrandParent().IsVertical() else: raise Exception("ERROR: Wrong Parent " + repr(fld)) def OnPaint(self, event): """ Handles the ``wx.EVT_PAINT`` event for :class:`CaptionBar`. :param `event`: a :class:`PaintEvent` event to be processed. """ if not self._controlCreated: event.Skip() return dc = wx.PaintDC(self) wndRect = self.GetRect() vertical = self.IsVertical() # TODO: Maybe first a memory DC should draw all, and then paint it on # the caption. 
This way a flickering arrow during resize is not visible self.FillCaptionBackground(dc) dc.SetFont(self._style.GetCaptionFont()) dc.SetTextForeground(self._style.GetCaptionColour()) if vertical: dc.DrawText(self._caption, 4, FPB_EXTRA_Y/2) else: dc.DrawRotatedText(self._caption, FPB_EXTRA_Y/2, wndRect.GetBottom() - 4, 90) # draw small icon, either collapsed or expanded # based on the state of the bar. If we have any bmp's if self._foldIcons: index = self._collapsed if vertical: drw = wndRect.GetRight() - self._iconWidth - self._rightIndent self._foldIcons.Draw(index, dc, drw, (wndRect.GetHeight() - self._iconHeight)/2, wx.IMAGELIST_DRAW_TRANSPARENT) else: self._foldIcons.Draw(index, dc, (wndRect.GetWidth() - self._iconWidth)/2, self._rightIndent, wx.IMAGELIST_DRAW_TRANSPARENT) ## event.Skip() def FillCaptionBackground(self, dc): """ Fills the background of the caption with either a gradient or a solid colour. :param `dc`: an instance of :class:`DC`. """ style = self._style.GetCaptionStyle() if style == CAPTIONBAR_GRADIENT_V: if self.IsVertical(): self.DrawVerticalGradient(dc, self.GetRect()) else: self.DrawHorizontalGradient(dc, self.GetRect()) elif style == CAPTIONBAR_GRADIENT_H: if self.IsVertical(): self.DrawHorizontalGradient(dc, self.GetRect()) else: self.DrawVerticalGradient(dc, self.GetRect()) elif style == CAPTIONBAR_SINGLE: self.DrawSingleColour(dc, self.GetRect()) elif style == CAPTIONBAR_RECTANGLE or style == CAPTIONBAR_FILLED_RECTANGLE: self.DrawSingleRectangle(dc, self.GetRect()) else: raise Exception("STYLE Error: Undefined Style Selected: " + repr(style)) def OnMouseEvent(self, event): """ Handles the ``wx.EVT_MOUSE_EVENTS`` event for :class:`CaptionBar`. :param `event`: a :class:`MouseEvent` event to be processed. :note: This method catches the mouse click-double click. If clicked on the arrow (single) or double on the caption we change state and an event must be fired to let this panel collapse or expand. 
""" send_event = False vertical = self.IsVertical() if event.LeftDown() and self._foldIcons: pt = event.GetPosition() rect = self.GetRect() drw = (rect.GetWidth() - self._iconWidth - self._rightIndent) if vertical and pt.x > drw or not vertical and \ pt.y < (self._iconHeight + self._rightIndent): send_event = True elif event.LeftDClick(): self.SetCursor(wx.StockCursor(wx.CURSOR_ARROW)) send_event = True elif event.Entering() and self._foldIcons: pt = event.GetPosition() rect = self.GetRect() drw = (rect.GetWidth() - self._iconWidth - self._rightIndent) if vertical and pt.x > drw or not vertical and \ pt.y < (self._iconHeight + self._rightIndent): self.SetCursor(wx.StockCursor(wx.CURSOR_HAND)) else: self.SetCursor(wx.StockCursor(wx.CURSOR_ARROW)) elif event.Leaving(): self.SetCursor(wx.StockCursor(wx.CURSOR_ARROW)) elif event.Moving(): pt = event.GetPosition() rect = self.GetRect() drw = (rect.GetWidth() - self._iconWidth - self._rightIndent) if vertical and pt.x > drw or not vertical and \ pt.y < (self._iconHeight + self._rightIndent): self.SetCursor(wx.StockCursor(wx.CURSOR_HAND)) else: self.SetCursor(wx.StockCursor(wx.CURSOR_ARROW)) # send the collapse, expand event to the parent if send_event: event = CaptionBarEvent(wxEVT_CAPTIONBAR) event.SetId(self.GetId()) event.SetEventObject(self) event.SetBar(self) self.GetEventHandler().ProcessEvent(event) def OnChar(self, event): """ Handles the ``wx.EVT_CHAR`` event for :class:`CaptionBar`. :param `event`: a :class:`KeyEvent` event to be processed. :note: This method currently does nothing. """ # TODO: Anything here? event.Skip() def DoGetBestSize(self): """ Returns the best size for this panel, based upon the font assigned to this window, and the caption string. :note: Overridden from :class:`Window`. 
""" if self.IsVertical(): x, y = self.GetTextExtent(self._caption) else: y, x = self.GetTextExtent(self._caption) if x < self._iconWidth: x = self._iconWidth if y < self._iconHeight: y = self._iconHeight # TODO: The extra FPB_EXTRA_X constants should be adjustable as well return wx.Size(x + FPB_EXTRA_X, y + FPB_EXTRA_Y) def DrawVerticalGradient(self, dc, rect): """ Gradient fill from colour 1 to colour 2 from top to bottom. :param `dc`: an instance of :class:`DC`; :param `rect`: the :class:`CaptionBar` client rectangle. """ if rect.height < 1 or rect.width < 1: return dc.SetPen(wx.TRANSPARENT_PEN) # calculate gradient coefficients col2 = self._style.GetSecondColour() col1 = self._style.GetFirstColour() r1, g1, b1 = int(col1.Red()), int(col1.Green()), int(col1.Blue()) r2, g2, b2 = int(col2.Red()), int(col2.Green()), int(col2.Blue()) flrect = float(rect.height) rstep = float((r2 - r1)) / flrect gstep = float((g2 - g1)) / flrect bstep = float((b2 - b1)) / flrect rf, gf, bf = 0, 0, 0 for y in range(rect.y, rect.y + rect.height): currCol = (r1 + rf, g1 + gf, b1 + bf) dc.SetBrush(wx.Brush(currCol, wx.SOLID)) dc.DrawRectangle(rect.x, rect.y + (y - rect.y), rect.width, rect.height) rf = rf + rstep gf = gf + gstep bf = bf + bstep def DrawHorizontalGradient(self, dc, rect): """ Gradient fill from colour 1 to colour 2 from left to right. :param `dc`: an instance of :class:`DC`; :param `rect`: the :class:`CaptionBar` client rectangle. 
""" if rect.height < 1 or rect.width < 1: return dc.SetPen(wx.TRANSPARENT_PEN) # calculate gradient coefficients col2 = self._style.GetSecondColour() col1 = self._style.GetFirstColour() r1, g1, b1 = int(col1.Red()), int(col1.Green()), int(col1.Blue()) r2, g2, b2 = int(col2.Red()), int(col2.Green()), int(col2.Blue()) flrect = float(rect.width) rstep = float((r2 - r1)) / flrect gstep = float((g2 - g1)) / flrect bstep = float((b2 - b1)) / flrect rf, gf, bf = 0, 0, 0 for x in range(rect.x, rect.x + rect.width): currCol = (r1 + rf, g1 + gf, b1 + bf) dc.SetBrush(wx.Brush(currCol, wx.SOLID)) dc.DrawRectangle(rect.x + (x - rect.x), rect.y, 1, rect.height) rf = rf + rstep gf = gf + gstep bf = bf + bstep def DrawSingleColour(self, dc, rect): """ Single colour fill for :class:`CaptionBar`. :param `dc`: an instance of :class:`DC`; :param `rect`: the :class:`CaptionBar` client rectangle. """ if rect.height < 1 or rect.width < 1: return dc.SetPen(wx.TRANSPARENT_PEN) # draw simple rectangle dc.SetBrush(wx.Brush(self._style.GetFirstColour(), wx.SOLID)) dc.DrawRectangle(rect.x, rect.y, rect.width, rect.height) def DrawSingleRectangle(self, dc, rect): """ Single rectangle for :class:`CaptionBar`. :param `dc`: an instance of :class:`DC`; :param `rect`: the :class:`CaptionBar` client rectangle. 
""" if rect.height < 2 or rect.width < 1: return # single frame, set up internal fill colour if self._style.GetCaptionStyle() == CAPTIONBAR_RECTANGLE: colour = self.GetParent().GetBackgroundColour() br = wx.Brush(colour, wx.SOLID) else: colour = self._style.GetFirstColour() br = wx.Brush(colour, wx.SOLID) # setup the pen frame pen = wx.Pen(self._style.GetSecondColour()) dc.SetPen(pen) dc.SetBrush(br) dc.DrawRectangle(rect.x, rect.y, rect.width, rect.height - 1) bgpen = wx.Pen(self.GetParent().GetBackgroundColour()) dc.SetPen(bgpen) dc.DrawLine(rect.x, rect.y + rect.height - 1, rect.x + rect.width, rect.y + rect.height - 1) def OnSize(self, event): """ Handles the ``wx.EVT_SIZE`` event for :class:`CaptionBar`. :param `event`: a :class:`SizeEvent` event to be processed. """ if not self._controlCreated: event.Skip() return size = event.GetSize() if self._foldIcons: # What I am doing here is simply invalidating the part of the window # exposed. So when I make a rect with as width the newly exposed part, # and the x,y of the old window size origin, I don't need a bitmap # calculation in it, or do I ? The bitmap needs redrawing anyway. # Leave it like this until I figured it out. # set rect to redraw as old bitmap area which is entitled to redraw rect = wx.Rect(size.GetWidth() - self._iconWidth - self._rightIndent, 0, self._iconWidth + self._rightIndent, self._iconWidth + self._rightIndent) # adjust rectangle when more is slided so we need to redraw all # the old stuff but not all (ugly flickering) diffX = size.GetWidth() - self._oldSize.GetWidth() if diffX > 1: # adjust the rect with all the crap to redraw rect.SetWidth(rect.GetWidth() + diffX + 10) rect.SetX(rect.GetX() - diffX - 10) self.RefreshRect(rect) else: rect = self.GetRect() self.RefreshRect(rect) self._oldSize = size def RedrawIconBitmap(self): """ Redraws the icons (if they exists). 
""" if self._foldIcons: # invalidate the bitmap area and force a redraw rect = self.GetRect() rect.SetX(rect.GetWidth() - self._iconWidth - self._rightIndent) rect.SetWidth(self._iconWidth + self._rightIndent) self.RefreshRect(rect) # ---------------------------------------------------------------------------------- # # class FoldPanelBar # ---------------------------------------------------------------------------------- # class FoldPanelBar(wx.Panel): """ The :class:`FoldPanelBar` is a class which can maintain a list of collapsible panels. Once a panel is collapsed, only it's caption bar is visible to the user. This will provide more space for the other panels, or allow the user to close panels which are not used often to get the most out of the work area. This control is easy to use. Simply create it as a child for a panel or sash window, and populate panels with :meth:`FoldPanelBar.AddFoldPanel() <FoldPanelBar.AddFoldPanel>`. Then use the :meth:`FoldPanelBar.AddFoldPanelWindow() <FoldPanelBar.AddFoldPanelWindow>` to add :class:`Window` derived controls to the current fold panel. Use :meth:`FoldPanelBar.AddFoldPanelSeparator() <FoldPanelBar.AddFoldPanelSeparator>` to put separators between the groups of controls that need a visual separator to group them together. After all is constructed, the user can fold the panels by double clicking on the bar or single click on the arrow, which will indicate the collapsed or expanded state. """ def __init__(self, parent, id=-1, pos=wx.DefaultPosition, size=wx.DefaultSize, style=wx.TAB_TRAVERSAL|wx.NO_BORDER, agwStyle=0): """ Default class constructor. :param `parent`: the :class:`FoldPanelBar` parent window; :param `id`: an identifier for the control: a value of -1 is taken to mean a default; :param `pos`: the control position. A value of (-1, -1) indicates a default position, chosen by either the windowing system or wxPython, depending on platform; :param `size`: the control size. 
A value of (-1, -1) indicates a default size, chosen by either the windowing system or wxPython, depending on platform; :param `style`: the underlying :class:`Panel` window style; :param `agwStyle`: the AGW-specific :class:`FoldPanelBar` window style. It can be one of the following bits: ========================== =========== ================================================== Window Styles Hex Value Description ========================== =========== ================================================== ``FPB_SINGLE_FOLD`` 0x1 Single fold forces other panels to close when they are open, and only opens the current panel. This will allow the open panel to gain the full size left in the client area. ``FPB_COLLAPSE_TO_BOTTOM`` 0x2 All panels are stacked to the bottom. When they are expanded again they show up at the top. ``FPB_EXCLUSIVE_FOLD`` 0x4 ``FPB_SINGLE_FOLD`` style plus the panels will be stacked at the bottom. ``FPB_HORIZONTAL`` 0x4 :class:`FoldPanelBar` will be horizontal. ``FPB_VERTICAL`` 0x8 :class:`FoldPanelBar` will be vertical. ========================== =========== ================================================== """ self._controlCreated = False # make sure there is any orientation if not agwStyle & (FPB_HORIZONTAL | FPB_VERTICAL): agwStyle = agwStyle | FPB_VERTICAL if agwStyle & FPB_HORIZONTAL: self._isVertical = False else: self._isVertical = True self._agwStyle = agwStyle # create the panel (duh!). This causes a size event, which we are going # to skip when we are not initialised wx.Panel.__init__(self, parent, id, pos, size, style) # the fold panel area self._foldPanel = wx.Panel(self, wx.ID_ANY, pos, size, wx.NO_BORDER | wx.TAB_TRAVERSAL) self._controlCreated = True self._panels = [] self.Bind(EVT_CAPTIONBAR, self.OnPressCaption) self.Bind(wx.EVT_SIZE, self.OnSizePanel) def AddFoldPanel(self, caption="", collapsed=False, foldIcons=None, cbstyle=None): """ Adds a fold panel to the list of panels. 
:param `caption`: the caption to be displayed in the associated :class:`CaptionBar`; :param `collapsed`: if set to ``True``, the panel is collapsed initially; :param `foldIcons`: an instance of :class:`ImageList` containing the icons to display next to the caption text; :param `cbstyle`: an instance of :class:`CaptionBarStyle`. :note: The FoldPanel item which is returned, can be used as a reference to perform actions upon the fold panel like collapsing it, expanding it, or deleting it from the list. Use this foldpanel to add windows to it. :see: :meth:`~FoldPanelBar.AddFoldPanelWindow` and :meth:`~FoldPanelBar.AddFoldPanelSeparator` to see how to add items derived from :class:`Window` to the panels. """ if cbstyle is None: cbstyle = EmptyCaptionBarStyle # create a fold panel item, which is first only the caption. # the user can now add a panel area which will be folded in # when pressed. if foldIcons is None: foldIcons = wx.ImageList(16, 16) bmp = ExpandedIcon.GetBitmap() foldIcons.Add(bmp) bmp = CollapsedIcon.GetBitmap() foldIcons.Add(bmp) item = FoldPanelItem(self._foldPanel, -1, caption=caption, foldIcons=foldIcons, collapsed=collapsed, cbstyle=cbstyle) pos = 0 if len(self._panels) > 0: pos = self._panels[-1].GetItemPos() + self._panels[-1].GetPanelLength() item.Reposition(pos) self._panels.append(item) return item def AddFoldPanelWindow(self, panel, window, flags=FPB_ALIGN_WIDTH, spacing=FPB_DEFAULT_SPACING, leftSpacing=FPB_DEFAULT_LEFTLINESPACING, rightSpacing=FPB_DEFAULT_RIGHTLINESPACING): """ Adds a :class:`Window` derived instance to the referenced fold panel. 
:param `panel`: an instance of :class:`FoldPanelItem`; :param `window`: the window we wish to add to the fold panel, an instance of :class:`Window`; :param `flags`: can be one of the following bits: ====================== ======= ==================================== Align Flag Value Description ====================== ======= ==================================== ``FPB_ALIGN_WIDTH`` 1 The :class:`Window` to be added will be aligned to fit the width of the FoldPanel when it is resized. Very handy for sizer items, buttons and text boxes. ``FPB_ALIGN_LEFT`` 0 Aligns left instead of fitting the width of the child window to be added. Use either this one or ``FPB_ALIGN_WIDTH``. ====================== ======= ==================================== :param `spacing`: the :class:`Window` to be added can be slightly indented from left and right so it is more visibly placed in the fold panel. Use `spacing` > 0 to give the control an y offset from the previous :class:`Window` added; :param `leftSpacing`: give the :class:`Window` added a slight indent from the left; :param `rightSpacing`: give the :class:`Window` added a slight indent from the right; :note: Make the window be a child of the fold panel! The following example adds a FoldPanel to the :class:`FoldPanelBar` and adds two :class:`Window` derived controls to the FoldPanel:: # Create the FoldPanelBar m_pnl = FoldPanelBar(self, wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, agwStyle=0x2) # Add a foldpanel to the control. "Test me" is the caption and it is # initially not collapsed. item = m_pnl.AddFoldPanel("Test me", False) # Now add a button to the fold panel. Mind that the button should be # made child of the FoldPanel and not of the main form. m_pnl.AddFoldPanelWindow(item, wx.Button(item, ID_COLLAPSEME, "Collapse Me")) # Add a separator between the two controls. This is purely a visual # line that can have a certain colour and also the indents and width # aligning like a control. 
m_pnl.AddFoldPanelSeparator(item) # Now add a text ctrl. Also very easy. Align this on width so that # when the control gets wider the text control also sizes along. m_pnl.AddFoldPanelWindow(item, wx.TextCtrl(item, wx.ID_ANY, "Comment"), FPB_ALIGN_WIDTH, FPB_DEFAULT_SPACING, 20) """ try: item = self._panels.index(panel) except: raise Exception("ERROR: Invalid Panel Passed To AddFoldPanelWindow: " + repr(panel)) panel.AddWindow(window, flags, spacing, leftSpacing, rightSpacing) # TODO: Take old and new height, and if difference, reposition all the lower # panels this is because the user can add new wxWindow controls somewhere in # between when other panels are already present. return 0 def AddFoldPanelSeparator(self, panel, colour=wx.BLACK, spacing=FPB_DEFAULT_SPACING, leftSpacing=FPB_DEFAULT_LEFTLINESPACING, rightSpacing=FPB_DEFAULT_RIGHTLINESPACING): """ Adds a separator line to the current fold panel. The separator is a simple line which is drawn and is no real component. It can be used to separate groups of controls which belong to each other. :param `colour`: the separator colour, an instance of :class:`Colour`; :param `spacing`: the separator to be added can be slightly indented from left and right so it is more visibly placed in the fold panel. Use `spacing` > 0 to give the control an y offset from the previous :class:`Window` added; :param `leftSpacing`: give the added separator a slight indent from the left; :param `rightSpacing`: give the added separator a slight indent from the right. """ try: item = self._panels.index(panel) except: raise Exception("ERROR: Invalid Panel Passed To AddFoldPanelSeparator: " + repr(panel)) panel.AddSeparator(colour, spacing, leftSpacing, rightSpacing) return 0 def OnSizePanel(self, event): """ Handles the ``wx.EVT_SIZE`` event for :class:`FoldPanelBar`. :param `event`: a :class:`SizeEvent` event to be processed. 
""" # skip all stuff when we are not initialised yet if not self._controlCreated: event.Skip() return foldrect = self.GetRect() # fold panel itself. If too little space, # don't show it foldrect.SetX(0) foldrect.SetY(0) self._foldPanel.SetSize(foldrect[2:]) if self._agwStyle & FPB_COLLAPSE_TO_BOTTOM or self._agwStyle & FPB_EXCLUSIVE_FOLD: rect = self.RepositionCollapsedToBottom() vertical = self.IsVertical() if vertical and rect.GetHeight() > 0 or not vertical and rect.GetWidth() > 0: self.RefreshRect(rect) # TODO: A smart way to check wether the old - new width of the # panel changed, if so no need to resize the fold panel items self.RedisplayFoldPanelItems() def OnPressCaption(self, event): """ Handles the ``wx.EVT_CAPTIONBAR`` event for :class:`CaptionBar`. :param `event`: a :class:`CaptionBarEvent` event to be processed. """ # act upon the folding or expanding status of the bar # to expand or collapse the panel(s) if event.GetFoldStatus(): self.Collapse(event.GetTag()) else: self.Expand(event.GetTag()) def RefreshPanelsFrom(self, item): """ Refreshes all the panels from given one down to last one. :param `item`: the first :class:`FoldPanelItem` to be refreshed. """ try: i = self._panels.index(item) except: raise Exception("ERROR: Invalid Panel Passed To RefreshPanelsFrom: " + repr(item)) self.Freeze() # if collapse to bottom is on, the panels that are not expanded # should be drawn at the bottom. All panels that are expanded # are drawn on top. 
The last expanded panel gets all the extra space if self._agwStyle & FPB_COLLAPSE_TO_BOTTOM or self._agwStyle & FPB_EXCLUSIVE_FOLD: offset = 0 for panels in self._panels: if panels.IsExpanded(): offset = offset + panels.Reposition(offset) # put all non collapsed panels at the bottom where there is space, # else put them right behind the expanded ones self.RepositionCollapsedToBottom() else: pos = self._panels[i].GetItemPos() + self._panels[i].GetPanelLength() for j in range(i+1, len(self._panels)): pos = pos + self._panels[j].Reposition(pos) self.Thaw() def RedisplayFoldPanelItems(self): """ Resizes the fold panels so they match the width. """ # resize them all. No need to reposition for panels in self._panels: panels.ResizePanel() panels.Refresh() def RepositionCollapsedToBottom(self): """ Repositions all the collapsed panels to the bottom. When it is not possible to align them to the bottom, stick them behind the visible panels. """ value = wx.Rect(0,0,0,0) vertical = self.IsVertical() # determine wether the number of panels left # times the size of their captions is enough # to be placed in the left over space expanded = 0 collapsed = 0 collapsed, expanded, values = self.GetPanelsLength(collapsed, expanded) # if no room stick them behind the normal ones, else # at the bottom if (vertical and [self.GetSize().GetHeight()] or \ [self.GetSize().GetWidth()])[0] - expanded - collapsed < 0: offset = expanded else: # value is the region which is left unpainted # I will send it back as 'slack' so it does not need to # be recalculated. 
value.SetHeight(self.GetSize().GetHeight()) value.SetWidth(self.GetSize().GetWidth()) if vertical: value.SetY(expanded) value.SetHeight(value.GetHeight() - expanded) else: value.SetX(expanded) value.SetWidth(value.GetWidth() - expanded) offset = (vertical and [self.GetSize().GetHeight()] or \ [self.GetSize().GetWidth()])[0] - collapsed # go reposition for panels in self._panels: if not panels.IsExpanded(): offset = offset + panels.Reposition(offset) return value def GetPanelsLength(self, collapsed, expanded): """ Returns the length of the panels that are expanded and collapsed. :param `collapsed`: the current value of the collapsed panels; :param `expanded`: the current value of the expanded panels. :note: This is useful to determine quickly what size is used to display, and what is left at the bottom (right) to align the collapsed panels. """ value = 0 # assumed here that all the panels that are expanded # are positioned after each other from 0,0 to end. for j in range(0, len(self._panels)): offset = self._panels[j].GetPanelLength() value = value + offset if self._panels[j].IsExpanded(): expanded = expanded + offset else: collapsed = collapsed + offset return collapsed, expanded, value def Collapse(self, foldpanel): """ Collapses the given fold panel reference, and updates the foldpanel bar. :param `foldpanel`: an instance of :class:`FoldPanelItem`. :note: With the ``FPB_COLLAPSE_TO_BOTTOM`` style set, all collapsed captions are put at the bottom of the control. In the normal mode, they stay where they are. """ try: item = self._panels.index(foldpanel) except: raise Exception("ERROR: Invalid Panel Passed To Collapse: " + repr(foldpanel)) foldpanel.Collapse() self.RefreshPanelsFrom(foldpanel) def Expand(self, foldpanel): """ Expands the given fold panel reference, and updates the foldpanel bar. :param `foldpanel`: an instance of :class:`FoldPanelItem`. 
:note: With the ``FPB_COLLAPSE_TO_BOTTOM`` style set, they will be removed from the bottom and the order where the panel originally was placed is restored. """ fpbextrastyle = 0 if self._agwStyle & FPB_SINGLE_FOLD or self._agwStyle & FPB_EXCLUSIVE_FOLD: fpbextrastyle = 1 for panel in self._panels: panel.Collapse() foldpanel.Expand() if fpbextrastyle: if self._agwStyle & FPB_EXCLUSIVE_FOLD: self.RepositionCollapsedToBottom() self.RefreshPanelsFrom(self._panels[0]) else: self.RefreshPanelsFrom(foldpanel) def ApplyCaptionStyle(self, foldpanel, cbstyle): """ Sets the style of the caption bar (:class:`CaptionBar`) of the fold panel. :param `foldpanel`: an instance of :class:`FoldPanelItem`; :param `cbstyle`: an instance of :class:`CaptionBarStyle`. :note: The changes are applied immediately. All styles not set in the :class:`CaptionBarStyle` class are not applied. Use the :class:`CaptionBar` reference to indicate what captionbar you want to apply the style to. To apply one style to all :class:`CaptionBar` items, use :meth:`~FoldPanelBar.ApplyCaptionStyleAll`. """ foldpanel.ApplyCaptionStyle(cbstyle) def ApplyCaptionStyleAll(self, cbstyle): """ Sets the style of all the caption bars of the fold panel. The changes are applied immediately. :param `cbstyle`: an instance of :class:`CaptionBarStyle`. """ for panels in self._panels: self.ApplyCaptionStyle(panels, cbstyle) def GetCaptionStyle(self, foldpanel): """ Returns the currently used caption style for the fold panel. It is returned as a :class:`CaptionBarStyle` class. After modifying it, it can be set again. :param `foldpanel`: an instance of :class:`FoldPanelItem`. """ return foldpanel.GetCaptionStyle() def IsVertical(self): """ Returns whether the :class:`CaptionBar` has default orientation or not. Default is vertical. """ return self._isVertical def GetFoldPanel(self, item): """ Returns the panel associated with the index `item`. 
:param `item`: an integer representing the :class:`FoldPanelItem` in the list of panels in this :class:`FoldPanelBar`. """ try: ind = self._panels[item] return self._panels[item] except: raise Exception("ERROR: List Index Out Of Range Or Bad Item Passed: " + repr(item) + \ ". Item Should Be An Integer Between " + repr(0) + " And " + \ repr(len(self._panels))) def GetCount(self): """ Returns the number of panels in the :class:`FoldPanelBar`. """ try: return len(self._panels) except: raise Exception("ERROR: No Panels Have Been Added To FoldPanelBar") # --------------------------------------------------------------------------------- # # class FoldPanelItem # --------------------------------------------------------------------------------- # class FoldPanelItem(wx.Panel): """ This class is a child sibling of the :class:`FoldPanelBar` class. It will contain a :class:`CaptionBar` class for receiving of events, and a the rest of the area can be populated by a :class:`Panel` derived class. """ def __init__(self, parent, id=wx.ID_ANY, caption="", foldIcons=None, collapsed=False, cbstyle=None): """ Default class constructor. :param `parent`: the :class:`FoldPanelItem` parent window; :param `id`: an identifier for the control: a value of -1 is taken to mean a default; :param `caption`: the string to be displayed in :class:`CaptionBar`; :param `foldIcons`: an instance of :class:`ImageList` containing the icons to display next to the caption text; :param `collapsed`: ``True`` if the :class:`CaptionBar` should start in the collapsed state, ``False`` otherwise; :param `cbstyle`: the :class:`CaptionBar` window style. Must be an instance of :class:`CaptionBarStyle`. 
""" wx.Panel.__init__(self, parent, id, wx.Point(0,0), style=wx.CLIP_CHILDREN) self._controlCreated = False self._UserSize = 0 self._PanelSize = 0 self._LastInsertPos = 0 self._itemPos = 0 self._userSized = False if foldIcons is None: foldIcons = wx.ImageList(16, 16) bmp = ExpandedIcon.GetBitmap() foldIcons.Add(bmp) bmp = CollapsedIcon.GetBitmap() foldIcons.Add(bmp) self._foldIcons = foldIcons if cbstyle is None: cbstyle = EmptyCaptionBarStyle # create the caption bar, in collapsed or expanded state self._captionBar = CaptionBar(self, wx.ID_ANY, wx.Point(0,0), size=wx.DefaultSize, caption=caption, foldIcons=foldIcons, cbstyle=cbstyle) if collapsed: self._captionBar.Collapse() self._controlCreated = True # make initial size for component, if collapsed, the # size is determined on the panel height and won't change size = self._captionBar.GetSize() self._PanelSize = (self.IsVertical() and [size.GetHeight()] or \ [size.GetWidth()])[0] self._LastInsertPos = self._PanelSize self._items = [] self.Bind(EVT_CAPTIONBAR, self.OnPressCaption) self.Bind(wx.EVT_PAINT, self.OnPaint) def AddWindow(self, window, flags=FPB_ALIGN_WIDTH, spacing=FPB_DEFAULT_SPACING, leftSpacing=FPB_DEFAULT_LEFTLINESPACING, rightSpacing=FPB_DEFAULT_RIGHTLINESPACING): """ Adds a window item to the list of items on this panel. :param `window`: an instance of :class:`Window`; :param `flags`: can be one of the following bits: ====================== ======= ==================================== Align Flag Value Description ====================== ======= ==================================== ``FPB_ALIGN_WIDTH`` 1 The :class:`Window` to be added will be aligned to fit the width of the FoldPanel when it is resized. Very handy for sizer items, buttons and text boxes. ``FPB_ALIGN_LEFT`` 0 Aligns left instead of fitting the width of the child window to be added. Use either this one or ``FPB_ALIGN_WIDTH``. 
====================== ======= ==================================== :param `spacing`: reserves a number of pixels before the window element; :param `leftSpacing`: an indent, in pixels; :param `rightSpacing`: a right spacing, only relevant when the style ``FPB_ALIGN_WIDTH`` is chosen. """ wi = FoldWindowItem(self, window, Type="WINDOW", flags=flags, spacing=spacing, leftSpacing=leftSpacing, rightSpacing=rightSpacing) self._items.append(wi) vertical = self.IsVertical() self._spacing = spacing self._leftSpacing = leftSpacing self._rightSpacing = rightSpacing xpos = (vertical and [leftSpacing] or [self._LastInsertPos + spacing])[0] ypos = (vertical and [self._LastInsertPos + spacing] or [leftSpacing])[0] window.SetDimensions(xpos, ypos, -1, -1, wx.SIZE_USE_EXISTING) self._LastInsertPos = self._LastInsertPos + wi.GetWindowLength(vertical) self.ResizePanel() def AddSeparator(self, colour=wx.BLACK, spacing=FPB_DEFAULT_SPACING, leftSpacing=FPB_DEFAULT_LEFTSPACING, rightSpacing=FPB_DEFAULT_RIGHTSPACING): """ Adds a separator item to the list of items on this panel. :param `colour`: the separator colour, an instance of :class:`Colour`; :param `spacing`: the separator to be added can be slightly indented from left and right so it is more visibly placed in the fold panel. Use `spacing` > 0 to give the control an y offset from the previous :class:`Window` added; :param `leftSpacing`: give the added separator a slight indent from the left; :param `rightSpacing`: give the added separator a slight indent from the right. """ wi = FoldWindowItem(self, window=None, Type="SEPARATOR", flags=FPB_ALIGN_WIDTH, y=self._LastInsertPos, colour=colour, spacing=spacing, leftSpacing=leftSpacing, rightSpacing=rightSpacing) self._items.append(wi) self._LastInsertPos = self._LastInsertPos + \ wi.GetWindowLength(self.IsVertical()) self.ResizePanel() def Reposition(self, pos): """ Repositions this :class:`FoldPanelItem` and reports the length occupied for the next :class:`FoldPanelItem` in the list. 
:param `pos`: the new item position. """ # NOTE: Call Resize before Reposition when an item is added, because the new # size needed will be calculated by Resize. Of course the relative position # of the controls have to be correct in respect to the caption bar self.Freeze() vertical = self.IsVertical() xpos = (vertical and [-1] or [pos])[0] ypos = (vertical and [pos] or [-1])[0] self.SetDimensions(xpos, ypos, -1, -1, wx.SIZE_USE_EXISTING) self._itemPos = pos self.Thaw() return self.GetPanelLength() def OnPressCaption(self, event): """ Handles the ``wx.EVT_CAPTIONBAR`` event for :class:`FoldPanelItem`. :param `event`: a :class:`CaptionBarEvent` event to be processed. """ # tell the upper container we are responsible # for this event, so it can fold the panel item # and do a refresh event.SetTag(self) event.Skip() def ResizePanel(self): """ Resizes the panel. """ # prevent unnecessary updates by blocking repaints for a sec self.Freeze() vertical = self.IsVertical() # force this panel to take the width of the parent panel and the y of the # user or calculated width (which will be recalculated by the contents here) if self._captionBar.IsCollapsed(): size = self._captionBar.GetSize() self._PanelSize = (vertical and [size.GetHeight()] or [size.GetWidth()])[0] else: size = self.GetBestSize() self._PanelSize = (vertical and [size.GetHeight()] or [size.GetWidth()])[0] if self._UserSize: if vertical: size.SetHeight(self._UserSize) else: size.SetWidth(self._UserSize) pnlsize = self.GetParent().GetSize() if vertical: size.SetWidth(pnlsize.GetWidth()) else: size.SetHeight(pnlsize.GetHeight()) # resize caption bar xsize = (vertical and [size.GetWidth()] or [-1])[0] ysize = (vertical and [-1] or [size.GetHeight()])[0] self._captionBar.SetSize((xsize, ysize)) # resize the panel self.SetSize(size) # go by all the controls and call Layout for items in self._items: items.ResizeItem((vertical and [size.GetWidth()] or \ [size.GetHeight()])[0], vertical) self.Thaw() def OnPaint(self, 
event): """ Handles the ``wx.EVT_PAINT`` event for :class:`FoldPanelItem`. :param `event`: a :class:`PaintEvent` event to be processed. """ # draw all the items that are lines dc = wx.PaintDC(self) vertical = self.IsVertical() for item in self._items: if item.GetType() == "SEPARATOR": pen = wx.Pen(item.GetLineColour(), 1, wx.SOLID) dc.SetPen(pen) a = item.GetLeftSpacing() b = item.GetLineY() + item.GetSpacing() c = item.GetLineLength() d = a + c if vertical: dc.DrawLine(a, b, d, b) else: dc.DrawLine(b, a, b, d) event.Skip() def IsVertical(self): """ Returns whether the :class:`CaptionBar` has default orientation or not. Default is vertical. """ # grandparent of FoldPanelItem is FoldPanelBar # default is vertical if isinstance(self.GetGrandParent(), FoldPanelBar): return self.GetGrandParent().IsVertical() else: raise Exception("ERROR: Wrong Parent " + repr(self.GetGrandParent())) def IsExpanded(self): """ Returns expanded or collapsed status. If the panel is expanded, ``True`` is returned. """ return not self._captionBar.IsCollapsed() def GetItemPos(self): """ Returns item's position. """ return self._itemPos def Collapse(self): """ Internal method. This should not be called by the user, because it doesn't trigger the parent to tell it that we are collapsed or expanded, it only changes visual state. """ self._captionBar.Collapse() self.ResizePanel() def Expand(self): """ Internal method. This should not be called by the user, because it doesn't trigger the parent to tell it that we are collapsed or expanded, it only changes visual state. """ self._captionBar.Expand() self.ResizePanel() def GetPanelLength(self): """ Returns size of panel. """ if self._captionBar.IsCollapsed(): return self.GetCaptionLength() elif self._userSized: return self._UserSize return self._PanelSize def GetCaptionLength(self): """ Returns height of caption only. This is for folding calculation purposes. 
""" size = self._captionBar.GetSize() return (self.IsVertical() and [size.GetHeight()] or [size.GetWidth()])[0] def ApplyCaptionStyle(self, cbstyle): """ Applies the style defined in `cbstyle` to the :class:`CaptionBar`.""" self._captionBar.SetCaptionStyle(cbstyle) def GetCaptionStyle(self): """ Returns the current style of the captionbar in a :class:`CaptionBarStyle` class. This can be used to change and set back the changes. """ return self._captionBar.GetCaptionStyle() # ----------------------------------------------------------------------------------- # # class FoldWindowItem # ----------------------------------------------------------------------------------- # class FoldWindowItem(object): """ This class is a child sibling of the :class:`FoldPanelItem` class. It will contain :class:`Window` that can be either a separator (a coloured line simulated by a :class:`Window`) or a wxPython controls (such as a :class:`Button`, a :class:`ListCtrl` etc...). """ def __init__(self, parent, window=None, **kw): """ Default class constructor :param `parent`: the :class:`FoldWindowItem` parent; :param `window`: the window contained in this item. :keyword `Type`: can be "WINDOW" or "SEPARATOR"; :keyword `lineColour`: the separator colour (meaningful for separators only); :keyword `y`: the separator y position (meaningful for separators only); :keyword `flags`: the alignment flags; :keyword `spacing`: reserves a number of pixels before the window/separator element; :keyword `leftSpacing`: an indent, in pixels; :keyword `rightSpacing`: a right spacing, only relevant when the style ``FPB_ALIGN_WIDTH`` is chosen. :see: :meth:`FoldPanelBar.AddFoldPanelWindow() <FoldPanelBar.AddFoldPanelWindow>` for a list of valid alignment flags. """ if not kw.has_key("Type"): raise Exception('ERROR: Missing Window Type Information. This Should Be "WINDOW" Or "SEPARATOR"') if kw.get("Type") == "WINDOW": # Window constructor. 
This initialises the class as a wx.Window Type if kw.has_key("flags"): self._flags = kw.get("flags") else: self._flags = FPB_ALIGN_WIDTH if kw.has_key("spacing"): self._spacing = kw.get("spacing") else: self._spacing = FPB_DEFAULT_SPACING if kw.has_key("leftSpacing"): self._leftSpacing = kw.get("leftSpacing") else: self._leftSpacing = FPB_DEFAULT_LEFTSPACING if kw.has_key("rightSpacing"): self._rightSpacing = kw.get("rightSpacing") else: self._rightSpacing = FPB_DEFAULT_RIGHTSPACING self._lineY = 0 self._sepLineColour = None self._wnd = window elif kw.get("Type") == "SEPARATOR": # separator constructor. This initialises the class as a separator type if kw.has_key("y"): self._lineY = kw.get("y") else: raise Exception("ERROR: Undefined Y Position For The Separator") if kw.has_key("lineColour"): self._sepLineColour = kw.get("lineColour") else: self._sepLineColour = wx.BLACK if kw.has_key("flags"): self._flags = kw.get("flags") else: self._flags = FPB_ALIGN_WIDTH if kw.has_key("spacing"): self._spacing = kw.get("spacing") else: self._spacing = FPB_DEFAULT_SPACING if kw.has_key("leftSpacing"): self._leftSpacing = kw.get("leftSpacing") else: self._leftSpacing = FPB_DEFAULT_LEFTSPACING if kw.has_key("rightSpacing"): self._rightSpacing = kw.get("rightSpacing") else: self._rightSpacing = FPB_DEFAULT_RIGHTSPACING self._wnd = window else: raise Exception("ERROR: Undefined Window Type Selected: " + repr(kw.get("Type"))) self._type = kw.get("Type") self._lineLength = 0 def GetType(self): """ Returns the :class:`FoldWindowItem` type. """ return self._type def GetLineY(self): """ Returns the y position of the separator. """ return self._lineY def GetLineLength(self): """ Returns the separator line length. """ return self._lineLength def GetLineColour(self): """ Returns the separator line colour. """ return self._sepLineColour def GetLeftSpacing(self): """ Returns the left indent of :class:`FoldWindowItem`. 
""" return self._leftSpacing def GetRightSpacing(self): """ Returns the right indent of :class:`FoldWindowItem`. """ return self._rightSpacing def GetSpacing(self): """ Returns the spacing of :class:`FoldWindowItem`. """ return self._spacing def GetWindowLength(self, vertical=True): """ Returns space needed by the window if type is :class:`FoldWindowItem` "WINDOW" and returns the total size plus the extra spacing. :param `vertical`: ``True`` if the parent :class:`FoldPanelBar` is in vertical mode. """ value = 0 if self._type == "WINDOW": size = self._wnd.GetSize() value = (vertical and [size.GetHeight()] or [size.GetWidth()])[0] + \ self._spacing elif self._type == "SEPARATOR": value = 1 + self._spacing return value def ResizeItem(self, size, vertical=True): """ Resizes the element, whatever it is. A separator or line will be always aligned by width or height depending on orientation of the whole panel. :param `size`: the maximum size available for the :class:`FoldWindowItem`; :param `vertical`: ``True`` if the parent :class:`FoldPanelBar` is in vertical mode. """ if self._flags & FPB_ALIGN_WIDTH: # align by taking full width mySize = size - self._leftSpacing - self._rightSpacing if mySize < 0: mySize = 10 # can't have negative width if self._type == "SEPARATOR": self._lineLength = mySize else: xsize = (vertical and [mySize] or [-1])[0] ysize = (vertical and [-1] or [mySize])[0] self._wnd.SetSize((xsize, ysize))
gpl-2.0
kaiseu/pat-data-processing
component/mem.py
1
2129
#!/usr/bin/python
# encoding: utf-8

"""
@author: xuk1
@license: (C) Copyright 2013-2017
@contact: kai.a.xu@intel.com
@file: mem.py
@time: 8/15/2017 10:50
@desc:

"""

import numpy as np
import pandas as pd

from component.base import CommonBase


class Mem(CommonBase):
    """
    Node memory attribute, phasing memory data from original PAT file
    """

    # Columns of interest in the raw PAT file: the first two identify the
    # sample (host + epoch-second timestamp), the remaining four are the
    # memory counters that get aggregated.
    used_col = ['HostName', 'TimeStamp', 'kbmemfree', 'kbmemused', 'kbbuffers', 'kbcached']
    # parse every counter column as int64
    converter = {col: np.int64 for col in used_col[2:]}

    def __init__(self, file_path):
        """
        :param file_path: path of the PAT memory data file to parse
        """
        self.file_path = file_path

    def get_data_by_time(self, start, end):
        """
        Get average value of this attribute and all raw data within the start
        and end timestamps. If start and end both equal [0], the average is
        calculated over all the data.

        :param start: list of start timestamps
        :param end: list of end timestamps, should be the same length as `start`
        :return: tuple of (list of per-interval column averages, raw DataFrame)
        """
        df = pd.read_csv(self.file_path, delim_whitespace=True,
                         usecols=self.used_col, header=0)
        df = df.loc[0::2]  # read every two rows
        # NOTE(review): the converted Series is discarded here, so the index
        # set below keeps the raw epoch-second integers -- confirm whether
        # this was meant to be assigned back to df['TimeStamp'].
        pd.to_datetime(df['TimeStamp'], unit='s')
        df = df.set_index('TimeStamp').astype(self.converter)
        avg = []
        if start[0] == end[0] == 0:  # calc all the data
            # Select the counter columns by name: after set_index the
            # positional offsets shift by one, so the former iloc[:, 2:]
            # silently dropped 'kbmemfree' (fixed here, consistent with the
            # per-interval branch below).
            avg.append(df.loc[:, self.used_col[2:]].mean(axis=0).astype('float32'))
            if len(start) == 1:
                return avg, df
            # calc the data within each pair of time periods; index 0 is
            # skipped because the whole-run average was already appended
            for i in range(1, len(start)):
                avg.append(df.loc[str(start[i]): str(end[i]), self.used_col[2:]].mean(axis=0))
            return avg, df
        for i in range(len(start)):  # calc the data within the pair of time period
            avg.append(df.loc[str(start[i]): str(end[i]), self.used_col[2:]].mean(axis=0).astype('float32'))
        return avg, df

    def used_col_num(self):
        """
        :return: number of columns read from the PAT file
        """
        # Was `self.__used_col`: the double leading underscore name-mangles
        # to `self._Mem__used_col`, which does not exist, so this method
        # always raised AttributeError. The class attribute is `used_col`.
        return len(self.used_col)
apache-2.0
Dhivyap/ansible
lib/ansible/modules/cloud/vmware/vmware_vm_info.py
13
10816
#!/usr/bin/python # -*- coding: utf-8 -*- # Copyright: (c) 2015, Joseph Callen <jcallen () csc.com> # Copyright: (c) 2018, Ansible Project # Copyright: (c) 2018, Fedor Vompe <f.vompe () comptek.ru> # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function __metaclass__ = type ANSIBLE_METADATA = { 'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community' } DOCUMENTATION = r''' --- module: vmware_vm_info short_description: Return basic info pertaining to a VMware machine guest description: - Return basic information pertaining to a vSphere or ESXi virtual machine guest. - Cluster name as fact is added in version 2.7. - This module was called C(vmware_vm_facts) before Ansible 2.9. The usage did not change. version_added: '2.0' author: - Joseph Callen (@jcpowermac) - Abhijeet Kasurde (@Akasurde) - Fedor Vompe (@sumkincpp) notes: - Tested on ESXi 6.7, vSphere 5.5 and vSphere 6.5 - From 2.8 and onwards, information are returned as list of dict instead of dict. requirements: - python >= 2.6 - PyVmomi options: vm_type: description: - If set to C(vm), then information are gathered for virtual machines only. - If set to C(template), then information are gathered for virtual machine templates only. - If set to C(all), then information are gathered for all virtual machines and virtual machine templates. required: False default: 'all' choices: [ all, vm, template ] version_added: 2.5 type: str show_attribute: description: - Attributes related to VM guest shown in information only when this is set C(true). default: no type: bool version_added: 2.8 folder: description: - Specify a folder location of VMs to gather information from. 
- 'Examples:' - ' folder: /ha-datacenter/vm' - ' folder: ha-datacenter/vm' - ' folder: /datacenter1/vm' - ' folder: datacenter1/vm' - ' folder: /datacenter1/vm/folder1' - ' folder: datacenter1/vm/folder1' - ' folder: /folder1/datacenter1/vm' - ' folder: folder1/datacenter1/vm' - ' folder: /folder1/datacenter1/vm/folder2' type: str version_added: 2.9 show_tag: description: - Tags related to virtual machine are shown if set to C(True). default: False type: bool version_added: 2.9 extends_documentation_fragment: vmware.documentation ''' EXAMPLES = r''' - name: Gather all registered virtual machines vmware_vm_info: hostname: '{{ vcenter_hostname }}' username: '{{ vcenter_username }}' password: '{{ vcenter_password }}' delegate_to: localhost register: vminfo - debug: var: vminfo.virtual_machines - name: Gather only registered virtual machine templates vmware_vm_info: hostname: '{{ vcenter_hostname }}' username: '{{ vcenter_username }}' password: '{{ vcenter_password }}' vm_type: template delegate_to: localhost register: template_info - debug: var: template_info.virtual_machines - name: Gather only registered virtual machines vmware_vm_info: hostname: '{{ vcenter_hostname }}' username: '{{ vcenter_username }}' password: '{{ vcenter_password }}' vm_type: vm delegate_to: localhost register: vm_info - debug: var: vm_info.virtual_machines - name: Get UUID from given VM Name block: - name: Get virtual machine info vmware_vm_info: hostname: '{{ vcenter_hostname }}' username: '{{ vcenter_username }}' password: '{{ vcenter_password }}' folder: "/datacenter/vm/folder" delegate_to: localhost register: vm_info - debug: msg: "{{ item.uuid }}" with_items: - "{{ vm_info.virtual_machines | json_query(query) }}" vars: query: "[?guest_name=='DC0_H0_VM0']" - name: Get Tags from given VM Name block: - name: Get virtual machine info vmware_vm_info: hostname: '{{ vcenter_hostname }}' username: '{{ vcenter_username }}' password: '{{ vcenter_password }}' folder: "/datacenter/vm/folder" 
delegate_to: localhost register: vm_info - debug: msg: "{{ item.tags }}" with_items: - "{{ vm_info.virtual_machines | json_query(query) }}" vars: query: "[?guest_name=='DC0_H0_VM0']" ''' RETURN = r''' virtual_machines: description: list of dictionary of virtual machines and their information returned: success type: list sample: [ { "guest_name": "ubuntu_t", "cluster": null, "esxi_hostname": "10.76.33.226", "guest_fullname": "Ubuntu Linux (64-bit)", "ip_address": "", "mac_address": [ "00:50:56:87:a5:9a" ], "power_state": "poweredOff", "uuid": "4207072c-edd8-3bd5-64dc-903fd3a0db04", "vm_network": { "00:50:56:87:a5:9a": { "ipv4": [ "10.76.33.228" ], "ipv6": [] } }, "attributes": { "job": "backup-prepare" }, "tags": [ { "category_id": "urn:vmomi:InventoryServiceCategory:b316cc45-f1a9-4277-811d-56c7e7975203:GLOBAL", "category_name": "cat_0001", "description": "", "id": "urn:vmomi:InventoryServiceTag:43737ec0-b832-4abf-abb1-fd2448ce3b26:GLOBAL", "name": "tag_0001" } ] } ] ''' try: from pyVmomi import vim except ImportError: pass from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.vmware import PyVmomi, get_all_objs, vmware_argument_spec, _get_vm_prop from ansible.module_utils.vmware_rest_client import VmwareRestClient class VmwareVmInfo(PyVmomi): def __init__(self, module): super(VmwareVmInfo, self).__init__(module) def get_tag_info(self, vm_dynamic_obj): vmware_client = VmwareRestClient(self.module) return vmware_client.get_tags_for_vm(vm_mid=vm_dynamic_obj._moId) def get_vm_attributes(self, vm): return dict((x.name, v.value) for x in self.custom_field_mgr for v in vm.customValue if x.key == v.key) # https://github.com/vmware/pyvmomi-community-samples/blob/master/samples/getallvms.py def get_all_virtual_machines(self): """ Get all virtual machines and related configurations information """ folder = self.params.get('folder') folder_obj = None if folder: folder_obj = self.content.searchIndex.FindByInventoryPath(folder) if not folder_obj: 
self.module.fail_json(msg="Failed to find folder specified by %(folder)s" % self.params) virtual_machines = get_all_objs(self.content, [vim.VirtualMachine], folder=folder_obj) _virtual_machines = [] for vm in virtual_machines: _ip_address = "" summary = vm.summary if summary.guest is not None: _ip_address = summary.guest.ipAddress if _ip_address is None: _ip_address = "" _mac_address = [] all_devices = _get_vm_prop(vm, ('config', 'hardware', 'device')) if all_devices: for dev in all_devices: if isinstance(dev, vim.vm.device.VirtualEthernetCard): _mac_address.append(dev.macAddress) net_dict = {} vmnet = _get_vm_prop(vm, ('guest', 'net')) if vmnet: for device in vmnet: net_dict[device.macAddress] = dict() net_dict[device.macAddress]['ipv4'] = [] net_dict[device.macAddress]['ipv6'] = [] for ip_addr in device.ipAddress: if "::" in ip_addr: net_dict[device.macAddress]['ipv6'].append(ip_addr) else: net_dict[device.macAddress]['ipv4'].append(ip_addr) esxi_hostname = None esxi_parent = None if summary.runtime.host: esxi_hostname = summary.runtime.host.summary.config.name esxi_parent = summary.runtime.host.parent cluster_name = None if esxi_parent and isinstance(esxi_parent, vim.ClusterComputeResource): cluster_name = summary.runtime.host.parent.name vm_attributes = dict() if self.module.params.get('show_attribute'): vm_attributes = self.get_vm_attributes(vm) vm_tags = list() if self.module.params.get('show_tag'): vm_tags = self.get_tag_info(vm) virtual_machine = { "guest_name": summary.config.name, "guest_fullname": summary.config.guestFullName, "power_state": summary.runtime.powerState, "ip_address": _ip_address, # Kept for backward compatibility "mac_address": _mac_address, # Kept for backward compatibility "uuid": summary.config.uuid, "vm_network": net_dict, "esxi_hostname": esxi_hostname, "cluster": cluster_name, "attributes": vm_attributes, "tags": vm_tags } vm_type = self.module.params.get('vm_type') is_template = _get_vm_prop(vm, ('config', 'template')) if vm_type 
== 'vm' and not is_template: _virtual_machines.append(virtual_machine) elif vm_type == 'template' and is_template: _virtual_machines.append(virtual_machine) elif vm_type == 'all': _virtual_machines.append(virtual_machine) return _virtual_machines def main(): argument_spec = vmware_argument_spec() argument_spec.update( vm_type=dict(type='str', choices=['vm', 'all', 'template'], default='all'), show_attribute=dict(type='bool', default='no'), show_tag=dict(type='bool', default=False), folder=dict(type='str'), ) module = AnsibleModule( argument_spec=argument_spec, supports_check_mode=True ) if module._name == 'vmware_vm_facts': module.deprecate("The 'vmware_vm_facts' module has been renamed to 'vmware_vm_info'", version='2.13') vmware_vm_info = VmwareVmInfo(module) _virtual_machines = vmware_vm_info.get_all_virtual_machines() module.exit_json(changed=False, virtual_machines=_virtual_machines) if __name__ == '__main__': main()
gpl-3.0
bottompawn/kbengine
kbe/src/lib/python/Lib/xml/dom/pulldom.py
850
11761
import xml.sax import xml.sax.handler START_ELEMENT = "START_ELEMENT" END_ELEMENT = "END_ELEMENT" COMMENT = "COMMENT" START_DOCUMENT = "START_DOCUMENT" END_DOCUMENT = "END_DOCUMENT" PROCESSING_INSTRUCTION = "PROCESSING_INSTRUCTION" IGNORABLE_WHITESPACE = "IGNORABLE_WHITESPACE" CHARACTERS = "CHARACTERS" class PullDOM(xml.sax.ContentHandler): _locator = None document = None def __init__(self, documentFactory=None): from xml.dom import XML_NAMESPACE self.documentFactory = documentFactory self.firstEvent = [None, None] self.lastEvent = self.firstEvent self.elementStack = [] self.push = self.elementStack.append try: self.pop = self.elementStack.pop except AttributeError: # use class' pop instead pass self._ns_contexts = [{XML_NAMESPACE:'xml'}] # contains uri -> prefix dicts self._current_context = self._ns_contexts[-1] self.pending_events = [] def pop(self): result = self.elementStack[-1] del self.elementStack[-1] return result def setDocumentLocator(self, locator): self._locator = locator def startPrefixMapping(self, prefix, uri): if not hasattr(self, '_xmlns_attrs'): self._xmlns_attrs = [] self._xmlns_attrs.append((prefix or 'xmlns', uri)) self._ns_contexts.append(self._current_context.copy()) self._current_context[uri] = prefix or None def endPrefixMapping(self, prefix): self._current_context = self._ns_contexts.pop() def startElementNS(self, name, tagName , attrs): # Retrieve xml namespace declaration attributes. xmlns_uri = 'http://www.w3.org/2000/xmlns/' xmlns_attrs = getattr(self, '_xmlns_attrs', None) if xmlns_attrs is not None: for aname, value in xmlns_attrs: attrs._attrs[(xmlns_uri, aname)] = value self._xmlns_attrs = [] uri, localname = name if uri: # When using namespaces, the reader may or may not # provide us with the original name. If not, create # *a* valid tagName from the current context. 
if tagName is None: prefix = self._current_context[uri] if prefix: tagName = prefix + ":" + localname else: tagName = localname if self.document: node = self.document.createElementNS(uri, tagName) else: node = self.buildDocument(uri, tagName) else: # When the tagname is not prefixed, it just appears as # localname if self.document: node = self.document.createElement(localname) else: node = self.buildDocument(None, localname) for aname,value in attrs.items(): a_uri, a_localname = aname if a_uri == xmlns_uri: if a_localname == 'xmlns': qname = a_localname else: qname = 'xmlns:' + a_localname attr = self.document.createAttributeNS(a_uri, qname) node.setAttributeNodeNS(attr) elif a_uri: prefix = self._current_context[a_uri] if prefix: qname = prefix + ":" + a_localname else: qname = a_localname attr = self.document.createAttributeNS(a_uri, qname) node.setAttributeNodeNS(attr) else: attr = self.document.createAttribute(a_localname) node.setAttributeNode(attr) attr.value = value self.lastEvent[1] = [(START_ELEMENT, node), None] self.lastEvent = self.lastEvent[1] self.push(node) def endElementNS(self, name, tagName): self.lastEvent[1] = [(END_ELEMENT, self.pop()), None] self.lastEvent = self.lastEvent[1] def startElement(self, name, attrs): if self.document: node = self.document.createElement(name) else: node = self.buildDocument(None, name) for aname,value in attrs.items(): attr = self.document.createAttribute(aname) attr.value = value node.setAttributeNode(attr) self.lastEvent[1] = [(START_ELEMENT, node), None] self.lastEvent = self.lastEvent[1] self.push(node) def endElement(self, name): self.lastEvent[1] = [(END_ELEMENT, self.pop()), None] self.lastEvent = self.lastEvent[1] def comment(self, s): if self.document: node = self.document.createComment(s) self.lastEvent[1] = [(COMMENT, node), None] self.lastEvent = self.lastEvent[1] else: event = [(COMMENT, s), None] self.pending_events.append(event) def processingInstruction(self, target, data): if self.document: node = 
self.document.createProcessingInstruction(target, data) self.lastEvent[1] = [(PROCESSING_INSTRUCTION, node), None] self.lastEvent = self.lastEvent[1] else: event = [(PROCESSING_INSTRUCTION, target, data), None] self.pending_events.append(event) def ignorableWhitespace(self, chars): node = self.document.createTextNode(chars) self.lastEvent[1] = [(IGNORABLE_WHITESPACE, node), None] self.lastEvent = self.lastEvent[1] def characters(self, chars): node = self.document.createTextNode(chars) self.lastEvent[1] = [(CHARACTERS, node), None] self.lastEvent = self.lastEvent[1] def startDocument(self): if self.documentFactory is None: import xml.dom.minidom self.documentFactory = xml.dom.minidom.Document.implementation def buildDocument(self, uri, tagname): # Can't do that in startDocument, since we need the tagname # XXX: obtain DocumentType node = self.documentFactory.createDocument(uri, tagname, None) self.document = node self.lastEvent[1] = [(START_DOCUMENT, node), None] self.lastEvent = self.lastEvent[1] self.push(node) # Put everything we have seen so far into the document for e in self.pending_events: if e[0][0] == PROCESSING_INSTRUCTION: _,target,data = e[0] n = self.document.createProcessingInstruction(target, data) e[0] = (PROCESSING_INSTRUCTION, n) elif e[0][0] == COMMENT: n = self.document.createComment(e[0][1]) e[0] = (COMMENT, n) else: raise AssertionError("Unknown pending event ",e[0][0]) self.lastEvent[1] = e self.lastEvent = e self.pending_events = None return node.firstChild def endDocument(self): self.lastEvent[1] = [(END_DOCUMENT, self.document), None] self.pop() def clear(self): "clear(): Explicitly release parsing structures" self.document = None class ErrorHandler: def warning(self, exception): print(exception) def error(self, exception): raise exception def fatalError(self, exception): raise exception class DOMEventStream: def __init__(self, stream, parser, bufsize): self.stream = stream self.parser = parser self.bufsize = bufsize if not 
hasattr(self.parser, 'feed'): self.getEvent = self._slurp self.reset() def reset(self): self.pulldom = PullDOM() # This content handler relies on namespace support self.parser.setFeature(xml.sax.handler.feature_namespaces, 1) self.parser.setContentHandler(self.pulldom) def __getitem__(self, pos): rc = self.getEvent() if rc: return rc raise IndexError def __next__(self): rc = self.getEvent() if rc: return rc raise StopIteration def __iter__(self): return self def expandNode(self, node): event = self.getEvent() parents = [node] while event: token, cur_node = event if cur_node is node: return if token != END_ELEMENT: parents[-1].appendChild(cur_node) if token == START_ELEMENT: parents.append(cur_node) elif token == END_ELEMENT: del parents[-1] event = self.getEvent() def getEvent(self): # use IncrementalParser interface, so we get the desired # pull effect if not self.pulldom.firstEvent[1]: self.pulldom.lastEvent = self.pulldom.firstEvent while not self.pulldom.firstEvent[1]: buf = self.stream.read(self.bufsize) if not buf: self.parser.close() return None self.parser.feed(buf) rc = self.pulldom.firstEvent[1][0] self.pulldom.firstEvent[1] = self.pulldom.firstEvent[1][1] return rc def _slurp(self): """ Fallback replacement for getEvent() using the standard SAX2 interface, which means we slurp the SAX events into memory (no performance gain, but we are compatible to all SAX parsers). """ self.parser.parse(self.stream) self.getEvent = self._emit return self._emit() def _emit(self): """ Fallback replacement for getEvent() that emits the events that _slurp() read previously. 
""" rc = self.pulldom.firstEvent[1][0] self.pulldom.firstEvent[1] = self.pulldom.firstEvent[1][1] return rc def clear(self): """clear(): Explicitly release parsing objects""" self.pulldom.clear() del self.pulldom self.parser = None self.stream = None class SAX2DOM(PullDOM): def startElementNS(self, name, tagName , attrs): PullDOM.startElementNS(self, name, tagName, attrs) curNode = self.elementStack[-1] parentNode = self.elementStack[-2] parentNode.appendChild(curNode) def startElement(self, name, attrs): PullDOM.startElement(self, name, attrs) curNode = self.elementStack[-1] parentNode = self.elementStack[-2] parentNode.appendChild(curNode) def processingInstruction(self, target, data): PullDOM.processingInstruction(self, target, data) node = self.lastEvent[0][1] parentNode = self.elementStack[-1] parentNode.appendChild(node) def ignorableWhitespace(self, chars): PullDOM.ignorableWhitespace(self, chars) node = self.lastEvent[0][1] parentNode = self.elementStack[-1] parentNode.appendChild(node) def characters(self, chars): PullDOM.characters(self, chars) node = self.lastEvent[0][1] parentNode = self.elementStack[-1] parentNode.appendChild(node) default_bufsize = (2 ** 14) - 20 def parse(stream_or_string, parser=None, bufsize=None): if bufsize is None: bufsize = default_bufsize if isinstance(stream_or_string, str): stream = open(stream_or_string, 'rb') else: stream = stream_or_string if not parser: parser = xml.sax.make_parser() return DOMEventStream(stream, parser, bufsize) def parseString(string, parser=None): from io import StringIO bufsize = len(string) buf = StringIO(string) if not parser: parser = xml.sax.make_parser() return DOMEventStream(buf, parser, bufsize)
lgpl-3.0
MichaelMGonzalez/MagneticFieldLocalization
SerialCommunication/GenerateMsgHeaders.py
1
2139
import json import sys # Open Header info file_path = "SerialMsgHeader.json" if len(sys.argv) > 1: file_path = sys.argv[1] f = open(file_path) json_obj = json.load(f) file_name = file_path.split(".json")[0] for file_type in json_obj["file_types"]: # Load the format settings new_name = file_name+"."+file_type["type"] if "name" in file_type: new_name = file_type["name"] + "." + file_type["type"] before_text = file_type["before_def"] after_text = file_type["after_def"] def_text = file_type["def"] setup_text = file_type["setup_def"] end_def = file_type["end_def"] assignment = file_type["assignment"] comment = file_type["comment"] if "dir" in file_type: new_name = file_type["dir"] + new_name # Open the new file to write in new_file = open(new_name, 'w') nl = "\n" if not ("ignore_def" in file_type): new_file.write(before_text + nl + nl) if comment != "NA": new_file.write(comment + "Setup Constants \n\n") for m in json_obj["setup"]: new_file.write( setup_text + " " + m["name"] + assignment) if not ("ignore_def" in file_type): new_file.write( str(m["value"]) + end_def + nl ) else: new_file.write( end_def + nl ) # Write the definitions to the file keys_map = {} if comment != "NA": new_file.write(comment + "Message Constants \n\n") for m in json_obj["msgs"]: keys_map[m["name"]] = m["value"] new_file.write( def_text + " " + m["name"] + assignment) if not ("ignore_def" in file_type): new_file.write( str(m["value"]) + end_def + nl ) else: new_file.write( end_def + nl ) if "map" in file_type: map_def = file_type["map"] keys = map_def["key_def"] keys += ", ".join([ '"'+k+'"' for k in keys_map.keys()]) keys += map_def["end_def"] values = map_def["value_def"] values += ", ".join([ k for k in keys_map.keys()]) values += map_def["end_def"] new_file.write(keys + nl) new_file.write(values + nl) new_file.write(nl + after_text) new_file.close()
gpl-3.0
nazeehshoura/crawler
env/lib/python2.7/site-packages/django/forms/widgets.py
52
31660
""" HTML Widget classes """ from __future__ import unicode_literals import copy from itertools import chain import warnings from django.conf import settings from django.forms.utils import flatatt, to_current_timezone from django.utils.datastructures import MultiValueDict, MergeDict from django.utils.deprecation import RemovedInDjango18Warning from django.utils.encoding import force_text, python_2_unicode_compatible from django.utils.html import conditional_escape, format_html from django.utils.translation import ugettext_lazy from django.utils.safestring import mark_safe from django.utils import formats, six from django.utils.six.moves.urllib.parse import urljoin __all__ = ( 'Media', 'MediaDefiningClass', 'Widget', 'TextInput', 'EmailInput', 'URLInput', 'NumberInput', 'PasswordInput', 'HiddenInput', 'MultipleHiddenInput', 'ClearableFileInput', 'FileInput', 'DateInput', 'DateTimeInput', 'TimeInput', 'Textarea', 'CheckboxInput', 'Select', 'NullBooleanSelect', 'SelectMultiple', 'RadioSelect', 'CheckboxSelectMultiple', 'MultiWidget', 'SplitDateTimeWidget', 'SplitHiddenDateTimeWidget', ) MEDIA_TYPES = ('css', 'js') @python_2_unicode_compatible class Media(object): def __init__(self, media=None, **kwargs): if media: media_attrs = media.__dict__ else: media_attrs = kwargs self._css = {} self._js = [] for name in MEDIA_TYPES: getattr(self, 'add_' + name)(media_attrs.get(name, None)) # Any leftover attributes must be invalid. # if media_attrs != {}: # raise TypeError("'class Media' has invalid attribute(s): %s" % ','.join(media_attrs.keys())) def __str__(self): return self.render() def render(self): return mark_safe('\n'.join(chain(*[getattr(self, 'render_' + name)() for name in MEDIA_TYPES]))) def render_js(self): return [format_html('<script type="text/javascript" src="{0}"></script>', self.absolute_path(path)) for path in self._js] def render_css(self): # To keep rendering order consistent, we can't just iterate over items(). 
# We need to sort the keys, and iterate over the sorted list. media = sorted(self._css.keys()) return chain(*[ [format_html('<link href="{0}" type="text/css" media="{1}" rel="stylesheet" />', self.absolute_path(path), medium) for path in self._css[medium]] for medium in media]) def absolute_path(self, path, prefix=None): if path.startswith(('http://', 'https://', '/')): return path if prefix is None: if settings.STATIC_URL is None: # backwards compatibility prefix = settings.MEDIA_URL else: prefix = settings.STATIC_URL return urljoin(prefix, path) def __getitem__(self, name): "Returns a Media object that only contains media of the given type" if name in MEDIA_TYPES: return Media(**{str(name): getattr(self, '_' + name)}) raise KeyError('Unknown media type "%s"' % name) def add_js(self, data): if data: for path in data: if path not in self._js: self._js.append(path) def add_css(self, data): if data: for medium, paths in data.items(): for path in paths: if not self._css.get(medium) or path not in self._css[medium]: self._css.setdefault(medium, []).append(path) def __add__(self, other): combined = Media() for name in MEDIA_TYPES: getattr(combined, 'add_' + name)(getattr(self, '_' + name, None)) getattr(combined, 'add_' + name)(getattr(other, '_' + name, None)) return combined def media_property(cls): def _media(self): # Get the media property of the superclass, if it exists sup_cls = super(cls, self) try: base = sup_cls.media except AttributeError: base = Media() # Get the media definition for this class definition = getattr(cls, 'Media', None) if definition: extend = getattr(definition, 'extend', True) if extend: if extend is True: m = base else: m = Media() for medium in extend: m = m + base[medium] return m + Media(definition) else: return Media(definition) else: return base return property(_media) class MediaDefiningClass(type): """ Metaclass for classes that can have media definitions. 
""" def __new__(mcs, name, bases, attrs): new_class = (super(MediaDefiningClass, mcs) .__new__(mcs, name, bases, attrs)) if 'media' not in attrs: new_class.media = media_property(new_class) return new_class @python_2_unicode_compatible class SubWidget(object): """ Some widgets are made of multiple HTML elements -- namely, RadioSelect. This is a class that represents the "inner" HTML element of a widget. """ def __init__(self, parent_widget, name, value, attrs, choices): self.parent_widget = parent_widget self.name, self.value = name, value self.attrs, self.choices = attrs, choices def __str__(self): args = [self.name, self.value, self.attrs] if self.choices: args.append(self.choices) return self.parent_widget.render(*args) class Widget(six.with_metaclass(MediaDefiningClass)): needs_multipart_form = False # Determines does this widget need multipart form is_localized = False is_required = False def __init__(self, attrs=None): if attrs is not None: self.attrs = attrs.copy() else: self.attrs = {} def __deepcopy__(self, memo): obj = copy.copy(self) obj.attrs = self.attrs.copy() memo[id(self)] = obj return obj @property def is_hidden(self): return self.input_type == 'hidden' if hasattr(self, 'input_type') else False @is_hidden.setter def is_hidden(self, *args): warnings.warn( "`is_hidden` property is now read-only (and checks `input_type`). " "Please update your code.", RemovedInDjango18Warning, stacklevel=2 ) def subwidgets(self, name, value, attrs=None, choices=()): """ Yields all "subwidgets" of this widget. Used only by RadioSelect to allow template access to individual <input type="radio"> buttons. Arguments are the same as for render(). """ yield SubWidget(self, name, value, attrs, choices) def render(self, name, value, attrs=None): """ Returns this Widget rendered as HTML, as a Unicode string. The 'value' given is not guaranteed to be valid input, so subclass implementations should program defensively. 
""" raise NotImplementedError('subclasses of Widget must provide a render() method') def build_attrs(self, extra_attrs=None, **kwargs): "Helper function for building an attribute dictionary." attrs = dict(self.attrs, **kwargs) if extra_attrs: attrs.update(extra_attrs) return attrs def value_from_datadict(self, data, files, name): """ Given a dictionary of data and this widget's name, returns the value of this widget. Returns None if it's not provided. """ return data.get(name, None) def id_for_label(self, id_): """ Returns the HTML ID attribute of this Widget for use by a <label>, given the ID of the field. Returns None if no ID is available. This hook is necessary because some widgets have multiple HTML elements and, thus, multiple IDs. In that case, this method should return an ID value that corresponds to the first ID in the widget's tags. """ return id_ class Input(Widget): """ Base class for all <input> widgets (except type='checkbox' and type='radio', which are special). """ input_type = None # Subclasses must define this. def _format_value(self, value): if self.is_localized: return formats.localize_input(value) return value def render(self, name, value, attrs=None): if value is None: value = '' final_attrs = self.build_attrs(attrs, type=self.input_type, name=name) if value != '': # Only add the 'value' attribute if a value is non-empty. 
final_attrs['value'] = force_text(self._format_value(value)) return format_html('<input{0} />', flatatt(final_attrs)) class TextInput(Input): input_type = 'text' def __init__(self, attrs=None): if attrs is not None: self.input_type = attrs.pop('type', self.input_type) super(TextInput, self).__init__(attrs) class NumberInput(TextInput): input_type = 'number' class EmailInput(TextInput): input_type = 'email' class URLInput(TextInput): input_type = 'url' class PasswordInput(TextInput): input_type = 'password' def __init__(self, attrs=None, render_value=False): super(PasswordInput, self).__init__(attrs) self.render_value = render_value def render(self, name, value, attrs=None): if not self.render_value: value = None return super(PasswordInput, self).render(name, value, attrs) class HiddenInput(Input): input_type = 'hidden' class MultipleHiddenInput(HiddenInput): """ A widget that handles <input type="hidden"> for fields that have a list of values. """ def __init__(self, attrs=None, choices=()): super(MultipleHiddenInput, self).__init__(attrs) # choices can be any iterable self.choices = choices def render(self, name, value, attrs=None, choices=()): if value is None: value = [] final_attrs = self.build_attrs(attrs, type=self.input_type, name=name) id_ = final_attrs.get('id', None) inputs = [] for i, v in enumerate(value): input_attrs = dict(value=force_text(v), **final_attrs) if id_: # An ID attribute was given. Add a numeric index as a suffix # so that the inputs don't all have the same ID attribute. 
input_attrs['id'] = '%s_%s' % (id_, i) inputs.append(format_html('<input{0} />', flatatt(input_attrs))) return mark_safe('\n'.join(inputs)) def value_from_datadict(self, data, files, name): if isinstance(data, (MultiValueDict, MergeDict)): return data.getlist(name) return data.get(name, None) class FileInput(Input): input_type = 'file' needs_multipart_form = True def render(self, name, value, attrs=None): return super(FileInput, self).render(name, None, attrs=attrs) def value_from_datadict(self, data, files, name): "File widgets take data from FILES, not POST" return files.get(name, None) FILE_INPUT_CONTRADICTION = object() class ClearableFileInput(FileInput): initial_text = ugettext_lazy('Currently') input_text = ugettext_lazy('Change') clear_checkbox_label = ugettext_lazy('Clear') template_with_initial = '%(initial_text)s: %(initial)s %(clear_template)s<br />%(input_text)s: %(input)s' template_with_clear = '%(clear)s <label for="%(clear_checkbox_id)s">%(clear_checkbox_label)s</label>' url_markup_template = '<a href="{0}">{1}</a>' def clear_checkbox_name(self, name): """ Given the name of the file input, return the name of the clear checkbox input. """ return name + '-clear' def clear_checkbox_id(self, name): """ Given the name of the clear checkbox input, return the HTML id for it. 
""" return name + '_id' def render(self, name, value, attrs=None): substitutions = { 'initial_text': self.initial_text, 'input_text': self.input_text, 'clear_template': '', 'clear_checkbox_label': self.clear_checkbox_label, } template = '%(input)s' substitutions['input'] = super(ClearableFileInput, self).render(name, value, attrs) if value and hasattr(value, "url"): template = self.template_with_initial substitutions['initial'] = format_html(self.url_markup_template, value.url, force_text(value)) if not self.is_required: checkbox_name = self.clear_checkbox_name(name) checkbox_id = self.clear_checkbox_id(checkbox_name) substitutions['clear_checkbox_name'] = conditional_escape(checkbox_name) substitutions['clear_checkbox_id'] = conditional_escape(checkbox_id) substitutions['clear'] = CheckboxInput().render(checkbox_name, False, attrs={'id': checkbox_id}) substitutions['clear_template'] = self.template_with_clear % substitutions return mark_safe(template % substitutions) def value_from_datadict(self, data, files, name): upload = super(ClearableFileInput, self).value_from_datadict(data, files, name) if not self.is_required and CheckboxInput().value_from_datadict( data, files, self.clear_checkbox_name(name)): if upload: # If the user contradicts themselves (uploads a new file AND # checks the "clear" checkbox), we return a unique marker # object that FileField will turn into a ValidationError. 
return FILE_INPUT_CONTRADICTION # False signals to clear any existing value, as opposed to just None return False return upload class Textarea(Widget): def __init__(self, attrs=None): # Use slightly better defaults than HTML's 20x2 box default_attrs = {'cols': '40', 'rows': '10'} if attrs: default_attrs.update(attrs) super(Textarea, self).__init__(default_attrs) def render(self, name, value, attrs=None): if value is None: value = '' final_attrs = self.build_attrs(attrs, name=name) return format_html('<textarea{0}>\r\n{1}</textarea>', flatatt(final_attrs), force_text(value)) class DateTimeBaseInput(TextInput): format_key = '' supports_microseconds = False def __init__(self, attrs=None, format=None): super(DateTimeBaseInput, self).__init__(attrs) self.format = format if format else None def _format_value(self, value): return formats.localize_input(value, self.format or formats.get_format(self.format_key)[0]) class DateInput(DateTimeBaseInput): format_key = 'DATE_INPUT_FORMATS' class DateTimeInput(DateTimeBaseInput): format_key = 'DATETIME_INPUT_FORMATS' class TimeInput(DateTimeBaseInput): format_key = 'TIME_INPUT_FORMATS' # Defined at module level so that CheckboxInput is picklable (#17976) def boolean_check(v): return not (v is False or v is None or v == '') class CheckboxInput(Widget): def __init__(self, attrs=None, check_test=None): super(CheckboxInput, self).__init__(attrs) # check_test is a callable that takes a value and returns True # if the checkbox should be checked for that value. self.check_test = boolean_check if check_test is None else check_test def render(self, name, value, attrs=None): final_attrs = self.build_attrs(attrs, type='checkbox', name=name) if self.check_test(value): final_attrs['checked'] = 'checked' if not (value is True or value is False or value is None or value == ''): # Only add the 'value' attribute if a value is non-empty. 
final_attrs['value'] = force_text(value) return format_html('<input{0} />', flatatt(final_attrs)) def value_from_datadict(self, data, files, name): if name not in data: # A missing value means False because HTML form submission does not # send results for unselected checkboxes. return False value = data.get(name) # Translate true and false strings to boolean values. values = {'true': True, 'false': False} if isinstance(value, six.string_types): value = values.get(value.lower(), value) return bool(value) class Select(Widget): allow_multiple_selected = False def __init__(self, attrs=None, choices=()): super(Select, self).__init__(attrs) # choices can be any iterable, but we may need to render this widget # multiple times. Thus, collapse it into a list so it can be consumed # more than once. self.choices = list(choices) def render(self, name, value, attrs=None, choices=()): if value is None: value = '' final_attrs = self.build_attrs(attrs, name=name) output = [format_html('<select{0}>', flatatt(final_attrs))] options = self.render_options(choices, [value]) if options: output.append(options) output.append('</select>') return mark_safe('\n'.join(output)) def render_option(self, selected_choices, option_value, option_label): if option_value is None: option_value = '' option_value = force_text(option_value) if option_value in selected_choices: selected_html = mark_safe(' selected="selected"') if not self.allow_multiple_selected: # Only allow for a single selection. selected_choices.remove(option_value) else: selected_html = '' return format_html('<option value="{0}"{1}>{2}</option>', option_value, selected_html, force_text(option_label)) def render_options(self, choices, selected_choices): # Normalize to strings. 
selected_choices = set(force_text(v) for v in selected_choices) output = [] for option_value, option_label in chain(self.choices, choices): if isinstance(option_label, (list, tuple)): output.append(format_html('<optgroup label="{0}">', force_text(option_value))) for option in option_label: output.append(self.render_option(selected_choices, *option)) output.append('</optgroup>') else: output.append(self.render_option(selected_choices, option_value, option_label)) return '\n'.join(output) class NullBooleanSelect(Select): """ A Select Widget intended to be used with NullBooleanField. """ def __init__(self, attrs=None): choices = (('1', ugettext_lazy('Unknown')), ('2', ugettext_lazy('Yes')), ('3', ugettext_lazy('No'))) super(NullBooleanSelect, self).__init__(attrs, choices) def render(self, name, value, attrs=None, choices=()): try: value = {True: '2', False: '3', '2': '2', '3': '3'}[value] except KeyError: value = '1' return super(NullBooleanSelect, self).render(name, value, attrs, choices) def value_from_datadict(self, data, files, name): value = data.get(name, None) return {'2': True, True: True, 'True': True, '3': False, 'False': False, False: False}.get(value, None) class SelectMultiple(Select): allow_multiple_selected = True def render(self, name, value, attrs=None, choices=()): if value is None: value = [] final_attrs = self.build_attrs(attrs, name=name) output = [format_html('<select multiple="multiple"{0}>', flatatt(final_attrs))] options = self.render_options(choices, value) if options: output.append(options) output.append('</select>') return mark_safe('\n'.join(output)) def value_from_datadict(self, data, files, name): if isinstance(data, (MultiValueDict, MergeDict)): return data.getlist(name) return data.get(name, None) @python_2_unicode_compatible class ChoiceInput(SubWidget): """ An object used by ChoiceFieldRenderer that represents a single <input type='$input_type'>. 
""" input_type = None # Subclasses must define this def __init__(self, name, value, attrs, choice, index): self.name = name self.value = value self.attrs = attrs self.choice_value = force_text(choice[0]) self.choice_label = force_text(choice[1]) self.index = index if 'id' in self.attrs: self.attrs['id'] += "_%d" % self.index def __str__(self): return self.render() def render(self, name=None, value=None, attrs=None, choices=()): if self.id_for_label: label_for = format_html(' for="{0}"', self.id_for_label) else: label_for = '' return format_html('<label{0}>{1} {2}</label>', label_for, self.tag(), self.choice_label) def is_checked(self): return self.value == self.choice_value def tag(self): final_attrs = dict(self.attrs, type=self.input_type, name=self.name, value=self.choice_value) if self.is_checked(): final_attrs['checked'] = 'checked' return format_html('<input{0} />', flatatt(final_attrs)) @property def id_for_label(self): return self.attrs.get('id', '') class RadioChoiceInput(ChoiceInput): input_type = 'radio' def __init__(self, *args, **kwargs): super(RadioChoiceInput, self).__init__(*args, **kwargs) self.value = force_text(self.value) class RadioInput(RadioChoiceInput): def __init__(self, *args, **kwargs): msg = "RadioInput has been deprecated. Use RadioChoiceInput instead." warnings.warn(msg, RemovedInDjango18Warning, stacklevel=2) super(RadioInput, self).__init__(*args, **kwargs) class CheckboxChoiceInput(ChoiceInput): input_type = 'checkbox' def __init__(self, *args, **kwargs): super(CheckboxChoiceInput, self).__init__(*args, **kwargs) self.value = set(force_text(v) for v in self.value) def is_checked(self): return self.choice_value in self.value @python_2_unicode_compatible class ChoiceFieldRenderer(object): """ An object used by RadioSelect to enable customization of radio widgets. 
""" choice_input_class = None def __init__(self, name, value, attrs, choices): self.name = name self.value = value self.attrs = attrs self.choices = choices def __getitem__(self, idx): choice = self.choices[idx] # Let the IndexError propagate return self.choice_input_class(self.name, self.value, self.attrs.copy(), choice, idx) def __str__(self): return self.render() def render(self): """ Outputs a <ul> for this set of choice fields. If an id was given to the field, it is applied to the <ul> (each item in the list will get an id of `$id_$i`). """ id_ = self.attrs.get('id', None) start_tag = format_html('<ul id="{0}">', id_) if id_ else '<ul>' output = [start_tag] for i, choice in enumerate(self.choices): choice_value, choice_label = choice if isinstance(choice_label, (tuple, list)): attrs_plus = self.attrs.copy() if id_: attrs_plus['id'] += '_{0}'.format(i) sub_ul_renderer = ChoiceFieldRenderer(name=self.name, value=self.value, attrs=attrs_plus, choices=choice_label) sub_ul_renderer.choice_input_class = self.choice_input_class output.append(format_html('<li>{0}{1}</li>', choice_value, sub_ul_renderer.render())) else: w = self.choice_input_class(self.name, self.value, self.attrs.copy(), choice, i) output.append(format_html('<li>{0}</li>', force_text(w))) output.append('</ul>') return mark_safe('\n'.join(output)) class RadioFieldRenderer(ChoiceFieldRenderer): choice_input_class = RadioChoiceInput class CheckboxFieldRenderer(ChoiceFieldRenderer): choice_input_class = CheckboxChoiceInput class RendererMixin(object): renderer = None # subclasses must define this _empty_value = None def __init__(self, *args, **kwargs): # Override the default renderer if we were passed one. 
renderer = kwargs.pop('renderer', None) if renderer: self.renderer = renderer super(RendererMixin, self).__init__(*args, **kwargs) def subwidgets(self, name, value, attrs=None, choices=()): for widget in self.get_renderer(name, value, attrs, choices): yield widget def get_renderer(self, name, value, attrs=None, choices=()): """Returns an instance of the renderer.""" if value is None: value = self._empty_value final_attrs = self.build_attrs(attrs) choices = list(chain(self.choices, choices)) return self.renderer(name, value, final_attrs, choices) def render(self, name, value, attrs=None, choices=()): return self.get_renderer(name, value, attrs, choices).render() def id_for_label(self, id_): # Widgets using this RendererMixin are made of a collection of # subwidgets, each with their own <label>, and distinct ID. # The IDs are made distinct by y "_X" suffix, where X is the zero-based # index of the choice field. Thus, the label for the main widget should # reference the first subwidget, hence the "_0" suffix. if id_: id_ += '_0' return id_ class RadioSelect(RendererMixin, Select): renderer = RadioFieldRenderer _empty_value = '' class CheckboxSelectMultiple(RendererMixin, SelectMultiple): renderer = CheckboxFieldRenderer _empty_value = [] class MultiWidget(Widget): """ A widget that is composed of multiple widgets. Its render() method is different than other widgets', because it has to figure out how to split a single value for display in multiple widgets. The ``value`` argument can be one of two things: * A list. * A normal value (e.g., a string) that has been "compressed" from a list of values. In the second case -- i.e., if the value is NOT a list -- render() will first "decompress" the value into a list before rendering it. It does so by calling the decompress() method, which MultiWidget subclasses must implement. This method takes a single "compressed" value and returns a list. 
When render() does its HTML rendering, each value in the list is rendered with the corresponding widget -- the first value is rendered in the first widget, the second value is rendered in the second widget, etc. Subclasses may implement format_output(), which takes the list of rendered widgets and returns a string of HTML that formats them any way you'd like. You'll probably want to use this class with MultiValueField. """ def __init__(self, widgets, attrs=None): self.widgets = [w() if isinstance(w, type) else w for w in widgets] super(MultiWidget, self).__init__(attrs) @property def is_hidden(self): return all(w.is_hidden for w in self.widgets) def render(self, name, value, attrs=None): if self.is_localized: for widget in self.widgets: widget.is_localized = self.is_localized # value is a list of values, each corresponding to a widget # in self.widgets. if not isinstance(value, list): value = self.decompress(value) output = [] final_attrs = self.build_attrs(attrs) id_ = final_attrs.get('id', None) for i, widget in enumerate(self.widgets): try: widget_value = value[i] except IndexError: widget_value = None if id_: final_attrs = dict(final_attrs, id='%s_%s' % (id_, i)) output.append(widget.render(name + '_%s' % i, widget_value, final_attrs)) return mark_safe(self.format_output(output)) def id_for_label(self, id_): # See the comment for RadioSelect.id_for_label() if id_: id_ += '_0' return id_ def value_from_datadict(self, data, files, name): return [widget.value_from_datadict(data, files, name + '_%s' % i) for i, widget in enumerate(self.widgets)] def format_output(self, rendered_widgets): """ Given a list of rendered widgets (as strings), returns a Unicode string representing the HTML for the whole lot. This hook allows you to format the HTML design of the widgets, if needed. """ return ''.join(rendered_widgets) def decompress(self, value): """ Returns a list of decompressed values for the given compressed value. 
The given value can be assumed to be valid, but not necessarily non-empty. """ raise NotImplementedError('Subclasses must implement this method.') def _get_media(self): "Media for a multiwidget is the combination of all media of the subwidgets" media = Media() for w in self.widgets: media = media + w.media return media media = property(_get_media) def __deepcopy__(self, memo): obj = super(MultiWidget, self).__deepcopy__(memo) obj.widgets = copy.deepcopy(self.widgets) return obj @property def needs_multipart_form(self): return any(w.needs_multipart_form for w in self.widgets) class SplitDateTimeWidget(MultiWidget): """ A Widget that splits datetime input into two <input type="text"> boxes. """ supports_microseconds = False def __init__(self, attrs=None, date_format=None, time_format=None): widgets = (DateInput(attrs=attrs, format=date_format), TimeInput(attrs=attrs, format=time_format)) super(SplitDateTimeWidget, self).__init__(widgets, attrs) def decompress(self, value): if value: value = to_current_timezone(value) return [value.date(), value.time().replace(microsecond=0)] return [None, None] class SplitHiddenDateTimeWidget(SplitDateTimeWidget): """ A Widget that splits datetime input into two <input type="hidden"> inputs. """ def __init__(self, attrs=None, date_format=None, time_format=None): super(SplitHiddenDateTimeWidget, self).__init__(attrs, date_format, time_format) for widget in self.widgets: widget.input_type = 'hidden'
mit
j5shi/Thruster
pylibs/test/test_types.py
13
30479
# Python test set -- part 6, built-in types from test.test_support import run_unittest, have_unicode, run_with_locale, \ check_py3k_warnings import unittest import sys import locale class TypesTests(unittest.TestCase): def test_truth_values(self): if None: self.fail('None is true instead of false') if 0: self.fail('0 is true instead of false') if 0L: self.fail('0L is true instead of false') if 0.0: self.fail('0.0 is true instead of false') if '': self.fail('\'\' is true instead of false') if not 1: self.fail('1 is false instead of true') if not 1L: self.fail('1L is false instead of true') if not 1.0: self.fail('1.0 is false instead of true') if not 'x': self.fail('\'x\' is false instead of true') if not {'x': 1}: self.fail('{\'x\': 1} is false instead of true') def f(): pass class C: pass x = C() if not f: self.fail('f is false instead of true') if not C: self.fail('C is false instead of true') if not sys: self.fail('sys is false instead of true') if not x: self.fail('x is false instead of true') def test_boolean_ops(self): if 0 or 0: self.fail('0 or 0 is true instead of false') if 1 and 1: pass else: self.fail('1 and 1 is false instead of true') if not 1: self.fail('not 1 is true instead of false') def test_comparisons(self): if 0 < 1 <= 1 == 1 >= 1 > 0 != 1: pass else: self.fail('int comparisons failed') if 0L < 1L <= 1L == 1L >= 1L > 0L != 1L: pass else: self.fail('long int comparisons failed') if 0.0 < 1.0 <= 1.0 == 1.0 >= 1.0 > 0.0 != 1.0: pass else: self.fail('float comparisons failed') if '' < 'a' <= 'a' == 'a' < 'abc' < 'abd' < 'b': pass else: self.fail('string comparisons failed') if None is None: pass else: self.fail('identity test failed') def test_float_constructor(self): self.assertRaises(ValueError, float, '') self.assertRaises(ValueError, float, '5\0') def test_zero_division(self): try: 5.0 / 0.0 except ZeroDivisionError: pass else: self.fail("5.0 / 0.0 didn't raise ZeroDivisionError") try: 5.0 // 0.0 except ZeroDivisionError: pass else: 
self.fail("5.0 // 0.0 didn't raise ZeroDivisionError") try: 5.0 % 0.0 except ZeroDivisionError: pass else: self.fail("5.0 % 0.0 didn't raise ZeroDivisionError") try: 5 / 0L except ZeroDivisionError: pass else: self.fail("5 / 0L didn't raise ZeroDivisionError") try: 5 // 0L except ZeroDivisionError: pass else: self.fail("5 // 0L didn't raise ZeroDivisionError") try: 5 % 0L except ZeroDivisionError: pass else: self.fail("5 % 0L didn't raise ZeroDivisionError") def test_numeric_types(self): if 0 != 0L or 0 != 0.0 or 0L != 0.0: self.fail('mixed comparisons') if 1 != 1L or 1 != 1.0 or 1L != 1.0: self.fail('mixed comparisons') if -1 != -1L or -1 != -1.0 or -1L != -1.0: self.fail('int/long/float value not equal') # calling built-in types without argument must return 0 if int() != 0: self.fail('int() does not return 0') if long() != 0L: self.fail('long() does not return 0L') if float() != 0.0: self.fail('float() does not return 0.0') if int(1.9) == 1 == int(1.1) and int(-1.1) == -1 == int(-1.9): pass else: self.fail('int() does not round properly') if long(1.9) == 1L == long(1.1) and long(-1.1) == -1L == long(-1.9): pass else: self.fail('long() does not round properly') if float(1) == 1.0 and float(-1) == -1.0 and float(0) == 0.0: pass else: self.fail('float() does not work properly') def test_float_to_string(self): def test(f, result): self.assertEqual(f.__format__('e'), result) self.assertEqual('%e' % f, result) # test all 2 digit exponents, both with __format__ and with # '%' formatting for i in range(-99, 100): test(float('1.5e'+str(i)), '1.500000e{0:+03d}'.format(i)) # test some 3 digit exponents self.assertEqual(1.5e100.__format__('e'), '1.500000e+100') self.assertEqual('%e' % 1.5e100, '1.500000e+100') self.assertEqual(1.5e101.__format__('e'), '1.500000e+101') self.assertEqual('%e' % 1.5e101, '1.500000e+101') self.assertEqual(1.5e-100.__format__('e'), '1.500000e-100') self.assertEqual('%e' % 1.5e-100, '1.500000e-100') self.assertEqual(1.5e-101.__format__('e'), 
'1.500000e-101') self.assertEqual('%e' % 1.5e-101, '1.500000e-101') self.assertEqual('%g' % 1.0, '1') self.assertEqual('%#g' % 1.0, '1.00000') def test_normal_integers(self): # Ensure the first 256 integers are shared a = 256 b = 128*2 if a is not b: self.fail('256 is not shared') if 12 + 24 != 36: self.fail('int op') if 12 + (-24) != -12: self.fail('int op') if (-12) + 24 != 12: self.fail('int op') if (-12) + (-24) != -36: self.fail('int op') if not 12 < 24: self.fail('int op') if not -24 < -12: self.fail('int op') # Test for a particular bug in integer multiply xsize, ysize, zsize = 238, 356, 4 if not (xsize*ysize*zsize == zsize*xsize*ysize == 338912): self.fail('int mul commutativity') # And another. m = -sys.maxint - 1 for divisor in 1, 2, 4, 8, 16, 32: j = m // divisor prod = divisor * j if prod != m: self.fail("%r * %r == %r != %r" % (divisor, j, prod, m)) if type(prod) is not int: self.fail("expected type(prod) to be int, not %r" % type(prod)) # Check for expected * overflow to long. for divisor in 1, 2, 4, 8, 16, 32: j = m // divisor - 1 prod = divisor * j if type(prod) is not long: self.fail("expected type(%r) to be long, not %r" % (prod, type(prod))) # Check for expected * overflow to long. 
m = sys.maxint for divisor in 1, 2, 4, 8, 16, 32: j = m // divisor + 1 prod = divisor * j if type(prod) is not long: self.fail("expected type(%r) to be long, not %r" % (prod, type(prod))) def test_long_integers(self): if 12L + 24L != 36L: self.fail('long op') if 12L + (-24L) != -12L: self.fail('long op') if (-12L) + 24L != 12L: self.fail('long op') if (-12L) + (-24L) != -36L: self.fail('long op') if not 12L < 24L: self.fail('long op') if not -24L < -12L: self.fail('long op') x = sys.maxint if int(long(x)) != x: self.fail('long op') try: y = int(long(x)+1L) except OverflowError: self.fail('long op') if not isinstance(y, long): self.fail('long op') x = -x if int(long(x)) != x: self.fail('long op') x = x-1 if int(long(x)) != x: self.fail('long op') try: y = int(long(x)-1L) except OverflowError: self.fail('long op') if not isinstance(y, long): self.fail('long op') try: 5 << -5 except ValueError: pass else: self.fail('int negative shift <<') try: 5L << -5L except ValueError: pass else: self.fail('long negative shift <<') try: 5 >> -5 except ValueError: pass else: self.fail('int negative shift >>') try: 5L >> -5L except ValueError: pass else: self.fail('long negative shift >>') def test_floats(self): if 12.0 + 24.0 != 36.0: self.fail('float op') if 12.0 + (-24.0) != -12.0: self.fail('float op') if (-12.0) + 24.0 != 12.0: self.fail('float op') if (-12.0) + (-24.0) != -36.0: self.fail('float op') if not 12.0 < 24.0: self.fail('float op') if not -24.0 < -12.0: self.fail('float op') def test_strings(self): if len('') != 0: self.fail('len(\'\')') if len('a') != 1: self.fail('len(\'a\')') if len('abcdef') != 6: self.fail('len(\'abcdef\')') if 'xyz' + 'abcde' != 'xyzabcde': self.fail('string concatenation') if 'xyz'*3 != 'xyzxyzxyz': self.fail('string repetition *3') if 0*'abcde' != '': self.fail('string repetition 0*') if min('abc') != 'a' or max('abc') != 'c': self.fail('min/max string') if 'a' in 'abc' and 'b' in 'abc' and 'c' in 'abc' and 'd' not in 'abc': pass else: 
self.fail('in/not in string') x = 'x'*103 if '%s!'%x != x+'!': self.fail('nasty string formatting bug') #extended slices for strings a = '0123456789' self.assertEqual(a[::], a) self.assertEqual(a[::2], '02468') self.assertEqual(a[1::2], '13579') self.assertEqual(a[::-1],'9876543210') self.assertEqual(a[::-2], '97531') self.assertEqual(a[3::-2], '31') self.assertEqual(a[-100:100:], a) self.assertEqual(a[100:-100:-1], a[::-1]) self.assertEqual(a[-100L:100L:2L], '02468') if have_unicode: a = unicode('0123456789', 'ascii') self.assertEqual(a[::], a) self.assertEqual(a[::2], unicode('02468', 'ascii')) self.assertEqual(a[1::2], unicode('13579', 'ascii')) self.assertEqual(a[::-1], unicode('9876543210', 'ascii')) self.assertEqual(a[::-2], unicode('97531', 'ascii')) self.assertEqual(a[3::-2], unicode('31', 'ascii')) self.assertEqual(a[-100:100:], a) self.assertEqual(a[100:-100:-1], a[::-1]) self.assertEqual(a[-100L:100L:2L], unicode('02468', 'ascii')) def test_type_function(self): self.assertRaises(TypeError, type, 1, 2) self.assertRaises(TypeError, type, 1, 2, 3, 4) def test_buffers(self): self.assertRaises(ValueError, buffer, 'asdf', -1) cmp(buffer("abc"), buffer("def")) # used to raise a warning: tp_compare didn't return -1, 0, or 1 self.assertRaises(TypeError, buffer, None) a = buffer('asdf') hash(a) b = a * 5 if a == b: self.fail('buffers should not be equal') if str(b) != ('asdf' * 5): self.fail('repeated buffer has wrong content') if str(a * 0) != '': self.fail('repeated buffer zero times has wrong content') if str(a + buffer('def')) != 'asdfdef': self.fail('concatenation of buffers yields wrong content') if str(buffer(a)) != 'asdf': self.fail('composing buffers failed') if str(buffer(a, 2)) != 'df': self.fail('specifying buffer offset failed') if str(buffer(a, 0, 2)) != 'as': self.fail('specifying buffer size failed') if str(buffer(a, 1, 2)) != 'sd': self.fail('specifying buffer offset and size failed') self.assertRaises(ValueError, buffer, buffer('asdf', 1), -1) if 
str(buffer(buffer('asdf', 0, 2), 0)) != 'as': self.fail('composing length-specified buffer failed') if str(buffer(buffer('asdf', 0, 2), 0, 5000)) != 'as': self.fail('composing length-specified buffer failed') if str(buffer(buffer('asdf', 0, 2), 0, -1)) != 'as': self.fail('composing length-specified buffer failed') if str(buffer(buffer('asdf', 0, 2), 1, 2)) != 's': self.fail('composing length-specified buffer failed') try: a[1] = 'g' except TypeError: pass else: self.fail("buffer assignment should raise TypeError") try: a[0:1] = 'g' except TypeError: pass else: self.fail("buffer slice assignment should raise TypeError") # array.array() returns an object that does not implement a char buffer, # something which int() uses for conversion. import array try: int(buffer(array.array('c'))) except TypeError: pass else: self.fail("char buffer (at C level) not working") def test_int__format__(self): def test(i, format_spec, result): # just make sure I'm not accidentally checking longs assert type(i) == int assert type(format_spec) == str self.assertEqual(i.__format__(format_spec), result) self.assertEqual(i.__format__(unicode(format_spec)), result) test(123456789, 'd', '123456789') test(123456789, 'd', '123456789') test(1, 'c', '\01') # sign and aligning are interdependent test(1, "-", '1') test(-1, "-", '-1') test(1, "-3", ' 1') test(-1, "-3", ' -1') test(1, "+3", ' +1') test(-1, "+3", ' -1') test(1, " 3", ' 1') test(-1, " 3", ' -1') test(1, " ", ' 1') test(-1, " ", '-1') # hex test(3, "x", "3") test(3, "X", "3") test(1234, "x", "4d2") test(-1234, "x", "-4d2") test(1234, "8x", " 4d2") test(-1234, "8x", " -4d2") test(1234, "x", "4d2") test(-1234, "x", "-4d2") test(-3, "x", "-3") test(-3, "X", "-3") test(int('be', 16), "x", "be") test(int('be', 16), "X", "BE") test(-int('be', 16), "x", "-be") test(-int('be', 16), "X", "-BE") # octal test(3, "o", "3") test(-3, "o", "-3") test(65, "o", "101") test(-65, "o", "-101") test(1234, "o", "2322") test(-1234, "o", "-2322") test(1234, 
"-o", "2322") test(-1234, "-o", "-2322") test(1234, " o", " 2322") test(-1234, " o", "-2322") test(1234, "+o", "+2322") test(-1234, "+o", "-2322") # binary test(3, "b", "11") test(-3, "b", "-11") test(1234, "b", "10011010010") test(-1234, "b", "-10011010010") test(1234, "-b", "10011010010") test(-1234, "-b", "-10011010010") test(1234, " b", " 10011010010") test(-1234, " b", "-10011010010") test(1234, "+b", "+10011010010") test(-1234, "+b", "-10011010010") # alternate (#) formatting test(0, "#b", '0b0') test(0, "-#b", '0b0') test(1, "-#b", '0b1') test(-1, "-#b", '-0b1') test(-1, "-#5b", ' -0b1') test(1, "+#5b", ' +0b1') test(100, "+#b", '+0b1100100') test(100, "#012b", '0b0001100100') test(-100, "#012b", '-0b001100100') test(0, "#o", '0o0') test(0, "-#o", '0o0') test(1, "-#o", '0o1') test(-1, "-#o", '-0o1') test(-1, "-#5o", ' -0o1') test(1, "+#5o", ' +0o1') test(100, "+#o", '+0o144') test(100, "#012o", '0o0000000144') test(-100, "#012o", '-0o000000144') test(0, "#x", '0x0') test(0, "-#x", '0x0') test(1, "-#x", '0x1') test(-1, "-#x", '-0x1') test(-1, "-#5x", ' -0x1') test(1, "+#5x", ' +0x1') test(100, "+#x", '+0x64') test(100, "#012x", '0x0000000064') test(-100, "#012x", '-0x000000064') test(123456, "#012x", '0x000001e240') test(-123456, "#012x", '-0x00001e240') test(0, "#X", '0X0') test(0, "-#X", '0X0') test(1, "-#X", '0X1') test(-1, "-#X", '-0X1') test(-1, "-#5X", ' -0X1') test(1, "+#5X", ' +0X1') test(100, "+#X", '+0X64') test(100, "#012X", '0X0000000064') test(-100, "#012X", '-0X000000064') test(123456, "#012X", '0X000001E240') test(-123456, "#012X", '-0X00001E240') # issue 5782, commas with no specifier type test(1234, '010,', '00,001,234') # make sure these are errors # precision disallowed self.assertRaises(ValueError, 3 .__format__, "1.3") # sign not allowed with 'c' self.assertRaises(ValueError, 3 .__format__, "+c") # format spec must be string self.assertRaises(TypeError, 3 .__format__, None) self.assertRaises(TypeError, 3 .__format__, 0) # can't have ',' 
with 'c' self.assertRaises(ValueError, 3 .__format__, ",c") # ensure that only int and float type specifiers work for format_spec in ([chr(x) for x in range(ord('a'), ord('z')+1)] + [chr(x) for x in range(ord('A'), ord('Z')+1)]): if not format_spec in 'bcdoxXeEfFgGn%': self.assertRaises(ValueError, 0 .__format__, format_spec) self.assertRaises(ValueError, 1 .__format__, format_spec) self.assertRaises(ValueError, (-1) .__format__, format_spec) # ensure that float type specifiers work; format converts # the int to a float for format_spec in 'eEfFgG%': for value in [0, 1, -1, 100, -100, 1234567890, -1234567890]: self.assertEqual(value.__format__(format_spec), float(value).__format__(format_spec)) # Issue 6902 test(123456, "0<20", '12345600000000000000') test(123456, "1<20", '12345611111111111111') test(123456, "*<20", '123456**************') test(123456, "0>20", '00000000000000123456') test(123456, "1>20", '11111111111111123456') test(123456, "*>20", '**************123456') test(123456, "0=20", '00000000000000123456') test(123456, "1=20", '11111111111111123456') test(123456, "*=20", '**************123456') def test_long__format__(self): def test(i, format_spec, result): # make sure we're not accidentally checking ints assert type(i) == long assert type(format_spec) == str self.assertEqual(i.__format__(format_spec), result) self.assertEqual(i.__format__(unicode(format_spec)), result) test(10**100, 'd', '1' + '0' * 100) test(10**100+100, 'd', '1' + '0' * 97 + '100') test(123456789L, 'd', '123456789') test(123456789L, 'd', '123456789') # sign and aligning are interdependent test(1L, "-", '1') test(-1L, "-", '-1') test(1L, "-3", ' 1') test(-1L, "-3", ' -1') test(1L, "+3", ' +1') test(-1L, "+3", ' -1') test(1L, " 3", ' 1') test(-1L, " 3", ' -1') test(1L, " ", ' 1') test(-1L, " ", '-1') test(1L, 'c', '\01') # hex test(3L, "x", "3") test(3L, "X", "3") test(1234L, "x", "4d2") test(-1234L, "x", "-4d2") test(1234L, "8x", " 4d2") test(-1234L, "8x", " -4d2") test(1234L, "x", 
"4d2") test(-1234L, "x", "-4d2") test(-3L, "x", "-3") test(-3L, "X", "-3") test(long('be', 16), "x", "be") test(long('be', 16), "X", "BE") test(-long('be', 16), "x", "-be") test(-long('be', 16), "X", "-BE") # octal test(3L, "o", "3") test(-3L, "o", "-3") test(65L, "o", "101") test(-65L, "o", "-101") test(1234L, "o", "2322") test(-1234L, "o", "-2322") test(1234L, "-o", "2322") test(-1234L, "-o", "-2322") test(1234L, " o", " 2322") test(-1234L, " o", "-2322") test(1234L, "+o", "+2322") test(-1234L, "+o", "-2322") # binary test(3L, "b", "11") test(-3L, "b", "-11") test(1234L, "b", "10011010010") test(-1234L, "b", "-10011010010") test(1234L, "-b", "10011010010") test(-1234L, "-b", "-10011010010") test(1234L, " b", " 10011010010") test(-1234L, " b", "-10011010010") test(1234L, "+b", "+10011010010") test(-1234L, "+b", "-10011010010") # make sure these are errors # precision disallowed self.assertRaises(ValueError, 3L .__format__, "1.3") # sign not allowed with 'c' self.assertRaises(ValueError, 3L .__format__, "+c") # format spec must be string self.assertRaises(TypeError, 3L .__format__, None) self.assertRaises(TypeError, 3L .__format__, 0) # alternate specifier in wrong place self.assertRaises(ValueError, 1L .__format__, "#+5x") self.assertRaises(ValueError, 1L .__format__, "+5#x") # ensure that only int and float type specifiers work for format_spec in ([chr(x) for x in range(ord('a'), ord('z')+1)] + [chr(x) for x in range(ord('A'), ord('Z')+1)]): if not format_spec in 'bcdoxXeEfFgGn%': self.assertRaises(ValueError, 0L .__format__, format_spec) self.assertRaises(ValueError, 1L .__format__, format_spec) self.assertRaises(ValueError, (-1L) .__format__, format_spec) # ensure that float type specifiers work; format converts # the long to a float for format_spec in 'eEfFgG%': for value in [0L, 1L, -1L, 100L, -100L, 1234567890L, -1234567890L]: self.assertEqual(value.__format__(format_spec), float(value).__format__(format_spec)) # Issue 6902 test(123456L, "0<20", 
'12345600000000000000') test(123456L, "1<20", '12345611111111111111') test(123456L, "*<20", '123456**************') test(123456L, "0>20", '00000000000000123456') test(123456L, "1>20", '11111111111111123456') test(123456L, "*>20", '**************123456') test(123456L, "0=20", '00000000000000123456') test(123456L, "1=20", '11111111111111123456') test(123456L, "*=20", '**************123456') @run_with_locale('LC_NUMERIC', 'en_US.UTF8') def test_float__format__locale(self): # test locale support for __format__ code 'n' for i in range(-10, 10): x = 1234567890.0 * (10.0 ** i) self.assertEqual(locale.format('%g', x, grouping=True), format(x, 'n')) self.assertEqual(locale.format('%.10g', x, grouping=True), format(x, '.10n')) @run_with_locale('LC_NUMERIC', 'en_US.UTF8') def test_int__format__locale(self): # test locale support for __format__ code 'n' for integers x = 123456789012345678901234567890 for i in range(0, 30): self.assertEqual(locale.format('%d', x, grouping=True), format(x, 'n')) # move to the next integer to test x = x // 10 rfmt = ">20n" lfmt = "<20n" cfmt = "^20n" for x in (1234, 12345, 123456, 1234567, 12345678, 123456789, 1234567890, 12345678900): self.assertEqual(len(format(0, rfmt)), len(format(x, rfmt))) self.assertEqual(len(format(0, lfmt)), len(format(x, lfmt))) self.assertEqual(len(format(0, cfmt)), len(format(x, cfmt))) def test_float__format__(self): # these should be rewritten to use both format(x, spec) and # x.__format__(spec) def test(f, format_spec, result): assert type(f) == float assert type(format_spec) == str self.assertEqual(f.__format__(format_spec), result) self.assertEqual(f.__format__(unicode(format_spec)), result) test(0.0, 'f', '0.000000') # the default is 'g', except for empty format spec test(0.0, '', '0.0') test(0.01, '', '0.01') test(0.01, 'g', '0.01') # test for issue 3411 test(1.23, '1', '1.23') test(-1.23, '1', '-1.23') test(1.23, '1g', '1.23') test(-1.23, '1g', '-1.23') test( 1.0, ' g', ' 1') test(-1.0, ' g', '-1') test( 1.0, 
'+g', '+1') test(-1.0, '+g', '-1') test(1.1234e200, 'g', '1.1234e+200') test(1.1234e200, 'G', '1.1234E+200') test(1.0, 'f', '1.000000') test(-1.0, 'f', '-1.000000') test( 1.0, ' f', ' 1.000000') test(-1.0, ' f', '-1.000000') test( 1.0, '+f', '+1.000000') test(-1.0, '+f', '-1.000000') # Python versions <= 2.6 switched from 'f' to 'g' formatting for # values larger than 1e50. No longer. f = 1.1234e90 for fmt in 'f', 'F': # don't do a direct equality check, since on some # platforms only the first few digits of dtoa # will be reliable result = f.__format__(fmt) self.assertEqual(len(result), 98) self.assertEqual(result[-7], '.') self.assertIn(result[:12], ('112340000000', '112339999999')) f = 1.1234e200 for fmt in 'f', 'F': result = f.__format__(fmt) self.assertEqual(len(result), 208) self.assertEqual(result[-7], '.') self.assertIn(result[:12], ('112340000000', '112339999999')) test( 1.0, 'e', '1.000000e+00') test(-1.0, 'e', '-1.000000e+00') test( 1.0, 'E', '1.000000E+00') test(-1.0, 'E', '-1.000000E+00') test(1.1234e20, 'e', '1.123400e+20') test(1.1234e20, 'E', '1.123400E+20') # No format code means use g, but must have a decimal # and a number after the decimal. This is tricky, because # a totaly empty format specifier means something else. 
# So, just use a sign flag test(1e200, '+g', '+1e+200') test(1e200, '+', '+1e+200') test(1.1e200, '+g', '+1.1e+200') test(1.1e200, '+', '+1.1e+200') test(1.1e200, '+g', '+1.1e+200') test(1.1e200, '+', '+1.1e+200') # 0 padding test(1234., '010f', '1234.000000') test(1234., '011f', '1234.000000') test(1234., '012f', '01234.000000') test(-1234., '011f', '-1234.000000') test(-1234., '012f', '-1234.000000') test(-1234., '013f', '-01234.000000') test(-1234.12341234, '013f', '-01234.123412') test(-123456.12341234, '011.2f', '-0123456.12') # issue 5782, commas with no specifier type test(1.2, '010,.2', '0,000,001.2') # 0 padding with commas test(1234., '011,f', '1,234.000000') test(1234., '012,f', '1,234.000000') test(1234., '013,f', '01,234.000000') test(-1234., '012,f', '-1,234.000000') test(-1234., '013,f', '-1,234.000000') test(-1234., '014,f', '-01,234.000000') test(-12345., '015,f', '-012,345.000000') test(-123456., '016,f', '-0,123,456.000000') test(-123456., '017,f', '-0,123,456.000000') test(-123456.12341234, '017,f', '-0,123,456.123412') test(-123456.12341234, '013,.2f', '-0,123,456.12') # % formatting test(-1.0, '%', '-100.000000%') # format spec must be string self.assertRaises(TypeError, 3.0.__format__, None) self.assertRaises(TypeError, 3.0.__format__, 0) # other format specifiers shouldn't work on floats, # in particular int specifiers for format_spec in ([chr(x) for x in range(ord('a'), ord('z')+1)] + [chr(x) for x in range(ord('A'), ord('Z')+1)]): if not format_spec in 'eEfFgGn%': self.assertRaises(ValueError, format, 0.0, format_spec) self.assertRaises(ValueError, format, 1.0, format_spec) self.assertRaises(ValueError, format, -1.0, format_spec) self.assertRaises(ValueError, format, 1e100, format_spec) self.assertRaises(ValueError, format, -1e100, format_spec) self.assertRaises(ValueError, format, 1e-100, format_spec) self.assertRaises(ValueError, format, -1e-100, format_spec) # Alternate formatting is not supported self.assertRaises(ValueError, format, 
0.0, '#') self.assertRaises(ValueError, format, 0.0, '#20f') # Issue 6902 test(12345.6, "0<20", '12345.60000000000000') test(12345.6, "1<20", '12345.61111111111111') test(12345.6, "*<20", '12345.6*************') test(12345.6, "0>20", '000000000000012345.6') test(12345.6, "1>20", '111111111111112345.6') test(12345.6, "*>20", '*************12345.6') test(12345.6, "0=20", '000000000000012345.6') test(12345.6, "1=20", '111111111111112345.6') test(12345.6, "*=20", '*************12345.6') def test_format_spec_errors(self): # int, float, and string all share the same format spec # mini-language parser. # Check that we can't ask for too many digits. This is # probably a CPython specific test. It tries to put the width # into a C long. self.assertRaises(ValueError, format, 0, '1'*10000 + 'd') # Similar with the precision. self.assertRaises(ValueError, format, 0, '.' + '1'*10000 + 'd') # And may as well test both. self.assertRaises(ValueError, format, 0, '1'*1000 + '.' + '1'*10000 + 'd') # Make sure commas aren't allowed with various type codes for code in 'xXobns': self.assertRaises(ValueError, format, 0, ',' + code) def test_internal_sizes(self): self.assertGreater(object.__basicsize__, 0) self.assertGreater(tuple.__itemsize__, 0) def test_main(): with check_py3k_warnings( ("buffer.. not supported", DeprecationWarning), ("classic long division", DeprecationWarning)): run_unittest(TypesTests) if __name__ == '__main__': test_main()
gpl-2.0
pombreda/pyfilesystem
fs/tests/zipfs_binary_test.py
14
1089
""" Test case for ZipFS binary file reading/writing Passes ok on Linux, fails on Windows (tested: Win7, 64-bit): AssertionError: ' \r\n' != ' \n' """ import unittest from fs.zipfs import ZipFS import os from six import b class ZipFsBinaryWriteRead(unittest.TestCase): test_content = b(chr(32) + chr(10)) def setUp(self): self.z = ZipFS('test.zip', 'w') def tearDown(self): try: os.remove('test.zip') except: pass def test_binary_write_read(self): # GIVEN zipfs z = self.z # WHEN binary data is written to a test file in zipfs f = z.open('test.data', 'wb') f.write(self.test_content) f.close() z.close() # THEN the same binary data is retrieved when opened again z = ZipFS('test.zip', 'r') f = z.open('test.data', 'rb') content = f.read() f.close() z.close() self.assertEqual(content, self.test_content) if __name__ == '__main__': unittest.main()
bsd-3-clause
nwjs/blink
Tools/Scripts/webkitpy/layout_tests/models/testharness_results.py
28
2255
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Utility module for testharness."""

# const definitions
TESTHARNESSREPORT_HEADER = 'This is a testharness.js-based test.'
TESTHARNESSREPORT_FOOTER = 'Harness: the test ran to completion.'


def is_testharness_output(content_text):
    """Return True when content_text looks like testharness.js output.

    Output qualifies when both the standard header and footer lines appear
    anywhere in the text; leading/trailing whitespace on each line (and on
    the text as a whole) is ignored.
    """
    stripped_lines = set(
        raw_line.strip() for raw_line in content_text.strip().splitlines())
    return (TESTHARNESSREPORT_HEADER in stripped_lines and
            TESTHARNESSREPORT_FOOTER in stripped_lines)


def is_testharness_output_passing(content_text):
    """Return True when the testharness output contains only passing lines.

    Note: It is expected that content_text is a testharness output.
    The check is deliberately conservative: a blank line, or any line that
    is not the header/footer, a CONSOLE message, or a PASS result, makes
    the whole output count as failing.
    """
    for stripped in (raw.strip() for raw in content_text.strip().splitlines()):
        if not stripped:
            return False
        if stripped in (TESTHARNESSREPORT_HEADER, TESTHARNESSREPORT_FOOTER):
            continue
        if stripped.startswith(('CONSOLE', 'PASS')):
            continue
        # FAIL/TIMEOUT/NOTRUN lines, harness errors, and any unexpected
        # content all fall through to here.
        return False
    return True
bsd-3-clause
jamesylgan/szechuantech
python-scripts/cryptography/hazmat/primitives/ciphers/modes.py
3
6859
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.

from __future__ import absolute_import, division, print_function

import abc

import six

from cryptography import utils


@six.add_metaclass(abc.ABCMeta)
class Mode(object):
    """Abstract interface implemented by every cipher mode of operation."""

    @abc.abstractproperty
    def name(self):
        """
        A string naming this mode (e.g. "ECB", "CBC").
        """

    @abc.abstractmethod
    def validate_for_algorithm(self, algorithm):
        """
        Checks that all the necessary invariants of this (mode, algorithm)
        combination are met.
        """


@six.add_metaclass(abc.ABCMeta)
class ModeWithInitializationVector(object):
    """Marker interface for modes that take an initialization vector."""

    @abc.abstractproperty
    def initialization_vector(self):
        """
        The value of the initialization vector for this mode as bytes.
        """


@six.add_metaclass(abc.ABCMeta)
class ModeWithTweak(object):
    """Marker interface for modes (e.g. XTS) that take a tweak value."""

    @abc.abstractproperty
    def tweak(self):
        """
        The value of the tweak for this mode as bytes.
        """


@six.add_metaclass(abc.ABCMeta)
class ModeWithNonce(object):
    """Marker interface for modes (e.g. CTR) that take a nonce."""

    @abc.abstractproperty
    def nonce(self):
        """
        The value of the nonce for this mode as bytes.
        """


@six.add_metaclass(abc.ABCMeta)
class ModeWithAuthenticationTag(object):
    """Marker interface for AEAD modes (e.g. GCM) carrying an auth tag."""

    @abc.abstractproperty
    def tag(self):
        """
        The value of the tag supplied to the constructor of this mode.
        """


# The helpers below take ``self`` because they are assigned directly as
# ``validate_for_algorithm`` methods on the concrete mode classes.
def _check_aes_key_length(self, algorithm):
    # AES only defines 128/192/256-bit keys; other algorithms pass through.
    if algorithm.key_size > 256 and algorithm.name == "AES":
        raise ValueError(
            "Only 128, 192, and 256 bit keys are allowed for this AES mode"
        )


def _check_iv_length(self, algorithm):
    # The IV must be exactly one cipher block (block_size is in bits).
    if len(self.initialization_vector) * 8 != algorithm.block_size:
        raise ValueError("Invalid IV size ({0}) for {1}.".format(
            len(self.initialization_vector), self.name
        ))


def _check_iv_and_key_length(self, algorithm):
    _check_aes_key_length(self, algorithm)
    _check_iv_length(self, algorithm)


@utils.register_interface(Mode)
@utils.register_interface(ModeWithInitializationVector)
class CBC(object):
    """Cipher Block Chaining mode."""

    name = "CBC"

    def __init__(self, initialization_vector):
        if not isinstance(initialization_vector, bytes):
            raise TypeError("initialization_vector must be bytes")

        self._initialization_vector = initialization_vector

    initialization_vector = utils.read_only_property("_initialization_vector")
    validate_for_algorithm = _check_iv_and_key_length


@utils.register_interface(Mode)
@utils.register_interface(ModeWithTweak)
class XTS(object):
    """XEX-based tweaked-codebook mode with ciphertext stealing."""

    name = "XTS"

    def __init__(self, tweak):
        if not isinstance(tweak, bytes):
            raise TypeError("tweak must be bytes")

        if len(tweak) != 16:
            raise ValueError("tweak must be 128-bits (16 bytes)")

        self._tweak = tweak

    tweak = utils.read_only_property("_tweak")

    def validate_for_algorithm(self, algorithm):
        # XTS keys are double-length: two AES keys concatenated.
        if algorithm.key_size not in (256, 512):
            raise ValueError(
                "The XTS specification requires a 256-bit key for AES-128-XTS"
                " and 512-bit key for AES-256-XTS"
            )


@utils.register_interface(Mode)
class ECB(object):
    """Electronic CodeBook mode (no IV)."""

    name = "ECB"

    validate_for_algorithm = _check_aes_key_length


@utils.register_interface(Mode)
@utils.register_interface(ModeWithInitializationVector)
class OFB(object):
    """Output FeedBack mode."""

    name = "OFB"

    def __init__(self, initialization_vector):
        if not isinstance(initialization_vector, bytes):
            raise TypeError("initialization_vector must be bytes")

        self._initialization_vector = initialization_vector

    initialization_vector = utils.read_only_property("_initialization_vector")
    validate_for_algorithm = _check_iv_and_key_length


@utils.register_interface(Mode)
@utils.register_interface(ModeWithInitializationVector)
class CFB(object):
    """Cipher FeedBack mode."""

    name = "CFB"

    def __init__(self, initialization_vector):
        if not isinstance(initialization_vector, bytes):
            raise TypeError("initialization_vector must be bytes")

        self._initialization_vector = initialization_vector

    initialization_vector = utils.read_only_property("_initialization_vector")
    validate_for_algorithm = _check_iv_and_key_length


@utils.register_interface(Mode)
@utils.register_interface(ModeWithInitializationVector)
class CFB8(object):
    """Cipher FeedBack mode with an 8-bit shift register."""

    name = "CFB8"

    def __init__(self, initialization_vector):
        if not isinstance(initialization_vector, bytes):
            raise TypeError("initialization_vector must be bytes")

        self._initialization_vector = initialization_vector

    initialization_vector = utils.read_only_property("_initialization_vector")
    validate_for_algorithm = _check_iv_and_key_length


@utils.register_interface(Mode)
@utils.register_interface(ModeWithNonce)
class CTR(object):
    """CounTeR mode."""

    name = "CTR"

    def __init__(self, nonce):
        if not isinstance(nonce, bytes):
            raise TypeError("nonce must be bytes")

        self._nonce = nonce

    nonce = utils.read_only_property("_nonce")

    def validate_for_algorithm(self, algorithm):
        _check_aes_key_length(self, algorithm)
        # Like an IV, the counter block must be exactly one cipher block.
        if len(self.nonce) * 8 != algorithm.block_size:
            raise ValueError("Invalid nonce size ({0}) for {1}.".format(
                len(self.nonce), self.name
            ))


@utils.register_interface(Mode)
@utils.register_interface(ModeWithInitializationVector)
@utils.register_interface(ModeWithAuthenticationTag)
class GCM(object):
    """Galois/Counter Mode (AEAD)."""

    name = "GCM"
    # Limits from the GCM specification (NIST SP 800-38D).
    _MAX_ENCRYPTED_BYTES = (2 ** 39 - 256) // 8
    _MAX_AAD_BYTES = (2 ** 64) // 8

    def __init__(self, initialization_vector, tag=None, min_tag_length=16):
        # len(initialization_vector) must be in [1, 2 ** 64), but it's
        # impossible to actually construct a bytes object that large, so we
        # don't check for it
        if not isinstance(initialization_vector, bytes):
            raise TypeError("initialization_vector must be bytes")
        self._initialization_vector = initialization_vector
        if tag is not None:
            if not isinstance(tag, bytes):
                raise TypeError("tag must be bytes or None")
            if min_tag_length < 4:
                raise ValueError("min_tag_length must be >= 4")
            if len(tag) < min_tag_length:
                raise ValueError(
                    "Authentication tag must be {0} bytes or longer.".format(
                        min_tag_length)
                )
        self._tag = tag

    tag = utils.read_only_property("_tag")
    initialization_vector = utils.read_only_property("_initialization_vector")

    def validate_for_algorithm(self, algorithm):
        _check_aes_key_length(self, algorithm)
mit
sourcemash/Sourcemash
tests/pages/base.py
1
2282
# Adapted from Smohapatra/Sailthru-magento

from itertools import count
import datetime
import random
import os


class BasePage(object):
    """Base class inherited by all page objects.

    Provides common helpers (paths, locators, dates/timestamps, counters)
    and global navigation methods shared by every page object.
    """

    def __init__(self, browser):
        # Selenium-style browser/driver handle used by instance methods.
        self.browser = browser

    # Most recently visited page object; shared across all page objects.
    prior_page = None
    # Shared infinite counter for generating unique integers; starts at 10.
    counter = count(10)

    @classmethod
    def set_absolute_file_path(cls, *path):
        """Return *path components joined onto this file's directory."""
        return os.path.join(os.path.abspath(os.path.dirname(__file__)), *path)

    @classmethod
    def format_locator(cls, locator_tuple, string):
        """Format the locator template in a (by, locator, description) tuple."""
        by, locator, description = locator_tuple
        return by, locator.format(string), description

    @classmethod
    def update_prior_page(cls, page):
        """Record *page* as the most recently visited page object."""
        cls.prior_page = page

    @classmethod
    def get_next_integer(cls):
        """Return the next integer from an infinite counter as a string.

        Starts at 10 and never repeats within a process.
        """
        return str(next(cls.counter))

    @classmethod
    def get_random_integer(cls, digits=5):
        """Return a random integer in [0, 10**digits); default up to 5 digits."""
        # BUG FIX: the original evaluated ``random.random * (10 ** digits)``
        # without calling the function, raising TypeError on every call.
        return int(random.random() * (10 ** digits))

    @classmethod
    def get_todays_date(cls):
        """Return today's date formatted as MM/DD/YYYY."""
        return datetime.date.today().strftime("%m/%d/%Y")

    @classmethod
    def get_timestamp(cls):
        """Return current timestamp including fraction of a second (%f)."""
        return datetime.datetime.now().strftime("%Y%m%d%H%M%f")

    @classmethod
    def get_current_datetime(cls):
        """Return the current New York time as a YYYYMMDDHHMMSS string."""
        # Imported lazily so the module does not hard-require pytz at import
        # time; only this helper needs timezone support.
        import pytz

        fmt = '%Y%m%d%H%M%S'
        d = datetime.datetime.now(pytz.timezone("America/New_York"))
        # Round-trip through a string to drop sub-second precision and tzinfo.
        d_string = d.strftime(fmt)
        d2 = datetime.datetime.strptime(d_string, fmt)
        return d2.strftime(fmt)

    # Runs jQuery equivalent to:
    #   $("#f_source_list option").attr('selected', null)
    #   $("#f_source_list option[value='10 users']").attr('selected', 'selected');
    def set_option_for_javascript_dropdown(self, locator, option):
        """Select *option* in a JavaScript-driven dropdown via injected jQuery."""
        self.browser.execute_script("$('{0} option').attr('selected', null);".format(locator))
        self.browser.execute_script("$('{0} option[value=\"{1}\"]').attr('selected', 'selected')".format(locator, option))
gpl-2.0
tomquirk/taxman
taxman/audit.py
1
9612
""" ================= CSS Audit Author: Tom Quirk ================= """ from bs4 import BeautifulSoup as Bs import os class Colour: """ Class for holding ANSI print colours """ HEADER = '\033[95m' OKBLUE = '\033[94m' OKGREEN = '\033[92m' WARNING = '\033[93m' FAIL = '\033[91m' ENDC = '\033[0m' BOLD = '\033[1m' UNDERLINE = '\033[4m' class Audit(object): def __init__(self, basedir): self._HTML_file_styles = {} # by file self._all_styles = {} # by style self._css_file_styles = {} # by file self._css_all_styles = {} # by style self._BASEDIR = basedir if basedir[-1] == '/': self._BASEDIR = basedir[:-1] def snoopHTML(self, fpath): """ Generates data structure for given file, describing it's HTML elements that have and associated style. NOTE: Line numbers are sometimes inaccurate. :param fpath: str :return: """ self._HTML_file_styles[fpath] = [] file = open(fpath).read() file_lines = file.split('\n') soup = Bs(file, 'html.parser') tags = soup.find_all() for tag in tags: styles = {'element': '', 'class': [], 'id': [], 'line_no': 0, 'tag': ''} if tag.has_attr('class'): _class = tag['class'] styles['class'].append(_class) elif tag.has_attr('id'): _id = tag['id'] styles['id'].append(_id) # get open tag of element styles['element'] = str(tag).strip().split('\n')[0] # get tag styles['tag'] = tag.name # if has style if len(styles['class']) != 0 or len(styles['id']) != 0: self._HTML_file_styles[fpath].append(styles) # clean up classes clean_classes = [] for cgroup in styles['class']: for cname in cgroup: clean_classes.append('.' 
+ cname) # clean up ids clean_ids = [] for iname in styles['id']: clean_ids.append('#' + iname) styles['class'] = clean_classes styles['id'] = clean_ids # get line number in file for line in enumerate(file_lines): line_no = line[0] + 1 rline = str(line[1].strip()) opTag = '<' + styles['tag'] # check if matched tag on class if len(styles['class']) != 0: if opTag in rline and styles['class'][0][1:] in rline: styles['line_no'] = line_no # check if matched tag on id elif len(styles['id']) != 0: if opTag in rline and styles['id'][0][1:] in rline: styles['line_no'] = line_no def snoopHTML_styles(self, fpath): """ Generates data structure organised by class name; each contains filename, line number, element and tag. :return: """ for tag in self._HTML_file_styles[fpath]: for _class in tag['class']: struct = {'file': fpath, 'line_no': tag['line_no'], 'tag': tag['tag'], 'element': tag['element']} # create new style entry if _class not in self._all_styles: self._all_styles[_class] = [struct] # add to existing style entry else: self._all_styles[_class].append(struct) for _id in tag['id']: struct = {'file': fpath, 'line_no': tag['line_no'], 'tag': tag['tag'], 'element': tag['element']} # create new style entry if _id not in self._all_styles: self._all_styles[_id] = [struct] # add to existing style entry else: self._all_styles[_id].append(struct) def snoopCSS(self, fpath): """ Generates data structure containing style file name, along with its associated styles :param fpath: str :return: """ self._HTML_file_styles[fpath] = [] file = open(fpath).read() file_lines = file.split('\n') class_id = ['.', '&'] # class identifiers for stylus id_id = ['#'] # id identifiers for stylus struct = {'class': [], 'id': []} for line in file_lines: line = line.strip() if len(line) > 0 and line[0] in class_id \ and line not in struct['class']: struct['class'].append(line) elif len(line) > 0 and line[0] in id_id \ and line not in struct['id']: struct['id'].append(line) self._css_file_styles[fpath] = 
struct def snoopCSS_styles(self, fname): """ Generates key-value pair structure; key = style, value = fname :param fname: :return: """ for _class in self._css_file_styles[fname]['class']: if _class not in self._css_all_styles: self._css_all_styles[_class] = fname for _id in self._css_file_styles[fname]['id']: if _id not in self._css_all_styles: self._css_all_styles[_id] = fname def diffHTML(self): """ Returns list of dictionaries containing file name, line number and element of elements that use undefined style definitions :return: """ diff = [] for style in self._all_styles: if style not in self._css_all_styles and '&' + style not in self._css_all_styles and '>' + style not in self._css_all_styles: obj = {'style': style, 'location': self._all_styles[style]} diff.append(obj) return diff def diffCSS(self): """ Returns style definitions, with file paths, that are not used in HTML :return: """ diff = [] for style in self._css_all_styles: style_and = (style.replace('&.', '.')).replace('>.', '.') if style not in self._all_styles and style_and not in self._all_styles: obj = {'style': style, 'location': self._css_all_styles[style]} diff.append(obj) return diff def get_HTML_file_styles(self): """ Returns Html file styles struct. :return: dict """ return self._HTML_file_styles def get_all_styles(self): """ Returns all styles. :return: dict """ return self._all_styles def crawl(self, cwd): """ Crawls through base directory to generate structs for styles and HTML files. Style file extension defaults to '.styl' :return: """ cwd += '/' os.chdir(cwd) # change current working dir to 'cwd' arg src_files = os.listdir(cwd) src_folders = [] # ignore hidden items for item in src_files: if item[0] == '.' 
or item == 'env': # env, for dev mode src_files.remove(item) for item in src_files: item_path = cwd + item if os.path.isfile(item_path): if item_path.endswith('.html'): self.snoopHTML(item_path) self.snoopHTML_styles(item_path) elif item_path.endswith('.styl'): self.snoopCSS(item_path) self.snoopCSS_styles(item_path) else: src_folders.append(cwd + item) # hardcore recursion for folder in src_folders: self.crawl(folder) def format_results(self, unused_css, undefined_css): """ Prints prettified audit results :return: """ overview = '\n\n' + '###############' + Colour.WARNING + ' TAXMAN - CSS AUDIT ' + Colour.ENDC + "###############\n\n" overview += '%d unused CSS styles\n' % len(unused_css) overview += '%d undefined CSS styles\n' % len(undefined_css) unused_css_formmated = Colour.WARNING + '\nUNUSED CSS STYLES: \n' undefined_css_formatted = Colour.WARNING + '\nUNDEFINED CSS STYLES: \n' for style in unused_css: x = Colour.OKBLUE + '\n' + style['style'] + '\n\t' + Colour.ENDC x += Colour.BOLD + 'Filepath: ' + Colour.ENDC + style[ 'location'] + '\n' unused_css_formmated += x for style in undefined_css: x = Colour.OKBLUE + '\n' + style['style'] + '\n' + Colour.ENDC for location in style['location']: x += '\t' + Colour.BOLD + 'Filepath: ' + Colour.ENDC + location[ 'file'] + '\n\t' x += Colour.BOLD + 'Element: ' + Colour.ENDC + location[ 'element'] + '\n\t' x += Colour.BOLD + 'Line Number: ' + Colour.ENDC + str( location['line_no']) + '\n\n' undefined_css_formatted += x print(overview) print(unused_css_formmated) print(undefined_css_formatted) def run(self): """ Initial Runner to populate structs :return: """ self.crawl(self._BASEDIR) unused_css = self.diffCSS() undefined_css = self.diffHTML() self.format_results(unused_css, undefined_css)
mit
pLeBlanc93/ArcREST
samples/Workforce/load_assignments_lookup.py
3
8609
""" This sample shows to load assignments from a csv and using the workforce project to lookup the workers ID and the Dispatchers ID. A assignment area layer is required to provide the XY for the assignment location. Python 2.x/3.x ArcREST 3.5 """ from __future__ import print_function import arcrest from arcrest.common.general import Feature from arcresthelper import featureservicetools from arcresthelper import common from arcrest.packages import six import csv from datetime import datetime from arcrest.agol import FeatureLayer def UnicodeDictReader(utf8_data, **kwargs): if six.PY3 == True: csv_reader = csv.DictReader(utf8_data, **kwargs) for row in csv_reader: yield {key: value for key, value in row.items()} else: csv_reader = csv.DictReader(utf8_data, **kwargs) for row in csv_reader: yield {unicode(key, 'utf-8-sig'): unicode(value, 'utf-8-sig') for key, value in row.items()} def trace(): """ trace finds the line, the filename and error message and returns it to the user """ import traceback, inspect, sys tb = sys.exc_info()[2] tbinfo = traceback.format_tb(tb)[0] filename = inspect.getfile(inspect.currentframe()) # script name + line number line = tbinfo.split(", ")[1] # Get Python syntax error # synerror = traceback.format_exc().splitlines()[-1] return line, filename, synerror def main(): try: proxy_port = None proxy_url = None securityinfo = {} securityinfo['security_type'] = 'Portal'#LDAP, NTLM, OAuth, Portal, PKI securityinfo['username'] = ""#<UserName> securityinfo['password'] = ""#<Password> securityinfo['org_url'] = "http://www.arcgis.com" securityinfo['proxy_url'] = proxy_url securityinfo['proxy_port'] = proxy_port securityinfo['referer_url'] = None securityinfo['token_url'] = None securityinfo['certificatefile'] = None securityinfo['keyfile'] = None securityinfo['client_id'] = None securityinfo['secret_id'] = None workforceProjectID = '' #Workforce project number assignmentAreasID = '' #ID of service to get centroids from assignmentAreaLayerName = 
''#layer in servuce assignmentAreaNameField = ''#field with name of id area csvPath = r".\dataToLookup.csv"#<Path with data> workerCol = 'worker' areaCol = 'area' descriptionCol = "description" notesCol = "notes" supervisorCol = "supervisor" assignmentType = 2 status = 1 workerNameToID = {} dispatcherNameToID = {} areaNameToID = {} fst = featureservicetools.featureservicetools(securityinfo) if fst.valid == False: print (fst.message) else: portalAdmin = arcrest.manageorg.Administration(securityHandler=fst.securityhandler) #Get the assignment areas fs = fst.GetFeatureService(itemId=assignmentAreasID,returnURLOnly=False) if not fs is None: fs_url = fst.GetLayerFromFeatureService(fs=fs,layerName=assignmentAreaLayerName,returnURLOnly=True) if not fs_url is None: fl = FeatureLayer( url=fs_url, securityHandler=fst.securityhandler, proxy_port=proxy_port, proxy_url=proxy_url, initialize=True) areaResults = fl.query(**{'where':"1=1",'outSR':'102100','out_fields':assignmentAreaNameField,'returnGeometry':False,'returnCentroid':True}) for area in areaResults: arDict = area.asDictionary areaNameToID[arDict['attributes'][assignmentAreaNameField]] = arDict['centroid'] #Get the workers item = portalAdmin.content.getItem(itemId=workforceProjectID) itemData = item.itemData() if 'workers' in itemData: fl = FeatureLayer( url=itemData['workers']['url'], securityHandler=fst.securityhandler, proxy_port=proxy_port, proxy_url=proxy_url, initialize=True) workersResults = fl.query(where="1=1",out_fields='OBJECTID, NAME',returnGeometry=False) for worker in workersResults: workerNameToID[worker.get_value('name')] = worker.get_value('OBJECTID') if 'dispatchers' in itemData: fl = FeatureLayer( url=itemData['dispatchers']['url'], securityHandler=fst.securityhandler, proxy_port=proxy_port, proxy_url=proxy_url, initialize=True) dispatcherResults = fl.query(where="1=1",out_fields='OBJECTID, NAME',returnGeometry=False) for dispatcher in dispatcherResults: 
dispatcherNameToID[dispatcher.get_value('name')] = dispatcher.get_value('OBJECTID') if 'assignments' in itemData: features = [] fl = FeatureLayer( url=itemData['assignments']['url'], securityHandler=fst.securityhandler, proxy_port=proxy_port, proxy_url=proxy_url, initialize=True) print(fl.deleteFeatures(where="1=1")) with open(csvPath) as csvfile: reader = UnicodeDictReader(csvfile) for row in reader: json_string={} json_string['geometry'] = {} centroidInfo = areaNameToID[row[areaCol].strip()] json_string['geometry']['x'] = centroidInfo['x'] json_string['geometry']['y'] = centroidInfo['y'] json_string['attributes'] ={} json_string['attributes']['workerId'] = workerNameToID[row[workerCol].strip()] json_string['attributes']['description'] = row[descriptionCol] json_string['attributes']['notes'] = row[notesCol] json_string['attributes']['assignmentType'] = assignmentType json_string['attributes']['status'] = status json_string['attributes']['dispatcherId'] = dispatcherNameToID[row[supervisorCol].strip()] features.append(Feature(json_string=json_string)) results = fl.addFeature(features=features) if 'error' in results: print ("Error in response from server: %s" % results['error']) else: if results['addResults'] is not None: featSucces = 0 for result in results['addResults']: if 'success' in result: if result['success'] == False: if 'error' in result: print ("Error info: %s" % (result['error'])) else: featSucces = featSucces + 1 print ("%s features added to %s" % (featSucces,fl.name)) else: print ("0 features added to %s /n result info %s" % (fl.name,str(results))) except (common.ArcRestHelperError) as e: print ("error in function: %s" % e[0]['function']) print ("error on line: %s" % e[0]['line']) print ("error in file name: %s" % e[0]['filename']) print ("with error message: %s" % e[0]['synerror']) if 'arcpyError' in e[0]: print ("with arcpy message: %s" % e[0]['arcpyError']) except: line, filename, synerror = trace() print ("error on line: %s" % line) print ("error in 
file name: %s" % filename) print ("with error message: %s" % synerror) if __name__ == "__main__": main()
apache-2.0
benschmaus/catapult
telemetry/telemetry/core/memory_cache_http_server.py
7
9334
# Copyright 2012 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import BaseHTTPServer from collections import namedtuple import errno import gzip import mimetypes import os import SimpleHTTPServer import socket import SocketServer import StringIO import sys import urlparse from telemetry.core import local_server ByteRange = namedtuple('ByteRange', ['from_byte', 'to_byte']) ResourceAndRange = namedtuple('ResourceAndRange', ['resource', 'byte_range']) class MemoryCacheHTTPRequestHandler(SimpleHTTPServer.SimpleHTTPRequestHandler): protocol_version = 'HTTP/1.1' # override BaseHTTPServer setting wbufsize = -1 # override StreamRequestHandler (a base class) setting def handle(self): try: BaseHTTPServer.BaseHTTPRequestHandler.handle(self) except socket.error as e: # Connection reset errors happen all the time due to the browser closing # without terminating the connection properly. They can be safely # ignored. if e[0] != errno.ECONNRESET: raise def do_GET(self): """Serve a GET request.""" resource_range = self.SendHead() if not resource_range or not resource_range.resource: return response = resource_range.resource['response'] if not resource_range.byte_range: self.wfile.write(response) return start_index = resource_range.byte_range.from_byte end_index = resource_range.byte_range.to_byte self.wfile.write(response[start_index:end_index + 1]) def do_HEAD(self): """Serve a HEAD request.""" self.SendHead() def log_error(self, fmt, *args): pass def log_request(self, code='-', size='-'): # Don't spam the console unless it is important. 
pass def SendHead(self): path = os.path.realpath(self.translate_path(self.path)) if path not in self.server.resource_map: self.send_error(404, 'File not found') return None resource = self.server.resource_map[path] total_num_of_bytes = resource['content-length'] byte_range = self.GetByteRange(total_num_of_bytes) if byte_range: # request specified a range, so set response code to 206. self.send_response(206) self.send_header('Content-Range', 'bytes %d-%d/%d' % (byte_range.from_byte, byte_range.to_byte, total_num_of_bytes)) total_num_of_bytes = byte_range.to_byte - byte_range.from_byte + 1 else: self.send_response(200) self.send_header('Content-Length', str(total_num_of_bytes)) self.send_header('Content-Type', resource['content-type']) self.send_header('Last-Modified', self.date_time_string(resource['last-modified'])) if resource['zipped']: self.send_header('Content-Encoding', 'gzip') self.end_headers() return ResourceAndRange(resource, byte_range) def GetByteRange(self, total_num_of_bytes): """Parse the header and get the range values specified. Args: total_num_of_bytes: Total # of bytes in requested resource, used to calculate upper range limit. Returns: A ByteRange namedtuple object with the requested byte-range values. If no Range is explicitly requested or there is a failure parsing, return None. If range specified is in the format "N-", return N-END. Refer to http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html for details. If upper range limit is greater than total # of bytes, return upper index. """ range_header = self.headers.getheader('Range') if range_header is None: return None if not range_header.startswith('bytes='): return None # The range header is expected to be a string in this format: # bytes=0-1 # Get the upper and lower limits of the specified byte-range. # We've already confirmed that range_header starts with 'bytes='. 
byte_range_values = range_header[len('bytes='):].split('-') from_byte = 0 to_byte = 0 if len(byte_range_values) == 2: # If to_range is not defined return all bytes starting from from_byte. to_byte = (int(byte_range_values[1]) if byte_range_values[1] else total_num_of_bytes - 1) # If from_range is not defined return last 'to_byte' bytes. from_byte = (int(byte_range_values[0]) if byte_range_values[0] else total_num_of_bytes - to_byte) else: return None # Do some validation. if from_byte < 0: return None # Make to_byte the end byte by default in edge cases. if to_byte < from_byte or to_byte >= total_num_of_bytes: to_byte = total_num_of_bytes - 1 return ByteRange(from_byte, to_byte) class _MemoryCacheHTTPServerImpl(SocketServer.ThreadingMixIn, BaseHTTPServer.HTTPServer): # Increase the request queue size. The default value, 5, is set in # SocketServer.TCPServer (the parent of BaseHTTPServer.HTTPServer). # Since we're intercepting many domains through this single server, # it is quite possible to get more than 5 concurrent requests. request_queue_size = 128 # Don't prevent python from exiting when there is thread activity. daemon_threads = True def __init__(self, host_port, handler, paths): BaseHTTPServer.HTTPServer.__init__(self, host_port, handler) self.resource_map = {} for path in paths: if os.path.isdir(path): self.AddDirectoryToResourceMap(path) else: self.AddFileToResourceMap(path) def AddDirectoryToResourceMap(self, directory_path): """Loads all files in directory_path into the in-memory resource map.""" for root, dirs, files in os.walk(directory_path): # Skip hidden files and folders (like .svn and .git). 
files = [f for f in files if f[0] != '.'] dirs[:] = [d for d in dirs if d[0] != '.'] for f in files: file_path = os.path.join(root, f) if not os.path.exists(file_path): # Allow for '.#' files continue self.AddFileToResourceMap(file_path) def AddFileToResourceMap(self, file_path): """Loads file_path into the in-memory resource map.""" file_path = os.path.realpath(file_path) if file_path in self.resource_map: return with open(file_path, 'rb') as fd: response = fd.read() fs = os.fstat(fd.fileno()) content_type = mimetypes.guess_type(file_path)[0] zipped = False if content_type in ['text/html', 'text/css', 'application/javascript']: zipped = True sio = StringIO.StringIO() gzf = gzip.GzipFile(fileobj=sio, compresslevel=9, mode='wb') gzf.write(response) gzf.close() response = sio.getvalue() sio.close() self.resource_map[file_path] = { 'content-type': content_type, 'content-length': len(response), 'last-modified': fs.st_mtime, 'response': response, 'zipped': zipped } index = 'index.html' if os.path.basename(file_path) == index: dir_path = os.path.dirname(file_path) self.resource_map[dir_path] = self.resource_map[file_path] class MemoryCacheHTTPServerBackend(local_server.LocalServerBackend): def __init__(self): super(MemoryCacheHTTPServerBackend, self).__init__() self._httpd = None def StartAndGetNamedPorts(self, args): base_dir = args['base_dir'] os.chdir(base_dir) paths = args['paths'] for path in paths: if not os.path.realpath(path).startswith(os.path.realpath(os.getcwd())): print >> sys.stderr, '"%s" is not under the cwd.' 
% path sys.exit(1) server_address = (args['host'], args['port']) MemoryCacheHTTPRequestHandler.protocol_version = 'HTTP/1.1' self._httpd = _MemoryCacheHTTPServerImpl( server_address, MemoryCacheHTTPRequestHandler, paths) return [local_server.NamedPort('http', self._httpd.server_address[1])] def ServeForever(self): return self._httpd.serve_forever() class MemoryCacheHTTPServer(local_server.LocalServer): def __init__(self, paths): super(MemoryCacheHTTPServer, self).__init__(MemoryCacheHTTPServerBackend) self._base_dir = None for path in paths: assert os.path.exists(path), '%s does not exist.' % path paths = list(paths) self._paths = paths self._paths_as_set = set(map(os.path.realpath, paths)) common_prefix = os.path.commonprefix(paths) if os.path.isdir(common_prefix): self._base_dir = common_prefix else: self._base_dir = os.path.dirname(common_prefix) def GetBackendStartupArgs(self): return {'base_dir': self._base_dir, 'paths': self._paths, 'host': self.host_ip, 'port': 0} @property def paths(self): return self._paths_as_set @property def url(self): return 'http://127.0.0.1:%s' % self.port def UrlOf(self, path): if os.path.isabs(path): relative_path = os.path.relpath(path, self._base_dir) else: relative_path = path # Preserve trailing slash or backslash. # It doesn't matter in a file path, but it does matter in a URL. if path.endswith(os.sep) or (os.altsep and path.endswith(os.altsep)): relative_path += '/' return urlparse.urljoin(self.url, relative_path.replace(os.sep, '/'))
bsd-3-clause
orestkreminskyi/taf
taf/testlib/snmpcmd.py
2
23767
# Copyright (c) 2011 - 2017, Intel Corporation. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """``snmpcmd.py`` `Module for SNMP specific functionality` """ import re import time from pyasn1.type import univ from pysnmp.smi import builder, view from pysnmp.entity.rfc3413.oneliner import cmdgen from . import helpers from . import loggers class SNMPCmd(object): """`SNMP specific functionality class. Args: config(list[dict]): environment config env_switches(dict): switches dictionary in format {switch_id: switch_object} mib_dir(str): MIB module name """ suite_logger = loggers.ClassLogger() def __init__(self, config, env_switches, mib_dir): """Initialize SNMPCmd class Args: config(list[dict]): environment config env_switches(dict): switches dictionary in format {switch_id: switch_object} mib_dir(str): MIB module name """ self.switches = {} # get community info from config file: for conf in config: if 'get_community' in conf: self.get_community = conf['get_community'] self.mib_dir = mib_dir if 'set_community' in conf: self.set_community = conf['set_community'] # get switches ip addresses and ports for switch_id in list(env_switches.keys()): sw_ipaddr = env_switches[switch_id].ipaddr if 'sshtun_port' in env_switches[switch_id].config: sw_port = 161 if env_switches[switch_id].config['sshtun_port'] != 22: sw_ipaddr = "10.10.{0}.{1}".format(*str(env_switches[switch_id].config['sshtun_port']).split('0')) else: sw_port = int(env_switches[switch_id].port) - 8080 + 4700 
self.switches.update({switch_id: {'host': sw_ipaddr, 'port': sw_port}}) self.mib_builder = builder.MibBuilder() mib_path = self.mib_builder.getMibPath() + (mib_dir, ) self.mib_builder.setMibPath(*mib_path) # self.suite_logger.debug("mib_builder __modPathsSeen: %s" % (self.mib_builder._MibBuilder__modPathsSeen, )) # self.suite_logger.debug("mib_builder __modSeen: %s" % (self.mib_builder._MibBuilder__modSeen, )) self.mibViewController = view.MibViewController(self.mib_builder) # loading SNMP types as instances self.suite_logger.debug("Loading basic types from standard MIB modules") self.OctetString, Integer = self.mib_builder.importSymbols('ASN1', 'OctetString', 'Integer')[0:2] Counter32, Unsigned32, Counter64 = self.mib_builder.importSymbols('SNMPv2-SMI', 'Counter32', 'Unsigned32', 'Counter64')[0:3] InetAddressType, self.InetAddress, InetAddressIPv4, InetAddressIPv6, InetAddressIPv4z, InetAddressIPv6z, InetAddressDNS = \ self.mib_builder.importSymbols('INET-ADDRESS-MIB', 'InetAddressType', 'InetAddress', 'InetAddressIPv4', 'InetAddressIPv6', 'InetAddressIPv4z', 'InetAddressIPv6z', 'InetAddressDNS')[0:7] self.__integer = Integer() self.__counter32 = Counter32() self.__unsigned32 = Unsigned32() self.__counter64 = Counter64() self.__octetString = self.OctetString() # creating InetAddress types dict with keys corresponded to InetAddressType named values self.InetAddresses = {'ipv4': InetAddressIPv4(), 'ipv6': InetAddressIPv6(), 'ipv4z': InetAddressIPv4z(), 'ipv6z': InetAddressIPv6z(), 'dns': InetAddressDNS()} def _find_and_load_mib(self, mibs_dict, sym_name): """Find MIB name and load it to MibBuilder. Args: mibs_dict(dict): dictionary that contains MIBs. sym_name(str): MIB symbol name Returns: str: Name of MIB in which symbol name is. 'None' if MIB's name wasn't found. 
Examples:: self._find_and_load_mib(helpers.MIBS_DICT, 'onsSwitchppControlRouteInterfaceMtu') """ # searching MIB name for specified symbol name in specified MIBs dictionary mod_name = next((name for name, values in mibs_dict.items() if sym_name in values), None) if not mod_name: # symbol name wasn't found in MIBs in MIBs dictionary self.suite_logger.debug("MIB name for << %s >> wasn't found" % (sym_name,)) else: self.suite_logger.debug("MIB name for << %s >> found: << %s >>" % (sym_name, mod_name,)) if mod_name not in self.mib_builder.mibSymbols: # loading found MIB try: self.mib_builder.loadModules(mod_name) self.suite_logger.debug("MIB << %s >> successful loaded" % (mod_name,)) except Exception: self.suite_logger.debug("MIB << %s >> is not loaded" % mod_name) # self.suite_logger.debug("mib_builder __modPathsSeen: %s" % (self.mib_builder._MibBuilder__modPathsSeen, )) # self.suite_logger.debug("mib_builder __modSeen: %s" % (self.mib_builder._MibBuilder__modSeen, )) else: self.suite_logger.debug("MIB << %s >> is already loaded" % (mod_name,)) return mod_name def _get_oid(self, mod_name, sym_name): """Getting values from source by SNMP. Args: mod_name(dict): MIB module name. sym_name(str): MIB symbol name Returns: list: List of MIBs oids. Examples:: self._get_oid('ons_stat', 'onsSnmpAgentStatisticsPortId', 'tests/ui/mibs/') """ mib_node, = self.mib_builder.importSymbols(mod_name, sym_name)[0:1] listed_oid = list(mib_node.getName()) self.suite_logger.debug("Transleted OID: %s" % (listed_oid, )) return listed_oid def _get_previous(self, mod_name, sym_name): """Getting OID and NodeName of previous SNMP element of sequence. Args: mod_name(dict): MIB module name. sym_name(str): MIB symbol name Returns: list: List with OID and symbol name of previous element. 
Examples:: self._get_previous('ONS-SWITCH-MIB', 'onsSwitchppControlBridgeInfoInbandIpNetMaskInetAddress') """ mib_node, = self.mib_builder.importSymbols(mod_name, sym_name)[0:1] type_oid = list(mib_node.getName()) # decreasing last oid member for 1 type_oid[-1] -= 1 # getting information for previous element oid, prev_names, suffix = self.mibViewController.getNodeNameByOid(tuple(type_oid)) return list(oid), prev_names[-1] def _normalize_result(self, mod_name, sym_name, result, to_oid=False): """Normalize SNMP GET result according syntax from MIB. Args: mod_name(dict): MIB module name. sym_name(str): MIB symbol name result(list): List with one SNMP GET result for all types except InetAddress, for InetAddress - list with two elements ['InetAddress', 'InetAddressType']. to_oid(bool): indicator of formatting given result for OID. Returns: str, int: Normalized result according to syntax. Examples:: self._normalize_result('ONS-SWITCH-MIB', 'onsSwitchppControlBridgeInfoInbandIpNetMaskInetAddress', [OctetString(hexValue='ffffff00'), Integer(1)]) """ mib_node, = self.mib_builder.importSymbols(mod_name, sym_name)[0:1] syntax = mib_node.getSyntax() # branch for basic SNMP types if len(result) == 1 and result != 'None': result = result[0] # formatting digital types if result.isSuperTypeOf(self.__integer) or result.isSuperTypeOf(self.__counter32) \ or result.isSuperTypeOf(self.__unsigned32) or result.isSuperTypeOf(self.__counter64): self.suite_logger.debug("DIGITAL result type found.") if to_oid is False: # if DIGITAL result normalizing not for OID subtype_named_values = syntax.subtype().getNamedValues() if len(subtype_named_values.namedValues) > 0: result = subtype_named_values.getName(result) elif hasattr(syntax, 'displayHint') and syntax.displayHint is not None: self.suite_logger.debug("Formatting result according to DISPLAY-HINT: \"%s\"" % syntax.displayHint) try: result = int(syntax.prettyOut(result)) except ValueError: result = syntax.prettyOut(result) else: result = 
int(result) elif to_oid is True: # normalizing DIGITAL result for OID self.suite_logger.debug("Formatting result to use in OID") result = int(result) # formatting string types elif result.isSuperTypeOf(self.__octetString) or isinstance(result, self.OctetString): self.suite_logger.debug("OCTET STRING result type found.") if not to_oid: # normalizing OCTET STRING result not for OID if hasattr(syntax, 'displayHint') and syntax.displayHint is not None: self.suite_logger.debug("Formatting result according to DISPLAY-HINT: \"%s\"" % syntax.displayHint) result = syntax.prettyOut(result) else: self.suite_logger.debug("No DISPLAY-HINT found.") result = result.prettyPrint() else: # normalizing OCTET STRING result for OID self.suite_logger.debug("Formatting result to use in OID") res_len = "" if syntax.isFixedLength() else str(len(result)) + "." result = res_len + ".".join(str(number) for number in result.asNumbers()) else: self.suite_logger.debug("Unknown result type. Result \"%s\" didn't normalized." % (result, )) # branch for [InetAddress, InetAddressType] result elif len(result) == 2: if isinstance(syntax, self.InetAddress): self.suite_logger.debug("INET ADDRESS result type found.") addr_type_sym_name = self._get_previous(mod_name, sym_name)[1] addr_type = self._normalize_result(mod_name, addr_type_sym_name, [result[1]]) self.suite_logger.debug("Formatting result according to INET ADDRESS TYPE value: \"%s\" = \"%s\"" % (addr_type_sym_name, addr_type)) result = self.InetAddresses[addr_type].prettyOut(result[0]) # normalizing InetAddress result for OID if to_oid is True: self.suite_logger.debug("Formatting result to use in OID") if addr_type == "dns": result = str(len(result)) + "." + ".".join(str(number) for number in result.asNumbers()) else: result = ".".join(re.findall(r"[\w]+", result)) else: self.suite_logger.debug("Unknown result type. Result \"%s\" didn't normalized." 
% (result[0], )) return result def _snmp_get_call(self, switch_id, arguments, community, version, to_oid=False, poll_timeout=20): """Getting data from source via SNMP. Args: switch_id(int): ID of switch to get SNMP call to. arguments(list): SNMP call (SNMP symbol name, index). Index can have inserted calls. community(str): SNMP community to read. version(str): version of SNMP protocol to use. to_oid(bool): indicator of returned result's OID format poll_timeout(int): timeout to appearing SNMP data. Returns: str, int: Normalized received SNMP data. Examples:: self._snmp_get_call(1, ['onsSwitchppControlBridgeInfoInbandIpNetMaskInetAddress', "1"], "sppCommunity", "v2", False, 20) self._snmp_get_call(1, ['onsSwitchppControlBridgeInfoInbandIpNetMaskInetAddress', ["1.{}.1", ["PortId", "1.2.3"]]], "sppCommunity", "v2", False, 20) """ sym_name = arguments[0] if isinstance(arguments[1], list): # Making calls inserted to index if "{}" in arguments[1][0]: # formation of OID by calling inserted calls and substitution received values to index values_list = [] for call in arguments[1][1:]: values_list.append(self._snmp_get_call(switch_id, call, community, version, to_oid=True)) arguments[1] = arguments[1][0].format(*values_list) elif isinstance(arguments[1][0], str) and len(arguments[1]) == 2: arguments[1] = self._snmp_get_call(switch_id, arguments[1], community, version, to_oid=True) # finding MIB name by symbol parameter's name self.suite_logger.debug("Get parameter: << %s >>" % (sym_name,)) mod_name = self._find_and_load_mib(helpers.MIBS_DICT, sym_name) # Return 'None' if MIB for parameter wasn't found if not mod_name: return 'None' # getting listed_oid(s): listed_oids = [] listed_oid = self._get_oid(mod_name, sym_name) listed_oids.append(listed_oid) # adding oid of previous element (must be InetAddressType) to list if param type is InetAddress mib_node, = self.mib_builder.importSymbols(mod_name, sym_name)[0:1] if isinstance(mib_node.getSyntax(), self.InetAddress): 
listed_oids.append(self._get_previous(mod_name, sym_name)[0]) if len(arguments) == 2 or len(arguments) == 5: oid_index = [] for oid_element in str(arguments[1]).split('.'): oid_index.append(int(oid_element)) listed_oids = [(oid + oid_index) for oid in listed_oids] else: self.suite_logger.debug("Wrong number of arguments in call: %s" % arguments) return 'None' # performing snmpget procedure: if version == 'v2': ip_addr = self.switches[switch_id]['host'] port = "161" end_time = time.time() + poll_timeout final_res = [univ.Null()] self.suite_logger.debug("Get OID: %s" % (listed_oids[0], )) while True: if time.time() < end_time: # do while at least one result list member is instance of univ.Null class if any(isinstance(result, univ.Null) for result in final_res): error_indication, error_status, error_index, var_binds = cmdgen.CommandGenerator().getCmd( cmdgen.CommunityData('test-agent', community, 1), cmdgen.UdpTransportTarget((ip_addr, port)), *listed_oids) if len(var_binds) > 0: final_res = list(res[1] for res in var_binds) self.suite_logger.debug("Returned SNMP Data:<<< %s >>>" % (final_res, )) else: self.suite_logger.debug("Returned SNMP response:<<< EMPTY >>>") else: self.suite_logger.debug("SNMP Data is not None:<<< %s >>>" % (final_res, )) break else: self.suite_logger.debug("Timeout exceeded and SNMP data is not appeared") break elif version == 'v3': username = arguments[-3] authpass = arguments[-2][1] privpass = arguments[-1][1] auth = arguments[-2][0] priv = arguments[-1][0] authprtcl = {'MD5': cmdgen.usmHMACMD5AuthProtocol, 'SHA': cmdgen.usmHMACSHAAuthProtocol, 'no_auth': cmdgen.usmNoAuthProtocol} privprtcl = {'DES': cmdgen.usmDESPrivProtocol, 'AES': cmdgen.usmAesCfb128Protocol, 'no_priv': cmdgen.usmNoPrivProtocol} error_indication, error_status, error_index, var_binds = cmdgen.CommandGenerator().getCmd( cmdgen.UsmUserData(username, authKey=authpass, privKey=privpass, authProtocol=authprtcl[auth], privProtocol=privprtcl[priv]), 
cmdgen.UdpTransportTarget((self.switches[switch_id]['host'], self.switches[switch_id]['port'])), *listed_oids) final_res = list(res[1] for res in var_binds) if any(isinstance(result, univ.Null) for result in final_res): self.suite_logger.debug("Result is \'None\':<<< %s >>>" % (final_res[0].prettyPrint())) final_res = 'None' else: final_res = self._normalize_result(mod_name, sym_name, final_res, to_oid) self.suite_logger.debug("Normalized result:<<< %s >>>" % final_res) return final_res def snmp_get(self, elements_list, community, version, poll_timeout=20): """Walking through list of element to get and calling self._snmp_call() method. Args: elements_list(list): List of (SNMP symbol name, index) pairs. Index can have inserted calls. community(str): SNMP community to read. version(str): version of SNMP protocol to use. poll_timeout(int): timeout to appearing SNMP data. Returns: list: List of SNMP-GET command results. Examples:: self._snmp_get([{"1":[["onsSnmpAgentStatisticsPortId", "1"]]}]) self._snmp_get([{"1":[["onsSnmpAgentStatisticsPortId", ["1.{}.3", ["onsSnmpAgentStatisticsPortKey", "2.4.5"]]]]}], "sppCommunity", "v2") """ if not community: community = self.get_community result = [] result_dict = {} result_list = [] for elements_dict in elements_list: for key in list(elements_dict.keys()): if elements_dict[key] != [["readOnly"]]: for arguments in elements_dict[key]: result.append([self._snmp_get_call(int(key), arguments, community, version, False, poll_timeout)]) else: result = [["readOnly"]] result_dict[key] = result result_list.append(result_dict) return result_list def snmp_set(self, elements_list, community, mib_dir=None): """Setting values by SNMP. Args: elements_list(list): List of (SNMP symbol name, index) pairs. Index can have inserted calls. community(str): SNMP community to read. mib_dir(str): MIB module name. Returns: list: List of SNMP-SET command results. 
Examples:: self._snmp_get(conf[test]['snmp_set']) """ if not mib_dir: mib_dir = self.mib_dir if not community: community = self.set_community result = [] result_dict = {} result_list = [] # try: for elements_dict in elements_list: for key in list(elements_dict.keys()): if elements_dict[key] != [["readOnly"]]: for arguments in elements_dict[key]: sym_name = str(arguments[0]) if len(arguments) == 4: arg_type = 3 arg_value = 2 else: arg_type = 2 arg_value = 1 if arguments[arg_type] == 'INTEGER': try: set_value = univ.Integer(int(arguments[arg_value])) except Exception: result_dict[key] = [["None"]] return result_list.append(result_dict) else: try: set_value = univ.OctetString(arg_value) except Exception: result_dict[key] = [["None"]] return result_list.append(result_dict) # getting mib-number: listed_oid = self._get_oid(mib_dir, sym_name) if len(arguments) == 4: for oid_element in str(arguments[1]).split('.'): listed_oid.append(int(oid_element)) mib_number = tuple(listed_oid) error_indication, error_status, error_index, var_binds = cmdgen.CommandGenerator().setCmd( cmdgen.CommunityData('test-agent', 'private', 1), cmdgen.UdpTransportTarget((self.switches[int(key)]['host'], self.switches[int(key)]['port'])), (mib_number, set_value)) if error_index != 0: final_res = error_status else: final_res = 0 result.append(final_res) else: result = ["readOnly"] result_dict[key] = [result] result_list.append(result_dict) return result_list @staticmethod def snmp_walk(community, host, port, oid): """Perform SNMP walk for submitted oid. Args: community(str): SNMP community to read. host(str): SNMP host. port(int): SNMP host port. oid(str): SNMP OID. 
Raises: CustomException """ from testlib.custom_exceptions import CustomException cmd_gen = cmdgen.CommandGenerator() error_indication, error_status, error_index, var_binds = cmd_gen.nextCmd(cmdgen.CommunityData('test-agent', community, 1), cmdgen.UdpTransportTarget((host, port)), oid) # Check for errors and print out results if error_indication: raise CustomException(error_indication) else: if error_status: messages = ('%s at %s' % (error_status.prettyPrint(), # pylint: disable=no-member error_index and var_binds[int(error_index) - 1] or '?')) raise CustomException(messages) else: if var_binds: for name, val in var_binds[0]: messages = '%s = %s' % (name.prettyPrint(), val.prettyPrint()) else: messages = 'Empty Replay' return messages
apache-2.0
erwilan/ansible
lib/ansible/plugins/cache/jsonfile.py
36
1681
# (c) 2014, Brian Coca, Josh Drake, et al # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. ''' DOCUMENTATION: cache: jsonfile short_description: File backed, JSON formated. description: - File backed cache that uses JSON as a format, the files are per host. version_added: "1.9" author: Brian Coca (@bcoca) ''' # Make coding more python3-ish from __future__ import (absolute_import, division, print_function) __metaclass__ = type import codecs try: import simplejson as json except ImportError: import json from ansible.parsing.utils.jsonify import jsonify from ansible.plugins.cache import BaseFileCacheModule class CacheModule(BaseFileCacheModule): """ A caching module backed by json files. """ def _load(self, filepath): # Valid JSON is always UTF-8 encoded. with codecs.open(filepath, 'r', encoding='utf-8') as f: return json.load(f) def _dump(self, value, filepath): with codecs.open(filepath, 'w', encoding='utf-8') as f: f.write(jsonify(value, format=True))
gpl-3.0
GabrielNicolasAvellaneda/chemlab
chemlab/graphics/postprocessing/fxaa.py
6
3084
import numpy as np import os from OpenGL.GL import * from OpenGL.GL.framebufferobjects import * from OpenGL.arrays import vbo from ..textures import Texture from ..shaders import set_uniform, compileShader from .base import AbstractEffect class FXAAEffect(AbstractEffect): '''Fast Approximate Anti Aliasing. It is an efficient way to add anti-aliasing to your scenes. The reason to have it is to reduce jagged lines. The parameters *span_max*, *reduce_mul*, *reduce_min* are tweakable even if it is suggested to keep them at their default value. .. image:: /_static/fxaa_on_off.png :width: 800px ''' def __init__(self, widget, span_max = 4.0, reduce_mul=1/8.0, reduce_min=1/128.0): self.widget = widget curdir = os.path.dirname(__file__) vert = open(os.path.join(curdir, 'shaders', 'noeffect.vert')).read() frag = open(os.path.join(curdir, 'shaders', 'fxaa.frag')).read() # Compile quad shader vertex = compileShader(vert, GL_VERTEX_SHADER) fragment = compileShader(frag, GL_FRAGMENT_SHADER) self.span_max = span_max self.reduce_mul = reduce_mul self.reduce_min = reduce_min self.quad_program = shaders.compileProgram(vertex, fragment) def render(self, fb, texturedict): glBindFramebuffer(GL_FRAMEBUFFER, fb) glViewport(0, 0, self.widget.width(), self.widget.height()) glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT) glUseProgram(self.quad_program) set_uniform(self.quad_program, 'FXAA_SPAN_MAX', '1f', self.span_max) set_uniform(self.quad_program, 'FXAA_REDUCE_MUL', '1f', self.reduce_mul) set_uniform(self.quad_program, 'FXAA_REDUCE_MIN', '1f', self.reduce_min) qd_id = glGetUniformLocation(self.quad_program, b"textureSampler") texture = texturedict['color'] # Setting up the texture glActiveTexture(GL_TEXTURE0) texture.bind() # Set our "quad_texture" sampler to user Texture Unit 0 glUniform1i(qd_id, 0) # Set resolution res_id = glGetUniformLocation(self.quad_program, b"texcoordOffset") glUniform2f(res_id, 1.0/self.widget.width(), 1.0/self.widget.height()) # # Let's render a quad 
quad_data = np.array([-1.0, -1.0, 0.0, 1.0, -1.0, 0.0, -1.0, 1.0, 0.0, -1.0, 1.0, 0.0, 1.0, -1.0, 0.0, 1.0, 1.0, 0.0], dtype='float32') vboquad = vbo.VBO(quad_data) vboquad.bind() glVertexPointer(3, GL_FLOAT, 0, None) glEnableClientState(GL_VERTEX_ARRAY) # draw "count" points from the VBO glDrawArrays(GL_TRIANGLES, 0, 6) vboquad.unbind() glDisableClientState(GL_VERTEX_ARRAY) def on_resize(self, w, h): pass
gpl-3.0
allmende/synnefo
snf-cyclades-app/synnefo/vmapi/settings.py
9
1102
# Copyright (C) 2010-2014 GRNET S.A. # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. from django.conf import settings from synnefo.cyclades_settings import BASE_URL, BASE_HOST, BASE_PATH CACHE_BACKEND = getattr(settings, 'VMAPI_CACHE_BACKEND', settings.CACHE_BACKEND) CACHE_KEY_PREFIX = getattr(settings, 'VMAPI_CACHE_KEY_PREFIX', 'vmapi') RESET_PARAMS = getattr(settings, 'VMAPI_RESET_PARAMS', True) BASE_HOST = getattr(settings, 'VMAPI_BASE_HOST', BASE_HOST)
gpl-3.0
johnkeepmoving/oss-ftp
python27/win32/Lib/site-packages/setuptools/sandbox.py
38
10430
import os import sys import tempfile import operator import functools import itertools import re import pkg_resources if os.name == "java": import org.python.modules.posix.PosixModule as _os else: _os = sys.modules[os.name] try: _file = file except NameError: _file = None _open = open from distutils.errors import DistutilsError from pkg_resources import working_set from setuptools.compat import builtins __all__ = [ "AbstractSandbox", "DirectorySandbox", "SandboxViolation", "run_setup", ] def _execfile(filename, globals, locals=None): """ Python 3 implementation of execfile. """ mode = 'rb' # Python 2.6 compile requires LF for newlines, so use deprecated # Universal newlines support. if sys.version_info < (2, 7): mode += 'U' with open(filename, mode) as stream: script = stream.read() if locals is None: locals = globals code = compile(script, filename, 'exec') exec(code, globals, locals) def run_setup(setup_script, args): """Run a distutils setup script, sandboxed in its directory""" old_dir = os.getcwd() save_argv = sys.argv[:] save_path = sys.path[:] setup_dir = os.path.abspath(os.path.dirname(setup_script)) temp_dir = os.path.join(setup_dir,'temp') if not os.path.isdir(temp_dir): os.makedirs(temp_dir) save_tmp = tempfile.tempdir save_modules = sys.modules.copy() pr_state = pkg_resources.__getstate__() try: tempfile.tempdir = temp_dir os.chdir(setup_dir) try: sys.argv[:] = [setup_script]+list(args) sys.path.insert(0, setup_dir) # reset to include setup dir, w/clean callback list working_set.__init__() working_set.callbacks.append(lambda dist:dist.activate()) def runner(): ns = dict(__file__=setup_script, __name__='__main__') _execfile(setup_script, ns) DirectorySandbox(setup_dir).run(runner) except SystemExit: v = sys.exc_info()[1] if v.args and v.args[0]: raise # Normal exit, just return finally: pkg_resources.__setstate__(pr_state) sys.modules.update(save_modules) # remove any modules imported within the sandbox del_modules = [ mod_name for mod_name in 
sys.modules if mod_name not in save_modules # exclude any encodings modules. See #285 and not mod_name.startswith('encodings.') ] list(map(sys.modules.__delitem__, del_modules)) os.chdir(old_dir) sys.path[:] = save_path sys.argv[:] = save_argv tempfile.tempdir = save_tmp class AbstractSandbox: """Wrap 'os' module and 'open()' builtin for virtualizing setup scripts""" _active = False def __init__(self): self._attrs = [ name for name in dir(_os) if not name.startswith('_') and hasattr(self,name) ] def _copy(self, source): for name in self._attrs: setattr(os, name, getattr(source,name)) def run(self, func): """Run 'func' under os sandboxing""" try: self._copy(self) if _file: builtins.file = self._file builtins.open = self._open self._active = True return func() finally: self._active = False if _file: builtins.file = _file builtins.open = _open self._copy(_os) def _mk_dual_path_wrapper(name): original = getattr(_os,name) def wrap(self,src,dst,*args,**kw): if self._active: src,dst = self._remap_pair(name,src,dst,*args,**kw) return original(src,dst,*args,**kw) return wrap for name in ["rename", "link", "symlink"]: if hasattr(_os,name): locals()[name] = _mk_dual_path_wrapper(name) def _mk_single_path_wrapper(name, original=None): original = original or getattr(_os,name) def wrap(self,path,*args,**kw): if self._active: path = self._remap_input(name,path,*args,**kw) return original(path,*args,**kw) return wrap if _file: _file = _mk_single_path_wrapper('file', _file) _open = _mk_single_path_wrapper('open', _open) for name in [ "stat", "listdir", "chdir", "open", "chmod", "chown", "mkdir", "remove", "unlink", "rmdir", "utime", "lchown", "chroot", "lstat", "startfile", "mkfifo", "mknod", "pathconf", "access" ]: if hasattr(_os,name): locals()[name] = _mk_single_path_wrapper(name) def _mk_single_with_return(name): original = getattr(_os,name) def wrap(self,path,*args,**kw): if self._active: path = self._remap_input(name,path,*args,**kw) return self._remap_output(name, 
original(path,*args,**kw)) return original(path,*args,**kw) return wrap for name in ['readlink', 'tempnam']: if hasattr(_os,name): locals()[name] = _mk_single_with_return(name) def _mk_query(name): original = getattr(_os,name) def wrap(self,*args,**kw): retval = original(*args,**kw) if self._active: return self._remap_output(name, retval) return retval return wrap for name in ['getcwd', 'tmpnam']: if hasattr(_os,name): locals()[name] = _mk_query(name) def _validate_path(self,path): """Called to remap or validate any path, whether input or output""" return path def _remap_input(self,operation,path,*args,**kw): """Called for path inputs""" return self._validate_path(path) def _remap_output(self,operation,path): """Called for path outputs""" return self._validate_path(path) def _remap_pair(self,operation,src,dst,*args,**kw): """Called for path pairs like rename, link, and symlink operations""" return ( self._remap_input(operation+'-from',src,*args,**kw), self._remap_input(operation+'-to',dst,*args,**kw) ) if hasattr(os, 'devnull'): _EXCEPTIONS = [os.devnull,] else: _EXCEPTIONS = [] try: from win32com.client.gencache import GetGeneratePath _EXCEPTIONS.append(GetGeneratePath()) del GetGeneratePath except ImportError: # it appears pywin32 is not installed, so no need to exclude. 
pass class DirectorySandbox(AbstractSandbox): """Restrict operations to a single subdirectory - pseudo-chroot""" write_ops = dict.fromkeys([ "open", "chmod", "chown", "mkdir", "remove", "unlink", "rmdir", "utime", "lchown", "chroot", "mkfifo", "mknod", "tempnam", ]) _exception_patterns = [ # Allow lib2to3 to attempt to save a pickled grammar object (#121) '.*lib2to3.*\.pickle$', ] "exempt writing to paths that match the pattern" def __init__(self, sandbox, exceptions=_EXCEPTIONS): self._sandbox = os.path.normcase(os.path.realpath(sandbox)) self._prefix = os.path.join(self._sandbox,'') self._exceptions = [ os.path.normcase(os.path.realpath(path)) for path in exceptions ] AbstractSandbox.__init__(self) def _violation(self, operation, *args, **kw): raise SandboxViolation(operation, args, kw) if _file: def _file(self, path, mode='r', *args, **kw): if mode not in ('r', 'rt', 'rb', 'rU', 'U') and not self._ok(path): self._violation("file", path, mode, *args, **kw) return _file(path,mode,*args,**kw) def _open(self, path, mode='r', *args, **kw): if mode not in ('r', 'rt', 'rb', 'rU', 'U') and not self._ok(path): self._violation("open", path, mode, *args, **kw) return _open(path,mode,*args,**kw) def tmpnam(self): self._violation("tmpnam") def _ok(self, path): active = self._active try: self._active = False realpath = os.path.normcase(os.path.realpath(path)) return ( self._exempted(realpath) or realpath == self._sandbox or realpath.startswith(self._prefix) ) finally: self._active = active def _exempted(self, filepath): start_matches = ( filepath.startswith(exception) for exception in self._exceptions ) pattern_matches = ( re.match(pattern, filepath) for pattern in self._exception_patterns ) candidates = itertools.chain(start_matches, pattern_matches) return any(candidates) def _remap_input(self, operation, path, *args, **kw): """Called for path inputs""" if operation in self.write_ops and not self._ok(path): self._violation(operation, os.path.realpath(path), *args, **kw) 
return path def _remap_pair(self, operation, src, dst, *args, **kw): """Called for path pairs like rename, link, and symlink operations""" if not self._ok(src) or not self._ok(dst): self._violation(operation, src, dst, *args, **kw) return (src,dst) def open(self, file, flags, mode=0o777, *args, **kw): """Called for low-level os.open()""" if flags & WRITE_FLAGS and not self._ok(file): self._violation("os.open", file, flags, mode, *args, **kw) return _os.open(file,flags,mode, *args, **kw) WRITE_FLAGS = functools.reduce( operator.or_, [getattr(_os, a, 0) for a in "O_WRONLY O_RDWR O_APPEND O_CREAT O_TRUNC O_TEMPORARY".split()] ) class SandboxViolation(DistutilsError): """A setup script attempted to modify the filesystem outside the sandbox""" def __str__(self): return """SandboxViolation: %s%r %s The package setup script has attempted to modify files on your system that are not within the EasyInstall build area, and has been aborted. This package cannot be safely installed by EasyInstall, and may not support alternate installation locations even if you run its setup script by hand. Please inform the package's author and the EasyInstall maintainers to find out if a fix or workaround is available.""" % self.args #
mit
jplusui/jplusui.github.com
apps/node/node_modules/npm/node_modules/node-gyp/gyp/test/variables/commands/gyptest-commands-repeated.py
330
1313
#!/usr/bin/env python # Copyright (c) 2012 Google Inc. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """ Test variable expansion of '<!()' syntax commands where they are evaluated more then once.. """ import TestGyp test = TestGyp.TestGyp(format='gypd') expect = test.read('commands-repeated.gyp.stdout').replace('\r\n', '\n') test.run_gyp('commands-repeated.gyp', '--debug', 'variables', stdout=expect, ignore_line_numbers=True) # Verify the commands-repeated.gypd against the checked-in expected contents. # # Normally, we should canonicalize line endings in the expected # contents file setting the Subversion svn:eol-style to native, # but that would still fail if multiple systems are sharing a single # workspace on a network-mounted file system. Consequently, we # massage the Windows line endings ('\r\n') in the output to the # checked-in UNIX endings ('\n'). contents = test.read('commands-repeated.gypd').replace('\r\n', '\n') expect = test.read('commands-repeated.gypd.golden').replace('\r\n', '\n') if not test.match(contents, expect): print "Unexpected contents of `commands-repeated.gypd'" test.diff(expect, contents, 'commands-repeated.gypd ') test.fail_test() test.pass_test()
bsd-3-clause
bsmr-ansible/ansible-modules-extras
source_control/bzr.py
50
6478
#!/usr/bin/python # -*- coding: utf-8 -*- # (c) 2013, André Paramés <git@andreparames.com> # Based on the Git module by Michael DeHaan <michael.dehaan@gmail.com> # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. DOCUMENTATION = u''' --- module: bzr author: "André Paramés (@andreparames)" version_added: "1.1" short_description: Deploy software (or files) from bzr branches description: - Manage I(bzr) branches to deploy files or software. options: name: required: true aliases: [ 'parent' ] description: - SSH or HTTP protocol address of the parent branch. dest: required: true description: - Absolute path of where the branch should be cloned to. version: required: false default: "head" description: - What version of the branch to clone. This can be the bzr revno or revid. force: required: false default: "no" choices: [ 'yes', 'no' ] description: - If C(yes), any modified files in the working tree will be discarded. Before 1.9 the default value was "yes". executable: required: false default: null version_added: "1.4" description: - Path to bzr executable to use. If not supplied, the normal mechanism for resolving binary paths will be used. 
''' EXAMPLES = ''' # Example bzr checkout from Ansible Playbooks - bzr: name=bzr+ssh://foosball.example.org/path/to/branch dest=/srv/checkout version=22 ''' import re class Bzr(object): def __init__(self, module, parent, dest, version, bzr_path): self.module = module self.parent = parent self.dest = dest self.version = version self.bzr_path = bzr_path def _command(self, args_list, cwd=None, **kwargs): (rc, out, err) = self.module.run_command([self.bzr_path] + args_list, cwd=cwd, **kwargs) return (rc, out, err) def get_version(self): '''samples the version of the bzr branch''' cmd = "%s revno" % self.bzr_path rc, stdout, stderr = self.module.run_command(cmd, cwd=self.dest) revno = stdout.strip() return revno def clone(self): '''makes a new bzr branch if it does not already exist''' dest_dirname = os.path.dirname(self.dest) try: os.makedirs(dest_dirname) except: pass if self.version.lower() != 'head': args_list = ["branch", "-r", self.version, self.parent, self.dest] else: args_list = ["branch", self.parent, self.dest] return self._command(args_list, check_rc=True, cwd=dest_dirname) def has_local_mods(self): cmd = "%s status -S" % self.bzr_path rc, stdout, stderr = self.module.run_command(cmd, cwd=self.dest) lines = stdout.splitlines() lines = filter(lambda c: not re.search('^\\?\\?.*$', c), lines) return len(lines) > 0 def reset(self, force): ''' Resets the index and working tree to head. Discards any changes to tracked files in the working tree since that commit. 
''' if not force and self.has_local_mods(): self.module.fail_json(msg="Local modifications exist in branch (force=no).") return self._command(["revert"], check_rc=True, cwd=self.dest) def fetch(self): '''updates branch from remote sources''' if self.version.lower() != 'head': (rc, out, err) = self._command(["pull", "-r", self.version], cwd=self.dest) else: (rc, out, err) = self._command(["pull"], cwd=self.dest) if rc != 0: self.module.fail_json(msg="Failed to pull") return (rc, out, err) def switch_version(self): '''once pulled, switch to a particular revno or revid''' if self.version.lower() != 'head': args_list = ["revert", "-r", self.version] else: args_list = ["revert"] return self._command(args_list, check_rc=True, cwd=self.dest) # =========================================== def main(): module = AnsibleModule( argument_spec = dict( dest=dict(required=True, type='path'), name=dict(required=True, aliases=['parent']), version=dict(default='head'), force=dict(default='no', type='bool'), executable=dict(default=None), ) ) dest = module.params['dest'] parent = module.params['name'] version = module.params['version'] force = module.params['force'] bzr_path = module.params['executable'] or module.get_bin_path('bzr', True) bzrconfig = os.path.join(dest, '.bzr', 'branch', 'branch.conf') rc, out, err, status = (0, None, None, None) bzr = Bzr(module, parent, dest, version, bzr_path) # if there is no bzr configuration, do a branch operation # else pull and switch the version before = None local_mods = False if not os.path.exists(bzrconfig): (rc, out, err) = bzr.clone() else: # else do a pull local_mods = bzr.has_local_mods() before = bzr.get_version() (rc, out, err) = bzr.reset(force) if rc != 0: module.fail_json(msg=err) (rc, out, err) = bzr.fetch() if rc != 0: module.fail_json(msg=err) # switch to version specified regardless of whether # we cloned or pulled (rc, out, err) = bzr.switch_version() # determine if we changed anything after = bzr.get_version() changed = False 
if before != after or local_mods: changed = True module.exit_json(changed=changed, before=before, after=after) # import module snippets from ansible.module_utils.basic import * main()
gpl-3.0
welex91/ansible-modules-core
network/iosxr/iosxr_command.py
13
4909
#!/usr/bin/python # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. # DOCUMENTATION = """ --- module: iosxr_command version_added: "2.1" author: "Peter Sprygada (@privateip)" short_description: Run arbitrary commands on ios devices. description: - Sends arbitrary commands to an IOSXR node and returns the results read from the device. The M(iosxr_command) module includes an argument that will cause the module to wait for a specific condition before returning or timing out if the condition is not met. extends_documentation_fragment: iosxr options: commands: description: - List of commands to send to the remote ios device over the configured provider. The resulting output from the command is returned. If the I(waitfor) argument is provided, the module is not returned until the condition is satisfied or the number of retires as expired. required: true waitfor: description: - List of conditions to evaluate against the output of the command. The task will wait for a each condition to be true before moving forward. If the conditional is not true within the configured number of retries, the task fails. See examples. required: false default: null retries: description: - Specifies the number of retries a command should by tried before it is considered failed. The command is run on the target device every retry and evaluated against the waitfor conditions. 
required: false default: 10 interval: description: - Configures the interval in seconds to wait between retries of the command. If the command does not pass the specified conditions, the interval indicates how long to wait before trying the command again. required: false default: 1 """ EXAMPLES = """ - iosxr_command: commands: - show version register: output - iosxr_command: commands: - show version waitfor: - "result[0] contains 6.0.0" - iosxr_command: commands: - show version - show interfaces waitfor: - "result[1] contains MgmtEth0/0/CPU0/0" - "result[0] contains 6.0.0" """ RETURN = """ stdout: description: the set of responses from the commands returned: always type: list sample: ['...', '...'] stdout_lines: description: The value of stdout split into a list returned: always type: list sample: [['...', '...'], ['...'], ['...']] failed_conditions: description: the conditionals that failed retured: failed type: list sample: ['...', '...'] """ import time import shlex import re INDEX_RE = re.compile(r'(\[\d+\])') def iterlines(stdout): for item in stdout: if isinstance(item, basestring): item = str(item).split('\n') yield item def main(): spec = dict( commands=dict(type='list'), waitfor=dict(type='list'), retries=dict(default=10, type='int'), interval=dict(default=1, type='int') ) module = get_module(argument_spec=spec, supports_check_mode=True) commands = module.params['commands'] retries = module.params['retries'] interval = module.params['interval'] try: queue = set() for entry in (module.params['waitfor'] or list()): queue.add(Conditional(entry)) except AttributeError, exc: module.fail_json(msg=exc.message) result = dict(changed=False) while retries > 0: response = module.execute(commands) result['stdout'] = response for item in list(queue): if item(response): queue.remove(item) if not queue: break time.sleep(interval) retries -= 1 else: failed_conditions = [item.raw for item in queue] module.fail_json(msg='timeout waiting for value', 
failed_conditions=failed_conditions) result['stdout_lines'] = list(iterlines(result['stdout'])) return module.exit_json(**result) from ansible.module_utils.basic import * from ansible.module_utils.urls import * from ansible.module_utils.shell import * from ansible.module_utils.netcfg import * from ansible.module_utils.iosxr import * if __name__ == '__main__': main()
gpl-3.0
TwolDE2/enigma2
lib/python/Components/Pixmap.py
54
4329
from ConditionalWidget import ConditionalWidget from GUIComponent import GUIComponent from enigma import ePixmap, eTimer from Tools.Directories import resolveFilename, fileExists, SCOPE_SKIN_IMAGE, SCOPE_ACTIVE_SKIN, SCOPE_ACTIVE_LCDSKIN from os import path from skin import loadPixmap class Pixmap(GUIComponent): GUI_WIDGET = ePixmap def getSize(self): s = self.instance.size() return s.width(), s.height() class PixmapConditional(ConditionalWidget, Pixmap): def __init__(self, withTimer = True): ConditionalWidget.__init__(self) Pixmap.__init__(self) class MovingPixmap(Pixmap): def __init__(self): Pixmap.__init__(self) self.moving = False # TODO: get real values self.x = 0.0 self.y = 0.0 self.clearPath() self.moveTimer = eTimer() self.moveTimer.callback.append(self.doMove) def clearPath(self, repeated = False): if self.moving: self.moving = False self.moveTimer.stop() self.path = [] self.currDest = 0 self.repeated = repeated def addMovePoint(self, x, y, time = 20): self.path.append((x, y, time)) def moveTo(self, x, y, time = 20): self.clearPath() self.addMovePoint(x, y, time) def startMoving(self): if not self.moving: self.time = self.path[self.currDest][2] self.stepX = (self.path[self.currDest][0] - self.x) / float(self.time) self.stepY = (self.path[self.currDest][1] - self.y) / float(self.time) self.moving = True self.moveTimer.start(100) def stopMoving(self): self.moving = False self.moveTimer.stop() def doMove(self): self.x += self.stepX self.y += self.stepY self.time -= 1 try: self.move(int(self.x), int(self.y)) except: # moving not possible... widget not there any more... 
stop moving self.stopMoving() if self.time == 0: self.currDest += 1 self.moveTimer.stop() self.moving = False if self.currDest >= len(self.path): # end of path if self.repeated: self.currDest = 0 self.moving = False self.startMoving() else: self.moving = False self.startMoving() class MultiPixmap(Pixmap): def __init__(self): Pixmap.__init__(self) self.pixmaps = [] def applySkin(self, desktop, screen): if self.skinAttributes is not None: skin_path_prefix = getattr(screen, "skin_path", path) pixmap = None attribs = [ ] for (attrib, value) in self.skinAttributes: if attrib == "pixmaps": pixmaps = value.split(',') for p in pixmaps: pngfile = resolveFilename(SCOPE_ACTIVE_SKIN, p, path_prefix=skin_path_prefix) if fileExists(resolveFilename(SCOPE_SKIN_IMAGE, p, path_prefix=skin_path_prefix)): pngfile = resolveFilename(SCOPE_SKIN_IMAGE, p, path_prefix=skin_path_prefix) elif fileExists(resolveFilename(SCOPE_ACTIVE_LCDSKIN, p, path_prefix=skin_path_prefix)): pngfile = resolveFilename(SCOPE_ACTIVE_LCDSKIN, p, path_prefix=skin_path_prefix) if path.exists(pngfile): self.pixmaps.append(loadPixmap(pngfile, desktop)) if not pixmap: pixmap = resolveFilename(SCOPE_ACTIVE_SKIN, pixmaps[0], path_prefix=skin_path_prefix) if fileExists(resolveFilename(SCOPE_SKIN_IMAGE, pixmaps[0], path_prefix=skin_path_prefix)): pixmap = resolveFilename(SCOPE_SKIN_IMAGE, pixmaps[0], path_prefix=skin_path_prefix) elif fileExists(resolveFilename(SCOPE_ACTIVE_LCDSKIN, pixmaps[0], path_prefix=skin_path_prefix)): pixmap = resolveFilename(SCOPE_ACTIVE_LCDSKIN, pixmaps[0], path_prefix=skin_path_prefix) elif attrib == "pixmap": pixmap = resolveFilename(SCOPE_ACTIVE_SKIN, value, path_prefix=skin_path_prefix) if fileExists(resolveFilename(SCOPE_SKIN_IMAGE, value, path_prefix=skin_path_prefix)): pixmap = resolveFilename(SCOPE_SKIN_IMAGE, value, path_prefix=skin_path_prefix) elif fileExists(resolveFilename(SCOPE_ACTIVE_LCDSKIN, value, path_prefix=skin_path_prefix)): pixmap = resolveFilename(SCOPE_ACTIVE_LCDSKIN, 
value, path_prefix=skin_path_prefix) else: attribs.append((attrib,value)) if pixmap: attribs.append(("pixmap", pixmap)) self.skinAttributes = attribs return GUIComponent.applySkin(self, desktop, screen) def setPixmapNum(self, x): if self.instance: if len(self.pixmaps) > x: self.instance.setPixmap(self.pixmaps[x]) else: print "setPixmapNum(%d) failed! defined pixmaps:" % x, self.pixmaps
gpl-2.0
michaelWagner/oppia
core/domain/activity_services_test.py
11
9898
# coding: utf-8 # # Copyright 2016 The Oppia Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS-IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from core.domain import activity_domain from core.domain import activity_services from core.domain import collection_services from core.domain import exp_services from core.domain import rights_manager from core.tests import test_utils import feconf class ActivityServicesTests(test_utils.GenericTestBase): """Test the activity services module.""" EXP_ID_0 = 'EXP_ID_0' EXP_ID_1 = 'EXP_ID_1' COL_ID_2 = 'COL_ID_2' def _create_exploration_reference(self, exploration_id): return activity_domain.ActivityReference( feconf.ACTIVITY_TYPE_EXPLORATION, exploration_id) def _create_collection_reference(self, collection_id): return activity_domain.ActivityReference( feconf.ACTIVITY_TYPE_COLLECTION, collection_id) def _compare_lists(self, reference_list_1, reference_list_2): hashes_1 = [reference.get_hash() for reference in reference_list_1] hashes_2 = [reference.get_hash() for reference in reference_list_2] self.assertEqual(hashes_1, hashes_2) def setUp(self): """Publish two explorations and one collection.""" super(ActivityServicesTests, self).setUp() self.signup(self.OWNER_EMAIL, self.OWNER_USERNAME) self.owner_id = self.get_user_id_from_email(self.OWNER_EMAIL) self.signup(self.MODERATOR_EMAIL, self.MODERATOR_USERNAME) self.moderator_id = self.get_user_id_from_email(self.MODERATOR_EMAIL) self.set_moderators([self.MODERATOR_USERNAME]) 
self.save_new_valid_exploration(self.EXP_ID_0, self.owner_id) self.save_new_valid_exploration(self.EXP_ID_1, self.owner_id) self.save_new_valid_collection( self.COL_ID_2, self.owner_id, exploration_id=self.EXP_ID_0) def test_update_featured_refs_correctly_promotes_activities(self): rights_manager.publish_exploration(self.owner_id, self.EXP_ID_0) rights_manager.publish_collection(self.owner_id, self.COL_ID_2) self._compare_lists( activity_services.get_featured_activity_references(), []) activity_services.update_featured_activity_references([ self._create_exploration_reference(self.EXP_ID_0), self._create_collection_reference(self.COL_ID_2)]) self._compare_lists( activity_services.get_featured_activity_references(), [ self._create_exploration_reference(self.EXP_ID_0), self._create_collection_reference(self.COL_ID_2)]) def test_update_featured_refs_clears_existing_featured_activities(self): rights_manager.publish_exploration(self.owner_id, self.EXP_ID_0) activity_services.update_featured_activity_references([ self._create_exploration_reference(self.EXP_ID_0)]) self._compare_lists( activity_services.get_featured_activity_references(), [ self._create_exploration_reference(self.EXP_ID_0)]) activity_services.update_featured_activity_references([]) self._compare_lists( activity_services.get_featured_activity_references(), []) def test_updating_with_duplicate_refs_raises_exception(self): rights_manager.publish_exploration(self.owner_id, self.EXP_ID_0) rights_manager.publish_collection(self.owner_id, self.COL_ID_2) self._compare_lists( activity_services.get_featured_activity_references(), []) with self.assertRaisesRegexp(Exception, 'should not have duplicates'): activity_services.update_featured_activity_references([ self._create_exploration_reference(self.EXP_ID_0), self._create_exploration_reference(self.EXP_ID_0)]) def test_deleted_activity_is_removed_from_featured_list(self): rights_manager.publish_exploration(self.owner_id, self.EXP_ID_0) 
rights_manager.publish_exploration(self.owner_id, self.EXP_ID_1) rights_manager.publish_collection(self.owner_id, self.COL_ID_2) activity_services.update_featured_activity_references([ self._create_exploration_reference(self.EXP_ID_0), self._create_collection_reference(self.COL_ID_2)]) self._compare_lists( activity_services.get_featured_activity_references(), [ self._create_exploration_reference(self.EXP_ID_0), self._create_collection_reference(self.COL_ID_2)]) # Deleting an unfeatured activity does not affect the featured list. exp_services.delete_exploration(self.owner_id, self.EXP_ID_1) self._compare_lists( activity_services.get_featured_activity_references(), [ self._create_exploration_reference(self.EXP_ID_0), self._create_collection_reference(self.COL_ID_2)]) # Deleting a featured activity removes it from the featured list. collection_services.delete_collection(self.owner_id, self.COL_ID_2) self._compare_lists( activity_services.get_featured_activity_references(), [ self._create_exploration_reference(self.EXP_ID_0)]) exp_services.delete_exploration(self.owner_id, self.EXP_ID_0) self._compare_lists( activity_services.get_featured_activity_references(), []) def test_unpublished_activity_is_removed_from_featured_list(self): rights_manager.publish_exploration(self.owner_id, self.EXP_ID_0) rights_manager.publish_exploration(self.owner_id, self.EXP_ID_1) rights_manager.publish_collection(self.owner_id, self.COL_ID_2) activity_services.update_featured_activity_references([ self._create_exploration_reference(self.EXP_ID_0), self._create_collection_reference(self.COL_ID_2)]) self._compare_lists( activity_services.get_featured_activity_references(), [ self._create_exploration_reference(self.EXP_ID_0), self._create_collection_reference(self.COL_ID_2)]) # Unpublishing an unfeatured activity does not affect the featured # list. 
rights_manager.unpublish_exploration(self.moderator_id, self.EXP_ID_1) self._compare_lists( activity_services.get_featured_activity_references(), [ self._create_exploration_reference(self.EXP_ID_0), self._create_collection_reference(self.COL_ID_2)]) # Unpublishing a featured activity removes it from the featured list. rights_manager.unpublish_collection(self.moderator_id, self.COL_ID_2) self._compare_lists( activity_services.get_featured_activity_references(), [ self._create_exploration_reference(self.EXP_ID_0)]) rights_manager.unpublish_exploration(self.moderator_id, self.EXP_ID_0) self._compare_lists( activity_services.get_featured_activity_references(), []) def test_publish_or_publicize_activity_does_not_affect_featured_list(self): self._compare_lists( activity_services.get_featured_activity_references(), []) rights_manager.publish_exploration(self.owner_id, self.EXP_ID_0) self._compare_lists( activity_services.get_featured_activity_references(), []) rights_manager.publicize_exploration(self.moderator_id, self.EXP_ID_0) self._compare_lists( activity_services.get_featured_activity_references(), []) rights_manager.unpublicize_exploration( self.moderator_id, self.EXP_ID_0) self._compare_lists( activity_services.get_featured_activity_references(), []) rights_manager.publish_collection(self.owner_id, self.COL_ID_2) self._compare_lists( activity_services.get_featured_activity_references(), []) rights_manager.publicize_collection(self.moderator_id, self.COL_ID_2) self._compare_lists( activity_services.get_featured_activity_references(), []) rights_manager.unpublicize_collection( self.moderator_id, self.COL_ID_2) self._compare_lists( activity_services.get_featured_activity_references(), []) def test_split_by_type(self): self.assertEqual( activity_services.split_by_type([]), ([], [])) exploration_123 = self._create_exploration_reference('123') self.assertEqual( activity_services.split_by_type([exploration_123]), (['123'], [])) collection_def = 
self._create_collection_reference('def') self.assertEqual( activity_services.split_by_type([collection_def]), ([], ['def'])) exploration_ab = self._create_exploration_reference('ab') self.assertEqual( activity_services.split_by_type([ exploration_123, collection_def, exploration_ab]), (['123', 'ab'], ['def'])) def test_split_by_type_raises_error_if_given_invalid_activity_ref(self): exploration_123 = self._create_exploration_reference('123') with self.assertRaisesRegexp(Exception, 'Invalid activity reference'): activity_services.split_by_type([ exploration_123, activity_domain.ActivityReference('invalid_type', 'bbb') ])
apache-2.0
JCA-Developpement/Odoo
addons/report_webkit/wizard/__init__.py
431
1482
# -*- coding: utf-8 -*- ############################################################################## # # Copyright (c) 2010 Camptocamp SA (http://www.camptocamp.com) # All Rights Reserved # # Author : Vincent Renaville # # WARNING: This program as such is intended to be used by professional # programmers who take the whole responsibility of assessing all potential # consequences resulting from its eventual inadequacies and bugs # End users who are looking for a ready-to-use solution with commercial # guarantees and support are strongly advised to contract a Free Software # Service Company # # This program is Free Software; you can redistribute it and/or # modify it under the terms of the GNU General Public License # as published by the Free Software Foundation; either version 2 # of the License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA # ############################################################################## import report_webkit_actions # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
slightlymadphoenix/activityPointsApp
activitypoints/lib/python3.5/site-packages/django/db/backends/mysql/client.py
94
1524
import subprocess from django.db.backends.base.client import BaseDatabaseClient class DatabaseClient(BaseDatabaseClient): executable_name = 'mysql' @classmethod def settings_to_cmd_args(cls, settings_dict): args = [cls.executable_name] db = settings_dict['OPTIONS'].get('db', settings_dict['NAME']) user = settings_dict['OPTIONS'].get('user', settings_dict['USER']) passwd = settings_dict['OPTIONS'].get('passwd', settings_dict['PASSWORD']) host = settings_dict['OPTIONS'].get('host', settings_dict['HOST']) port = settings_dict['OPTIONS'].get('port', settings_dict['PORT']) cert = settings_dict['OPTIONS'].get('ssl', {}).get('ca') defaults_file = settings_dict['OPTIONS'].get('read_default_file') # Seems to be no good way to set sql_mode with CLI. if defaults_file: args += ["--defaults-file=%s" % defaults_file] if user: args += ["--user=%s" % user] if passwd: args += ["--password=%s" % passwd] if host: if '/' in host: args += ["--socket=%s" % host] else: args += ["--host=%s" % host] if port: args += ["--port=%s" % port] if cert: args += ["--ssl-ca=%s" % cert] if db: args += [db] return args def runshell(self): args = DatabaseClient.settings_to_cmd_args(self.connection.settings_dict) subprocess.check_call(args)
mit
CredoReference/edx-platform
lms/djangoapps/course_api/blocks/views.py
16
11563
""" CourseBlocks API views """ from django.core.exceptions import ValidationError from django.http import Http404 from opaque_keys import InvalidKeyError from opaque_keys.edx.keys import CourseKey from rest_framework.generics import ListAPIView from rest_framework.response import Response from six import text_type from openedx.core.lib.api.view_utils import DeveloperErrorViewMixin, view_auth_classes from xmodule.modulestore.django import modulestore from xmodule.modulestore.exceptions import ItemNotFoundError from .api import get_blocks from .forms import BlockListGetForm @view_auth_classes() class BlocksView(DeveloperErrorViewMixin, ListAPIView): """ **Use Case** Returns the blocks within the requested block tree according to the requesting user's access level. **Example requests**: GET /api/courses/v1/blocks/<root_block_usage_id>/?depth=all GET /api/courses/v1/blocks/<usage_id>/? username=anjali &depth=all &requested_fields=graded,format,student_view_multi_device,lti_url,due &block_counts=video &student_view_data=video &block_types_filter=problem,html **Parameters**: * all_blocks: (boolean) Provide a value of "true" to return all blocks. Returns all blocks only if the requesting user has course staff permissions. Blocks that are visible only to specific learners (for example, based on group membership or randomized content) are all included. If all_blocks is not specified, you must specify the username for the user whose course blocks are requested. * username: (string) Required, unless ``all_blocks`` is specified. Specify the username for the user whose course blocks are requested. Only users with course staff permissions can specify other users' usernames. If a username is specified, results include blocks that are visible to that user, including those based on group or cohort membership or randomized content assigned to that user. Example: username=anjali * student_view_data: (list) Indicates for which block types to return student_view_data. 
Example: student_view_data=video * block_counts: (list) Indicates for which block types to return the aggregate count of the blocks. Example: block_counts=video,problem * requested_fields: (list) Indicates which additional fields to return for each block. For a list of available fields see under `Response Values -> blocks`, below. The following fields are always returned: id, type, display_name Example: requested_fields=graded,format,student_view_multi_device * depth: (integer or all) Indicates how deep to traverse into the blocks hierarchy. A value of all means the entire hierarchy. Default is 0 Example: depth=all * nav_depth: (integer) WARNING: nav_depth is not supported, and may be removed at any time. Indicates how far deep to traverse into the course hierarchy before bundling all the descendants. Default is 3 since typical navigational views of the course show a maximum of chapter->sequential->vertical. Example: nav_depth=3 * return_type (string) Indicates in what data type to return the blocks. Default is dict. Supported values are: dict, list Example: return_type=dict * block_types_filter: (list) Requested types of blocks used to filter the final result of returned blocks. Possible values include sequential, vertical, html, problem, video, and discussion. Example: block_types_filter=vertical,html **Response Values** The following fields are returned with a successful response. * root: The ID of the root node of the requested course block structure. * blocks: A dictionary or list, based on the value of the "return_type" parameter. Maps block usage IDs to a collection of information about each block. Each block contains the following fields. * id: (string) The usage ID of the block. * type: (string) The type of block. Possible values the names of any XBlock type in the system, including custom blocks. Examples are course, chapter, sequential, vertical, html, problem, video, and discussion. * display_name: (string) The display name of the block. 
* children: (list) If the block has child blocks, a list of IDs of the child blocks. Returned only if "children" is included in the "requested_fields" parameter. * completion: (float or None) The level of completion of the block. Its value can vary between 0.0 and 1.0 or be equal to None if block is not completable. Returned only if "completion" is included in the "requested_fields" parameter. * block_counts: (dict) For each block type specified in the block_counts parameter to the endpoint, the aggregate number of blocks of that type for this block and all of its descendants. * graded (boolean) Whether or not the block or any of its descendants is graded. Returned only if "graded" is included in the "requested_fields" parameter. * format: (string) The assignment type of the block. Possible values can be "Homework", "Lab", "Midterm Exam", and "Final Exam". Returned only if "format" is included in the "requested_fields" parameter. * student_view_data: (dict) The JSON data for this block. Returned only if the "student_view_data" input parameter contains this block's type. * student_view_url: (string) The URL to retrieve the HTML rendering of this block's student view. The HTML could include CSS and Javascript code. This field can be used in combination with the student_view_multi_device field to decide whether to display this content to the user. This URL can be used as a fallback if the student_view_data for this block type is not supported by the client or the block. * student_view_multi_device: (boolean) Whether or not the HTML of the student view that is rendered at "student_view_url" supports responsive web layouts, touch-based inputs, and interactive state management for a variety of device sizes and types, including mobile and touch devices. Returned only if "student_view_multi_device" is included in the "requested_fields" parameter. * lms_web_url: (string) The URL to the navigational container of the xBlock on the web LMS. 
This URL can be used as a further fallback if the student_view_url and the student_view_data fields are not supported. * lti_url: The block URL for an LTI consumer. Returned only if the "ENABLE_LTI_PROVIDER" Django settign is set to "True". * due: The due date of the block. Returned only if "due" is included in the "requested_fields" parameter. * show_correctness: Whether to show scores/correctness to learners for the current sequence or problem. Returned only if "show_correctness" is included in the "requested_fields" parameter. """ def list(self, request, usage_key_string): # pylint: disable=arguments-differ """ REST API endpoint for listing all the blocks information in the course, while regarding user access and roles. Arguments: request - Django request object usage_key_string - The usage key for a block. """ # validate request parameters requested_params = request.query_params.copy() requested_params.update({'usage_key': usage_key_string}) params = BlockListGetForm(requested_params, initial={'requesting_user': request.user}) if not params.is_valid(): raise ValidationError(params.errors) try: return Response( get_blocks( request, params.cleaned_data['usage_key'], params.cleaned_data['user'], params.cleaned_data['depth'], params.cleaned_data.get('nav_depth'), params.cleaned_data['requested_fields'], params.cleaned_data.get('block_counts', []), params.cleaned_data.get('student_view_data', []), params.cleaned_data['return_type'], params.cleaned_data.get('block_types_filter', None), ) ) except ItemNotFoundError as exception: raise Http404("Block not found: {}".format(text_type(exception))) @view_auth_classes() class BlocksInCourseView(BlocksView): """ **Use Case** Returns the blocks in the course according to the requesting user's access level. 
**Example requests**: GET /api/courses/v1/blocks/?course_id=<course_id> GET /api/courses/v1/blocks/?course_id=<course_id> &username=anjali &depth=all &requested_fields=graded,format,student_view_multi_device,lti_url &block_counts=video &student_view_data=video &block_types_filter=problem,html **Parameters**: This view redirects to /api/courses/v1/blocks/<root_usage_key>/ for the root usage key of the course specified by course_id. The view accepts all parameters accepted by :class:`BlocksView`, plus the following required parameter * course_id: (string, required) The ID of the course whose block data we want to return **Response Values** Responses are identical to those returned by :class:`BlocksView` when passed the root_usage_key of the requested course. If the course_id is not supplied, a 400: Bad Request is returned, with a message indicating that course_id is required. If an invalid course_id is supplied, a 400: Bad Request is returned, with a message indicating that the course_id is not valid. """ def list(self, request): # pylint: disable=arguments-differ """ Retrieves the usage_key for the requested course, and then returns the same information that would be returned by BlocksView.list, called with that usage key Arguments: request - Django request object """ # convert the requested course_key to the course's root block's usage_key course_key_string = request.query_params.get('course_id', None) if not course_key_string: raise ValidationError('course_id is required.') try: course_key = CourseKey.from_string(course_key_string) course_usage_key = modulestore().make_course_usage_key(course_key) except InvalidKeyError: raise ValidationError("'{}' is not a valid course key.".format(unicode(course_key_string))) return super(BlocksInCourseView, self).list(request, course_usage_key)
agpl-3.0
nicky-ji/edx-nicky
lms/djangoapps/courseware/courses.py
12
12831
from collections import defaultdict
from fs.errors import ResourceNotFoundError
import logging
import inspect

from path import path
from django.http import Http404
from django.conf import settings

from edxmako.shortcuts import render_to_string
# NOTE: ModuleStoreEnum was previously imported twice; the duplicate import
# has been removed.
from xmodule.modulestore import ModuleStoreEnum
from opaque_keys.edx.keys import CourseKey
from xmodule.modulestore.django import modulestore
from xmodule.contentstore.content import StaticContent
from xmodule.modulestore.exceptions import ItemNotFoundError
from static_replace import replace_static_urls
from xmodule.x_module import STUDENT_VIEW
from courseware.access import has_access
from courseware.model_data import FieldDataCache
from courseware.module_render import get_module
import branding

log = logging.getLogger(__name__)


def get_request_for_thread():
    """Walk up the stack, return the nearest first argument named "request"."""
    frame = None
    try:
        for f in inspect.stack()[1:]:
            frame = f[0]
            code = frame.f_code
            if code.co_varnames[:1] == ("request",):
                return frame.f_locals["request"]
            elif code.co_varnames[:2] == ("self", "request",):
                return frame.f_locals["request"]
    finally:
        # Explicitly drop the frame reference to avoid a reference cycle, as
        # recommended by the `inspect` module documentation.
        del frame


def get_course(course_id, depth=0):
    """
    Given a course id, return the corresponding course descriptor.

    If the course does not exist, raises a ValueError.  This is appropriate
    for internal use.

    depth: The number of levels of children for the modulestore to cache.
    None means infinite depth.  Default is to fetch no children.
    """
    course = modulestore().get_course(course_id, depth=depth)
    if course is None:
        raise ValueError(u"Course not found: {0}".format(course_id))
    return course


# TODO please rename this function to get_course_by_key at next opportunity!
def get_course_by_id(course_key, depth=0):
    """
    Given a course id, return the corresponding course descriptor.

    If such a course does not exist, raises a 404.

    depth: The number of levels of children for the modulestore to cache.
    None means infinite depth
    """
    course = modulestore().get_course(course_key, depth=depth)
    if course:
        return course
    else:
        raise Http404("Course not found.")


def get_course_with_access(user, action, course_key, depth=0):
    """
    Given a course_key, look up the corresponding course descriptor,
    check that the user has the access to perform the specified action
    on the course, and return the descriptor.

    Raises a 404 if the course_key is invalid, or the user doesn't have access.

    depth: The number of levels of children for the modulestore to cache.
    None means infinite depth
    """
    assert isinstance(course_key, CourseKey)
    course = get_course_by_id(course_key, depth=depth)

    if not has_access(user, action, course, course_key):
        # Deliberately return a non-specific error message to avoid
        # leaking info about access control settings
        raise Http404("Course not found.")

    return course


def get_opt_course_with_access(user, action, course_key):
    """
    Same as get_course_with_access, except that if course_key is None,
    return None without performing any access checks.
    """
    if course_key is None:
        return None
    return get_course_with_access(user, action, course_key)


def course_image_url(course):
    """Try to look up the image url for the course.  If it's not found,
    log an error and return the dead link"""
    if course.static_asset_path or modulestore().get_modulestore_type(course.id) == ModuleStoreEnum.Type.xml:
        # If we are a static course with the course_image attribute
        # set different than the default, return that path so that
        # courses can use custom course image paths, otherwise just
        # return the default static path.
        url = '/static/' + (course.static_asset_path or getattr(course, 'data_dir', ''))
        if hasattr(course, 'course_image') and course.course_image != course.fields['course_image'].default:
            url += '/' + course.course_image
        else:
            url += '/images/course_image.jpg'
    else:
        loc = StaticContent.compute_location(course.id, course.course_image)
        url = loc.to_deprecated_string()
    return url


def find_file(filesystem, dirs, filename):
    """
    Looks for a filename in a list of dirs on a filesystem, in the specified order.

    filesystem: an OSFS filesystem
    dirs: a list of path objects
    filename: a string

    Returns d / filename if found in dir d, else raises ResourceNotFoundError.
    """
    for directory in dirs:
        filepath = path(directory) / filename
        if filesystem.exists(filepath):
            return filepath
    raise ResourceNotFoundError(u"Could not find {0}".format(filename))


def get_course_about_section(course, section_key):
    """
    This returns the snippet of html to be rendered on the course about page,
    given the key for the section.

    Valid keys:
    - overview
    - title
    - university
    - number
    - short_description
    - description
    - key_dates (includes start, end, exams, etc)
    - video
    - course_staff_short
    - course_staff_extended
    - requirements
    - syllabus
    - textbook
    - faq
    - more_info
    - ocw_links
    """
    # Many of these are stored as html files instead of some semantic
    # markup. This can change without effecting this interface when we find a
    # good format for defining so many snippets of text/html.

    # TODO: Remove number, instructors from this list
    if section_key in ['short_description', 'description', 'key_dates', 'video',
                       'course_staff_short', 'course_staff_extended',
                       'requirements', 'syllabus', 'textbook', 'faq', 'more_info',
                       'number', 'instructors', 'overview',
                       'effort', 'end_date', 'prerequisites', 'ocw_links']:
        try:
            request = get_request_for_thread()

            loc = course.location.replace(category='about', name=section_key)

            # Use an empty cache
            field_data_cache = FieldDataCache([], course.id, request.user)
            about_module = get_module(
                request.user,
                request,
                loc,
                field_data_cache,
                log_if_not_found=False,
                wrap_xmodule_display=False,
                static_asset_path=course.static_asset_path
            )

            html = ''

            if about_module is not None:
                try:
                    html = about_module.render(STUDENT_VIEW).content
                except Exception:  # pylint: disable=broad-except
                    html = render_to_string('courseware/error-message.html', None)
                    log.exception(
                        u"Error rendering course={course}, section_key={section_key}".format(
                            course=course, section_key=section_key
                        ))
            return html

        except ItemNotFoundError:
            log.warning(
                u"Missing about section {key} in course {url}".format(key=section_key, url=course.location.to_deprecated_string())
            )
            return None
    elif section_key == "title":
        return course.display_name_with_default
    elif section_key == "university":
        return course.display_org_with_default
    elif section_key == "number":
        return course.display_number_with_default

    raise KeyError("Invalid about key " + str(section_key))


def get_course_info_section(request, course, section_key):
    """
    This returns the snippet of html to be rendered on the course info page,
    given the key for the section.

    Valid keys:
    - handouts
    - guest_handouts
    - updates
    - guest_updates
    """
    usage_key = course.id.make_usage_key('course_info', section_key)

    # Use an empty cache
    field_data_cache = FieldDataCache([], course.id, request.user)
    info_module = get_module(
        request.user,
        request,
        usage_key,
        field_data_cache,
        log_if_not_found=False,
        wrap_xmodule_display=False,
        static_asset_path=course.static_asset_path
    )

    html = ''
    if info_module is not None:
        try:
            html = info_module.render(STUDENT_VIEW).content
        except Exception:  # pylint: disable=broad-except
            html = render_to_string('courseware/error-message.html', None)
            log.exception(
                u"Error rendering course={course}, section_key={section_key}".format(
                    course=course, section_key=section_key
                ))
    return html


# TODO: Fix this such that these are pulled in as extra course-specific tabs.
#       arjun will address this by the end of October if no one does so prior to
#       then.
def get_course_syllabus_section(course, section_key):
    """
    This returns the snippet of html to be rendered on the syllabus page,
    given the key for the section.

    Valid keys:
    - syllabus
    - guest_syllabus
    """
    # Many of these are stored as html files instead of some semantic
    # markup. This can change without effecting this interface when we find a
    # good format for defining so many snippets of text/html.

    if section_key in ['syllabus', 'guest_syllabus']:
        try:
            filesys = course.system.resources_fs
            # first look for a run-specific version
            dirs = [path("syllabus") / course.url_name, path("syllabus")]
            filepath = find_file(filesys, dirs, section_key + ".html")
            with filesys.open(filepath) as html_file:
                return replace_static_urls(
                    html_file.read().decode('utf-8'),
                    getattr(course, 'data_dir', None),
                    course_id=course.id,
                    static_asset_path=course.static_asset_path,
                )
        except ResourceNotFoundError:
            log.exception(
                u"Missing syllabus section {key} in course {url}".format(key=section_key, url=course.location.to_deprecated_string())
            )
            return "! Syllabus missing !"

    raise KeyError("Invalid about key " + str(section_key))


def get_courses_by_university(user, domain=None):
    '''
    Returns dict of lists of courses available, keyed by course.org (ie university).
    Courses are sorted by course.number.
    '''
    # TODO: Clean up how 'error' is done.
    # filter out any courses that errored.
    visible_courses = get_courses(user, domain)

    universities = defaultdict(list)
    for course in visible_courses:
        universities[course.org].append(course)

    return universities


def get_courses(user, domain=None):
    '''
    Returns a list of courses available, sorted by course.number
    '''
    # NOTE(review): `domain` is currently unused by this implementation; it is
    # kept in the signature for interface compatibility with existing callers.
    courses = branding.get_visible_courses()

    courses = [c for c in courses if has_access(user, 'see_exists', c)]

    courses = sorted(courses, key=lambda course: course.number)

    return courses


def sort_by_announcement(courses):
    """
    Sorts a list of courses by their announcement date. If the date is
    not available, sort them by their start date.
    """
    # Sort courses by how far are they from they start day
    key = lambda course: course.sorting_score
    courses = sorted(courses, key=key)

    return courses


def get_cms_course_link(course, page='course'):
    """
    Returns a link to course_index for editing the course in cms,
    assuming that the course is actually cms-backed.
    """
    # This is fragile, but unfortunately the problem is that within the LMS we
    # can't use the reverse calls from the CMS
    return u"//{}/{}/{}".format(settings.CMS_BASE, page, unicode(course.id))


def get_cms_block_link(block, page):
    """
    Returns a link to block_index for editing the course in cms,
    assuming that the block is actually cms-backed.
    """
    # This is fragile, but unfortunately the problem is that within the LMS we
    # can't use the reverse calls from the CMS
    return u"//{}/{}/{}".format(settings.CMS_BASE, page, block.location)


def get_studio_url(course_key, page):
    """
    Get the Studio URL of the page that is passed in.
    """
    assert(isinstance(course_key, CourseKey))
    course = get_course_by_id(course_key)
    is_studio_course = course.course_edit_method == "Studio"
    is_mongo_course = modulestore().get_modulestore_type(course_key) == ModuleStoreEnum.Type.mongo
    studio_link = None
    if is_studio_course and is_mongo_course:
        studio_link = get_cms_course_link(course, page)
    return studio_link
agpl-3.0
yasharmaster/scancode-toolkit
tests/cluecode/test_copyrights_ics.py
3
832247
# -*- coding: utf-8 -*- # # Copyright (c) 2017 nexB Inc. and others. All rights reserved. # http://nexb.com and https://github.com/nexB/scancode-toolkit/ # The ScanCode software is licensed under the Apache License version 2.0. # Data generated with ScanCode require an acknowledgment. # ScanCode is a trademark of nexB Inc. # # You may not use this software except in compliance with the License. # You may obtain a copy of the License at: http://apache.org/licenses/LICENSE-2.0 # Unless required by applicable law or agreed to in writing, software distributed # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR # CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. # # When you publish or redistribute any data created with ScanCode or any ScanCode # derivative work, you must accompany this data with the following acknowledgment: # # Generated with ScanCode and provided on an "AS IS" BASIS, WITHOUT WARRANTIES # OR CONDITIONS OF ANY KIND, either express or implied. No content created from # ScanCode should be considered or used as legal advice. Consult an Attorney # for any legal advice. # ScanCode is a free software code scanning tool from nexB Inc. and others. # Visit https://github.com/nexB/scancode-toolkit/ for support and download. from __future__ import absolute_import, print_function import os.path from unittest.case import expectedFailure from commoncode.testcase import FileBasedTesting from cluecode_assert_utils import check_detection """ This test suite is based a rather large subset of Android ICS, providing a rather diversified sample of a typical Linux-based user space environment. 
""" class TestCopyright(FileBasedTesting): test_data_dir = os.path.join(os.path.dirname(__file__), 'data') def test_ics_android_mock_android_mk(self): test_file = self.get_test_loc('ics/android-mock/Android.mk') expected = [ u'Copyright (c) 2010 The Android Open Source Project', ] check_detection(expected, test_file) def test_ics_android_mock_notice(self): test_file = self.get_test_loc('ics/android-mock/NOTICE') expected = [ u'Copyright (c) 2005-2008, The Android Open Source Project', ] check_detection(expected, test_file) def test_ics_android_mock_regenerate_from_source_sh(self): test_file = self.get_test_loc('ics/android-mock/regenerate_from_source.sh') expected = [ u'Copyright (c) 2011 The Android Open Source Project.', ] check_detection(expected, test_file) def test_ics_android_mock_livetests_com_google_android_testing_mocking_test_androidmanifest_xml(self): test_file = self.get_test_loc('ics/android-mock-livetests-com-google-android-testing-mocking-test/AndroidManifest.xml') expected = [ u'Copyright 2010 Google Inc.', ] check_detection(expected, test_file) def test_ics_android_mock_src_com_google_android_testing_mocking_androidmock_java(self): test_file = self.get_test_loc('ics/android-mock-src-com-google-android-testing-mocking/AndroidMock.java') expected = [ u'Copyright 2010 Google Inc.', ] check_detection(expected, test_file) def test_ics_android_mock_src_com_google_android_testing_mocking_generatedmockjar_readme(self): test_file = self.get_test_loc('ics/android-mock-src-com-google-android-testing-mocking/GeneratedMockJar.readme') expected = [ u'Copyright 2010 Google Inc.', ] check_detection(expected, test_file) def test_ics_antlr_android_mk(self): test_file = self.get_test_loc('ics/antlr/Android.mk') expected = [ u'Copyright (c) 2011 The Android Open Source Project', ] check_detection(expected, test_file) def test_ics_antlr_src_org_antlr_runtime_antlrfilestream_java(self): test_file = 
self.get_test_loc('ics/antlr-src-org-antlr-runtime/ANTLRFileStream.java') expected = [ u'Copyright (c) 2005-2009 Terence Parr', ] check_detection(expected, test_file) def test_ics_apache_harmony_notice(self): test_file = self.get_test_loc('ics/apache-harmony/NOTICE') expected = [ u'Copyright 2001-2004 The Apache Software Foundation.', u'Copyright 2001-2006 The Apache Software Foundation.', u'Copyright 2003-2004 The Apache Software Foundation.', u'Copyright 2004 The Apache Software Foundation.', ] check_detection(expected, test_file) def test_ics_apache_http_cleanspec_mk(self): test_file = self.get_test_loc('ics/apache-http/CleanSpec.mk') expected = [ u'Copyright (c) 2007 The Android Open Source Project', ] check_detection(expected, test_file) def test_ics_apache_http_thirdpartyproject_prop(self): test_file = self.get_test_loc('ics/apache-http/ThirdPartyProject.prop') expected = [ u'Copyright 2010 Google Inc.', ] check_detection(expected, test_file) def test_ics_apache_http_src_org_apache_commons_codec_binarydecoder_java(self): test_file = self.get_test_loc('ics/apache-http-src-org-apache-commons-codec/BinaryDecoder.java') expected = [ u'Copyright 2001-2004 The Apache Software Foundation.', ] check_detection(expected, test_file) def test_ics_apache_http_src_org_apache_commons_codec_overview_html(self): test_file = self.get_test_loc('ics/apache-http-src-org-apache-commons-codec/overview.html') expected = [ u'Copyright 2003-2004 The Apache Software Foundation.', ] check_detection(expected, test_file) def test_ics_apache_http_src_org_apache_commons_logging_logfactory_java(self): test_file = self.get_test_loc('ics/apache-http-src-org-apache-commons-logging/LogFactory.java') expected = [ u'Copyright 2001-2006 The Apache Software Foundation.', ] check_detection(expected, test_file) def test_ics_apache_http_src_org_apache_commons_logging_package_html(self): test_file = self.get_test_loc('ics/apache-http-src-org-apache-commons-logging/package.html') expected = [ u'Copyright 
2001-2004 The Apache Software Foundation.', ] check_detection(expected, test_file) def test_ics_apache_http_src_org_apache_commons_logging_impl_weakhashtable_java(self): test_file = self.get_test_loc('ics/apache-http-src-org-apache-commons-logging-impl/WeakHashtable.java') expected = [ u'Copyright 2004 The Apache Software Foundation.', ] check_detection(expected, test_file) def test_ics_apache_xml_notice(self): test_file = self.get_test_loc('ics/apache-xml/NOTICE') expected = [ u'Copyright 1999-2006 The Apache Software Foundation', u'Copyright 1999-2006 The Apache Software Foundation', u'copyright (c) 1999-2002, Lotus Development', u'copyright (c) 2001-2002, Sun Microsystems.', u'copyright (c) 2003, IBM Corporation., http://www.ibm.com.', u'Copyright 1999-2006 The Apache Software Foundation', u'copyright (c) 1999, IBM Corporation., http://www.ibm.com.', u'copyright (c) 1999, Sun Microsystems.', u'iClick, Inc., software copyright (c) 1999.', u'Copyright 2001-2003,2006 The Apache Software Foundation.', u'copyright (c) 1999, IBM Corporation., http://www.ibm.com.', u'copyright (c) 1999, Sun Microsystems.', u'copyright (c) 2000 World Wide Web Consortium', ] check_detection(expected, test_file) @expectedFailure def test_ics_apache_xml_notice_trail_corp_trail_url(self): test_file = self.get_test_loc('ics/apache-xml/NOTICE') expected = [ u'Copyright 1999-2006 The Apache Software Foundation', u'Copyright 1999-2006 The Apache Software Foundation', u'copyright (c) 1999-2002, Lotus Development Corporation.', u'copyright (c) 2001-2002, Sun Microsystems.', u'copyright (c) 2003, IBM Corporation., http://www.ibm.com.', u'Copyright 1999-2006 The Apache Software Foundation', u'copyright (c) 1999, IBM Corporation., http://www.ibm.com.', u'copyright (c) 1999, Sun Microsystems.', u'iClick, Inc., software copyright (c) 1999.', u'Copyright 2001-2003,2006 The Apache Software Foundation.', u'copyright (c) 1999, IBM Corporation., http://www.ibm.com.', u'copyright (c) 1999, Sun 
Microsystems.', u'copyright (c) 2000 World Wide Web Consortium, http://www.w3.org', ] check_detection(expected, test_file) def test_ics_apache_xml_src_main_java_org_apache_xpath_domapi_xpathstylesheetdom3exception_java(self): test_file = self.get_test_loc('ics/apache-xml-src-main-java-org-apache-xpath-domapi/XPathStylesheetDOM3Exception.java') expected = [ u'Copyright (c) 2002 World Wide Web Consortium, Massachusetts Institute of Technology', ] check_detection(expected, test_file) @expectedFailure def test_ics_apache_xml_src_main_java_org_apache_xpath_domapi_xpathstylesheetdom3exception_java_trail_name(self): test_file = self.get_test_loc('ics/apache-xml-src-main-java-org-apache-xpath-domapi/XPathStylesheetDOM3Exception.java') expected = [ u'Copyright (c) 2002 World Wide Web Consortium, (Massachusetts Institute of Technology, Institut National de Recherche en Informatique et en Automatique, Keio University).', ] check_detection(expected, test_file) def test_ics_astl_android_mk(self): test_file = self.get_test_loc('ics/astl/Android.mk') expected = [ u'Copyright (c) 2009 The Android Open Source Project', ] check_detection(expected, test_file) def test_ics_astl_notice(self): test_file = self.get_test_loc('ics/astl/NOTICE') expected = [ u'Copyright (c) 2009 The Android Open Source Project', ] check_detection(expected, test_file) def test_ics_astl_include_algorithm(self): test_file = self.get_test_loc('ics/astl-include/algorithm') expected = [ u'Copyright (c) 2009 The Android Open Source Project', ] check_detection(expected, test_file) def test_ics_astl_include_basic_ios_h(self): test_file = self.get_test_loc('ics/astl-include/basic_ios.h') expected = [ u'Copyright (c) 2010 The Android Open Source Project', ] check_detection(expected, test_file) def test_ics_astl_include_streambuf(self): test_file = self.get_test_loc('ics/astl-include/streambuf') expected = [ u'Copyright (c) 2010 The Android Open Source Project', ] check_detection(expected, test_file) def 
test_ics_astl_include_string(self): test_file = self.get_test_loc('ics/astl-include/string') expected = [ u'Copyright (c) 2009 The Android Open Source Project', ] check_detection(expected, test_file) def test_ics_astl_src_ostream_cpp(self): test_file = self.get_test_loc('ics/astl-src/ostream.cpp') expected = [ u'Copyright (c) 2010 The Android Open Source Project', ] check_detection(expected, test_file) def test_ics_astl_tests_test_vector_cpp(self): test_file = self.get_test_loc('ics/astl-tests/test_vector.cpp') expected = [ u'Copyright (c) 2009 The Android Open Source Project', ] check_detection(expected, test_file) def test_ics_bison_aclocal_m4(self): test_file = self.get_test_loc('ics/bison/aclocal.m4') expected = [ u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.', u'Copyright (c) 2002, 2003, 2005 Free Software Foundation, Inc.', u'Copyright (c) 2001, 2003, 2005 Free Software Foundation, Inc.', u'Copyright (c) 1996, 1997, 1999, 2000, 2001, 2002, 2003, 2005 Free Software Foundation, Inc.', u'Copyright (c) 1997, 2000, 2001, 2003, 2004, 2005 Free Software Foundation, Inc.', u'Copyright (c) 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.', u'Copyright (c) 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.', u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.', u'Copyright (c) 2001, 2003, 2005 Free Software Foundation, Inc.', u'Copyright (c) 2003, 2005 Free Software Foundation, Inc.', u'Copyright (c) 2001, 2002, 2003, 2005 Free Software Foundation, Inc.', u'Copyright (c) 1997, 1999, 2000, 2001, 2003, 2005 Free Software Foundation, Inc.', u'Copyright (c) 2003, 2004, 2005 Free Software Foundation, Inc.', u'Copyright (c) 2001, 2002, 2003, 2005 Free Software Foundation, Inc.', u'Copyright (c) 1996, 1997, 2000, 2001, 2003, 2005 Free Software Foundation, Inc.', u'Copyright (c) 2001, 2003, 2005 Free Software Foundation, 
Inc.', u'Copyright (c) 2004, 2005 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_android_mk(self): test_file = self.get_test_loc('ics/bison/Android.mk') expected = [ u'Copyright 2006 The Android Open Source Project', ] check_detection(expected, test_file) def test_ics_bison_changelog(self): test_file = self.get_test_loc('ics/bison/ChangeLog') expected = [ u'Copyright (c) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_config_log(self): test_file = self.get_test_loc('ics/bison/config.log') expected = [ u'Copyright (c) 2006 Free Software Foundation, Inc.', u'Copyright (c) 2006 Free Software Foundation, Inc.', u'Copyright (c) 2006 Free Software Foundation, Inc.', u'Copyright (c) 2006 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_config_status(self): test_file = self.get_test_loc('ics/bison/config.status') expected = [ u'Copyright (c) 2003 Free Software Foundation, Inc.', u'Copyright (c) 2000, 2001, 2003 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_configure(self): test_file = self.get_test_loc('ics/bison/configure') expected = [ u'Copyright (c) 2003 Free Software Foundation, Inc.', u'Copyright (c) 2003 Free Software Foundation, Inc.', u'Copyright (c) 2003 Free Software Foundation, Inc.', u'Copyright (c) 2000, 2001, 2003 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_configure_ac(self): test_file = self.get_test_loc('ics/bison/configure.ac') expected = [ u'Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_copying(self): test_file = self.get_test_loc('ics/bison/COPYING') expected = [ u'Copyright (c) 1989, 1991 Free Software Foundation, Inc.', u'copyrighted by 
the Free Software Foundation', ] check_detection(expected, test_file) def test_ics_bison_gnumakefile(self): test_file = self.get_test_loc('ics/bison/GNUmakefile') expected = [ u'Copyright (c) 2001, 2003 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_install(self): test_file = self.get_test_loc('ics/bison/INSTALL') expected = [ u'Copyright (c) 1994, 1995, 1996, 1999, 2000, 2001, 2002, 2004, 2005 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_makefile(self): test_file = self.get_test_loc('ics/bison/Makefile') expected = [ u'Copyright (c) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_makefile_am(self): test_file = self.get_test_loc('ics/bison/Makefile.am') expected = [ u'Copyright (c) 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_makefile_cfg(self): test_file = self.get_test_loc('ics/bison/Makefile.cfg') expected = [ u'Copyright (c) 2003, 2005, 2006 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_makefile_maint(self): test_file = self.get_test_loc('ics/bison/Makefile.maint') expected = [ u'Copyright (c) 2001-2006 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_news(self): test_file = self.get_test_loc('ics/bison/NEWS') expected = [ u'Copyright (c) 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_notice(self): test_file = self.get_test_loc('ics/bison/NOTICE') expected = [ u'Copyright (c) 1992-2006 Free Software Foundation, Inc.', u'Copyright (c) 1989, 1991 Free Software Foundation, Inc.', u'copyrighted by the Free Software Foundation', ] check_detection(expected, test_file) def 
test_ics_bison_packaging(self): test_file = self.get_test_loc('ics/bison/PACKAGING') expected = [ u'Copyright (c) 2002, 2005 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_readme(self): test_file = self.get_test_loc('ics/bison/README') expected = [ u'Copyright (c) 1992, 1998, 1999, 2003, 2004, 2005 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_todo(self): test_file = self.get_test_loc('ics/bison/TODO') expected = [ u'Copyright (c) 2001, 2002, 2003, 2004, 2006 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_build_aux_config_guess(self): test_file = self.get_test_loc('ics/bison-build-aux/config.guess') expected = [ u'Copyright (c) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.', u'Copyright (c) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_build_aux_config_rpath(self): test_file = self.get_test_loc('ics/bison-build-aux/config.rpath') expected = [ u'Copyright 1996-2006 Free Software Foundation, Inc.', u'Gordon Matzigkeit <gord@gnu.ai.mit.edu>, 1996', ] check_detection(expected, test_file) def test_ics_bison_build_aux_depcomp(self): test_file = self.get_test_loc('ics/bison-build-aux/depcomp') expected = [ u'Copyright (c) 1999, 2000, 2003, 2004, 2005 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_build_aux_install_sh(self): test_file = self.get_test_loc('ics/bison-build-aux/install-sh') expected = [ u'Copyright (c) 1994 X Consortium', ] check_detection(expected, test_file) def test_ics_bison_build_aux_mdate_sh(self): test_file = self.get_test_loc('ics/bison-build-aux/mdate-sh') expected = [ u'Copyright (c) 1995, 1996, 1997, 2003, 2004, 2005 Free Software Foundation, Inc.', ] check_detection(expected, 
test_file) def test_ics_bison_build_aux_missing(self): test_file = self.get_test_loc('ics/bison-build-aux/missing') expected = [ u'Copyright (c) 1996, 1997, 1999, 2000, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_build_aux_texinfo_tex(self): test_file = self.get_test_loc('ics/bison-build-aux/texinfo.tex') expected = [ u'Copyright (c) 1985, 1986, 1988, 1990, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_build_aux_ylwrap(self): test_file = self.get_test_loc('ics/bison-build-aux/ylwrap') expected = [ u'Copyright (c) 1996, 1997, 1998, 1999, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_data_c_m4(self): test_file = self.get_test_loc('ics/bison-data/c.m4') expected = [ u'Copyright (c) 2002, 2004, 2005, 2006 Free Software Foundation, Inc.', u'Copyright (c) $2 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_data_c_m4_2(self): test_file = self.get_test_loc('ics/bison-data/c++.m4') expected = [ u'Copyright (c) 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_data_makefile_am(self): test_file = self.get_test_loc('ics/bison-data/Makefile.am') expected = [ u'Copyright (c) 2002, 2005 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_data_readme(self): test_file = self.get_test_loc('ics/bison-data/README') expected = [ u'Copyright (c) 2002 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_data_m4sugar_m4sugar_m4(self): test_file = self.get_test_loc('ics/bison-data-m4sugar/m4sugar.m4') expected = [ u'Copyright (c) 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.', ] check_detection(expected, 
test_file) def test_ics_bison_djgpp_config_bat(self): test_file = self.get_test_loc('ics/bison-djgpp/config.bat') expected = [ u'Copyright (c) 2005, 2006 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_djgpp_config_sed(self): test_file = self.get_test_loc('ics/bison-djgpp/config.sed') expected = [ u'Copyright (c) 2005, 2006 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_djgpp_makefile_maint(self): test_file = self.get_test_loc('ics/bison-djgpp/Makefile.maint') expected = [ u'Copyright (c) 2005, 2006 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_djgpp_readme_in(self): test_file = self.get_test_loc('ics/bison-djgpp/README.in') expected = [ u'Copyright (c) 2005, 2006 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_djgpp_subpipe_h(self): test_file = self.get_test_loc('ics/bison-djgpp/subpipe.h') expected = [ u'Copyright (c) 2005 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_doc_bison_texinfo(self): test_file = self.get_test_loc('ics/bison-doc/bison.texinfo') expected = [ u'Copyright 1988, 1989, 1990, 1991, 1992, 1993, 1995, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_doc_fdl_texi(self): test_file = self.get_test_loc('ics/bison-doc/fdl.texi') expected = [ u'Copyright 2000,2001,2002 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_doc_gpl_texi(self): test_file = self.get_test_loc('ics/bison-doc/gpl.texi') expected = [ u'Copyright 1989, 1991 Free Software Foundation, Inc.', u'copyrighted by the Free Software Foundation', ] check_detection(expected, test_file) def test_ics_bison_doc_makefile_am(self): test_file = self.get_test_loc('ics/bison-doc/Makefile.am') expected = [ u'Copyright (c) 2001, 2002, 2003, 2005, 2006 
Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_doc_refcard_tex(self): test_file = self.get_test_loc('ics/bison-doc/refcard.tex') expected = [ u'Copyright (c) 1998, 2001 Free Software Foundation, Inc.', u'Copyright \\copyright\\ \\year\\ Free Software Foundation, Inc.', u'Copyright \\copyright\\ \\year\\ Free Software Foundation, Inc.', ] check_detection(expected, test_file) @expectedFailure def test_ics_bison_doc_refcard_tex_extra_copyright_extra_copyright(self): test_file = self.get_test_loc('ics/bison-doc/refcard.tex') expected = [ u'Copyright (c) 1998, 2001 Free Software Foundation, Inc.', u'copyright\\ \\year\\ Free Software Foundation, Inc.', u'copyright\\ \\year\\ Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_examples_extexi(self): test_file = self.get_test_loc('ics/bison-examples/extexi') expected = [ u'Copyright 1992, 2000, 2001, 2005, 2006 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_examples_makefile_am(self): test_file = self.get_test_loc('ics/bison-examples/Makefile.am') expected = [ u'Copyright (c) 2005 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_lib_abitset_c(self): test_file = self.get_test_loc('ics/bison-lib/abitset.c') expected = [ u'Copyright (c) 2002, 2003, 2006 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_lib_abitset_h(self): test_file = self.get_test_loc('ics/bison-lib/abitset.h') expected = [ u'Copyright (c) 2002, 2004 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_lib_argmatch_c(self): test_file = self.get_test_loc('ics/bison-lib/argmatch.c') expected = [ u'Copyright (c) 1990, 1998, 1999, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_lib_argmatch_h(self): test_file = 
self.get_test_loc('ics/bison-lib/argmatch.h') expected = [ u'Copyright (c) 1990, 1998, 1999, 2001, 2002, 2004, 2005 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_lib_basename_c(self): test_file = self.get_test_loc('ics/bison-lib/basename.c') expected = [ u'Copyright (c) 1990, 1998, 1999, 2000, 2001, 2003, 2004, 2005 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_lib_bbitset_h(self): test_file = self.get_test_loc('ics/bison-lib/bbitset.h') expected = [ u'Copyright (c) 2002, 2003, 2004, 2006 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_lib_bitset_c(self): test_file = self.get_test_loc('ics/bison-lib/bitset.c') expected = [ u'Copyright (c) 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_lib_bitset_h(self): test_file = self.get_test_loc('ics/bison-lib/bitset.h') expected = [ u'Copyright (c) 2002, 2003, 2004 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_lib_bitsetv_c(self): test_file = self.get_test_loc('ics/bison-lib/bitsetv.c') expected = [ u'Copyright (c) 2001, 2002, 2004, 2005, 2006 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_lib_bitsetv_print_c(self): test_file = self.get_test_loc('ics/bison-lib/bitsetv-print.c') expected = [ u'Copyright (c) 2001, 2002, 2004, 2006 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_lib_dirname_c(self): test_file = self.get_test_loc('ics/bison-lib/dirname.c') expected = [ u'Copyright (c) 1990, 1998, 2000, 2001, 2003, 2004, 2005 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_lib_dirname_h(self): test_file = self.get_test_loc('ics/bison-lib/dirname.h') expected = [ u'Copyright (c) 1998, 2001, 2003, 2004, 2005 Free Software Foundation, Inc.', ] 
check_detection(expected, test_file) def test_ics_bison_lib_dup_safer_c(self): test_file = self.get_test_loc('ics/bison-lib/dup-safer.c') expected = [ u'Copyright (c) 2001, 2004, 2005 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_lib_error_c(self): test_file = self.get_test_loc('ics/bison-lib/error.c') expected = [ u'Copyright (c) 1990-1998, 2000-2003, 2004 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_lib_error_h(self): test_file = self.get_test_loc('ics/bison-lib/error.h') expected = [ u'Copyright (c) 1995, 1996, 1997, 2003 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_lib_exit_h(self): test_file = self.get_test_loc('ics/bison-lib/exit.h') expected = [ u'Copyright (c) 1995, 2001 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_lib_exitfail_c(self): test_file = self.get_test_loc('ics/bison-lib/exitfail.c') expected = [ u'Copyright (c) 2002, 2003 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_lib_get_errno_c(self): test_file = self.get_test_loc('ics/bison-lib/get-errno.c') expected = [ u'Copyright (c) 2002, 2006 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_lib_getopt_c(self): test_file = self.get_test_loc('ics/bison-lib/getopt.c') expected = [ u'Copyright (c) 1987,88,89,90,91,92,93,94,95,96,98,99,2000,2001,2002,2003,2004,2006 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_lib_getopt_h(self): test_file = self.get_test_loc('ics/bison-lib/getopt_.h') expected = [ u'Copyright (c) 1989-1994,1996-1999,2001,2003,2004,2005 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_lib_getopt_int_h(self): test_file = self.get_test_loc('ics/bison-lib/getopt_int.h') expected = [ u'Copyright (c) 1989-1994,1996-1999,2001,2003,2004 Free 
Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_lib_getopt1_c(self): test_file = self.get_test_loc('ics/bison-lib/getopt1.c') expected = [ u'Copyright (c) 1987,88,89,90,91,92,93,94,96,97,98,2004 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_lib_gettext_h(self): test_file = self.get_test_loc('ics/bison-lib/gettext.h') expected = [ u'Copyright (c) 1995-1998, 2000-2002, 2004 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_lib_hard_locale_c(self): test_file = self.get_test_loc('ics/bison-lib/hard-locale.c') expected = [ u'Copyright (c) 1997, 1998, 1999, 2002, 2003, 2004 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_lib_hard_locale_h(self): test_file = self.get_test_loc('ics/bison-lib/hard-locale.h') expected = [ u'Copyright (c) 1999, 2003, 2004 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_lib_hash_c(self): test_file = self.get_test_loc('ics/bison-lib/hash.c') expected = [ u'Copyright (c) 1998, 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_lib_hash_h(self): test_file = self.get_test_loc('ics/bison-lib/hash.h') expected = [ u'Copyright (c) 1998, 1999, 2001, 2003 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_lib_makefile_am(self): test_file = self.get_test_loc('ics/bison-lib/Makefile.am') expected = [ u'Copyright (c) 2001, 2002, 2003, 2004 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_lib_malloc_c(self): test_file = self.get_test_loc('ics/bison-lib/malloc.c') expected = [ u'Copyright (c) 1997, 1998 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_lib_mbswidth_c(self): test_file = self.get_test_loc('ics/bison-lib/mbswidth.c') expected = [ u'Copyright 
(c) 2000-2005 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_lib_mbswidth_h(self): test_file = self.get_test_loc('ics/bison-lib/mbswidth.h') expected = [ u'Copyright (c) 2000-2004 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_lib_obstack_c(self): test_file = self.get_test_loc('ics/bison-lib/obstack.c') expected = [ u'Copyright (c) 1988, 1989, 1990, 1991, 1992, 1993, 1994, 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_lib_obstack_h(self): test_file = self.get_test_loc('ics/bison-lib/obstack.h') expected = [ u'Copyright (c) 1988-1994,1996-1999,2003,2004,2005 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_lib_quote_c(self): test_file = self.get_test_loc('ics/bison-lib/quote.c') expected = [ u'Copyright (c) 1998, 1999, 2000, 2001, 2003 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_lib_quote_h(self): test_file = self.get_test_loc('ics/bison-lib/quote.h') expected = [ u'Copyright (c) 1998, 1999, 2000, 2001, 2003 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_lib_quotearg_c(self): test_file = self.get_test_loc('ics/bison-lib/quotearg.c') expected = [ u'Copyright (c) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_lib_quotearg_h(self): test_file = self.get_test_loc('ics/bison-lib/quotearg.h') expected = [ u'Copyright (c) 1998, 1999, 2000, 2001, 2002, 2004 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_lib_stdbool_h(self): test_file = self.get_test_loc('ics/bison-lib/stdbool_.h') expected = [ u'Copyright (c) 2001, 2002, 2003, 2006 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def 
test_ics_bison_lib_stdio_safer_h(self): test_file = self.get_test_loc('ics/bison-lib/stdio-safer.h') expected = [ u'Copyright (c) 2001, 2003 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_lib_stpcpy_c(self): test_file = self.get_test_loc('ics/bison-lib/stpcpy.c') expected = [ u'Copyright (c) 1992, 1995, 1997, 1998 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_lib_stpcpy_h(self): test_file = self.get_test_loc('ics/bison-lib/stpcpy.h') expected = [ u'Copyright (c) 1995, 2001, 2003 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_lib_strdup_c(self): test_file = self.get_test_loc('ics/bison-lib/strdup.c') expected = [ u'Copyright (c) 1991, 1996, 1997, 1998, 2002, 2003, 2004 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_lib_strdup_h(self): test_file = self.get_test_loc('ics/bison-lib/strdup.h') expected = [ u'Copyright (c) 2004 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_lib_strerror_c(self): test_file = self.get_test_loc('ics/bison-lib/strerror.c') expected = [ u'Copyright (c) 1986, 1988, 1989, 1991, 2002, 2003 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_lib_stripslash_c(self): test_file = self.get_test_loc('ics/bison-lib/stripslash.c') expected = [ u'Copyright (c) 1990, 2001, 2003, 2004, 2005 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_lib_strndup_c(self): test_file = self.get_test_loc('ics/bison-lib/strndup.c') expected = [ u'Copyright (c) 1996, 1997, 1998, 2001, 2002, 2003, 2005, 2006 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_lib_strndup_h(self): test_file = self.get_test_loc('ics/bison-lib/strndup.h') expected = [ u'Copyright (c) 2003 Free Software Foundation, Inc.', ] check_detection(expected, test_file) 
def test_ics_bison_lib_strtol_c(self): test_file = self.get_test_loc('ics/bison-lib/strtol.c') expected = [ u'Copyright (c) 1991, 1992, 1994, 1995, 1996, 1997, 1998, 1999, 2003, 2005 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_lib_strtoul_c(self): test_file = self.get_test_loc('ics/bison-lib/strtoul.c') expected = [ u'Copyright (c) 1991, 1997 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_lib_strverscmp_c(self): test_file = self.get_test_loc('ics/bison-lib/strverscmp.c') expected = [ u'Copyright (c) 1997, 2000, 2002, 2004 Free Software Foundation, Inc.', u'Jean-Francois Bignolles <bignolle@ecoledoc.ibp.fr>, 1997.' ] check_detection(expected, test_file) def test_ics_bison_lib_strverscmp_h(self): test_file = self.get_test_loc('ics/bison-lib/strverscmp.h') expected = [ u'Copyright (c) 1997, 2003 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_lib_subpipe_c(self): test_file = self.get_test_loc('ics/bison-lib/subpipe.c') expected = [ u'Copyright (c) 2002, 2004, 2005, 2006 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_lib_subpipe_h(self): test_file = self.get_test_loc('ics/bison-lib/subpipe.h') expected = [ u'Copyright (c) 2002, 2004, 2005 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_lib_timevar_c(self): test_file = self.get_test_loc('ics/bison-lib/timevar.c') expected = [ u'Copyright (c) 2000, 2002, 2004, 2005, 2006 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_lib_timevar_h(self): test_file = self.get_test_loc('ics/bison-lib/timevar.h') expected = [ u'Copyright (c) 2000, 2002, 2004 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_lib_unistd_safer_h(self): test_file = self.get_test_loc('ics/bison-lib/unistd-safer.h') expected = [ u'Copyright (c) 2001, 2003, 2005 
Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_lib_unlocked_io_h(self): test_file = self.get_test_loc('ics/bison-lib/unlocked-io.h') expected = [ u'Copyright (c) 2001, 2002, 2003, 2004 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_lib_xalloc_h(self): test_file = self.get_test_loc('ics/bison-lib/xalloc.h') expected = [ u'Copyright (c) 1990, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2003, 2004 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_lib_xalloc_die_c(self): test_file = self.get_test_loc('ics/bison-lib/xalloc-die.c') expected = [ u'Copyright (c) 1997, 1998, 1999, 2000, 2002, 2003, 2004, 2006 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_m4_bison_i18n_m4(self): test_file = self.get_test_loc('ics/bison-m4/bison-i18n.m4') expected = [ u'Copyright (c) 2005 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_m4_c_working_m4(self): test_file = self.get_test_loc('ics/bison-m4/c-working.m4') expected = [ u'Copyright (c) 2006 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_m4_cxx_m4(self): test_file = self.get_test_loc('ics/bison-m4/cxx.m4') expected = [ u'Copyright (c) 2004, 2006 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_m4_dirname_m4(self): test_file = self.get_test_loc('ics/bison-m4/dirname.m4') expected = [ u'Copyright (c) 2002, 2003, 2004, 2005 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_m4_dos_m4(self): test_file = self.get_test_loc('ics/bison-m4/dos.m4') expected = [ u'Copyright (c) 2000, 2001, 2004 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_m4_error_m4(self): test_file = self.get_test_loc('ics/bison-m4/error.m4') expected = [ u'Copyright (c) 1996, 
1997, 1998, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_m4_exitfail_m4(self): test_file = self.get_test_loc('ics/bison-m4/exitfail.m4') expected = [ u'Copyright (c) 2002, 2003, 2005 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_m4_extensions_m4(self): test_file = self.get_test_loc('ics/bison-m4/extensions.m4') expected = [ u'Copyright (c) 2003, 2006 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_m4_gettext_gl_m4(self): test_file = self.get_test_loc('ics/bison-m4/gettext_gl.m4') expected = [ u'Copyright (c) 1995-2005 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_m4_iconv_m4(self): test_file = self.get_test_loc('ics/bison-m4/iconv.m4') expected = [ u'Copyright (c) 2000-2002 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_m4_inttypes_h_gl_m4(self): test_file = self.get_test_loc('ics/bison-m4/inttypes_h_gl.m4') expected = [ u'Copyright (c) 1997-2004 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_m4_lib_ld_gl_m4(self): test_file = self.get_test_loc('ics/bison-m4/lib-ld_gl.m4') expected = [ u'Copyright (c) 1996-2003 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_m4_lib_link_m4(self): test_file = self.get_test_loc('ics/bison-m4/lib-link.m4') expected = [ u'Copyright (c) 2001-2005 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_m4_m4_m4(self): test_file = self.get_test_loc('ics/bison-m4/m4.m4') expected = [ u'Copyright 2000 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_m4_mbrtowc_m4(self): test_file = self.get_test_loc('ics/bison-m4/mbrtowc.m4') expected = [ u'Copyright (c) 2001-2002, 2004-2005 Free Software Foundation, Inc.', ] check_detection(expected, 
test_file) def test_ics_bison_m4_mbstate_t_m4(self): test_file = self.get_test_loc('ics/bison-m4/mbstate_t.m4') expected = [ u'Copyright (c) 2000, 2001, 2002 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_m4_mbswidth_m4(self): test_file = self.get_test_loc('ics/bison-m4/mbswidth.m4') expected = [ u'Copyright (c) 2000-2002, 2004 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_m4_nls_m4(self): test_file = self.get_test_loc('ics/bison-m4/nls.m4') expected = [ u'Copyright (c) 1995-2003, 2005 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_m4_obstack_m4(self): test_file = self.get_test_loc('ics/bison-m4/obstack.m4') expected = [ u'Copyright (c) 2002, 2003, 2004 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_m4_onceonly_m4(self): test_file = self.get_test_loc('ics/bison-m4/onceonly.m4') expected = [ u'Copyright (c) 2002-2003, 2005 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_m4_progtest_m4(self): test_file = self.get_test_loc('ics/bison-m4/progtest.m4') expected = [ u'Copyright (c) 1996-2003, 2005 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_m4_quotearg_m4(self): test_file = self.get_test_loc('ics/bison-m4/quotearg.m4') expected = [ u'Copyright (c) 2002, 2004, 2005 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_m4_stdbool_m4(self): test_file = self.get_test_loc('ics/bison-m4/stdbool.m4') expected = [ u'Copyright (c) 2002-2006 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_m4_stdio_safer_m4(self): test_file = self.get_test_loc('ics/bison-m4/stdio-safer.m4') expected = [ u'Copyright (c) 2002, 2005 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_m4_stpcpy_m4(self): test_file = 
self.get_test_loc('ics/bison-m4/stpcpy.m4') expected = [ u'Copyright (c) 2002 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_m4_strndup_m4(self): test_file = self.get_test_loc('ics/bison-m4/strndup.m4') expected = [ u'Copyright (c) 2002-2003, 2005-2006 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_m4_strtol_m4(self): test_file = self.get_test_loc('ics/bison-m4/strtol.m4') expected = [ u'Copyright (c) 2002, 2003 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_m4_ulonglong_gl_m4(self): test_file = self.get_test_loc('ics/bison-m4/ulonglong_gl.m4') expected = [ u'Copyright (c) 1999-2004 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_m4_unlocked_io_m4(self): test_file = self.get_test_loc('ics/bison-m4/unlocked-io.m4') expected = [ u'Copyright (c) 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_m4_warning_m4(self): test_file = self.get_test_loc('ics/bison-m4/warning.m4') expected = [ u'Copyright (c) 2001, 2002 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_m4_xstrndup_m4(self): test_file = self.get_test_loc('ics/bison-m4/xstrndup.m4') expected = [ u'Copyright (c) 2003 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_src_assoc_c(self): test_file = self.get_test_loc('ics/bison-src/assoc.c') expected = [ u'Copyright (c) 2002, 2005, 2006 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_src_closure_c(self): test_file = self.get_test_loc('ics/bison-src/closure.c') expected = [ u'Copyright (c) 1984, 1989, 2000, 2001, 2002, 2004, 2005 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_src_closure_h(self): test_file = 
self.get_test_loc('ics/bison-src/closure.h') expected = [ u'Copyright (c) 1984, 1989, 2000, 2001, 2002 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_src_complain_c(self): test_file = self.get_test_loc('ics/bison-src/complain.c') expected = [ u'Copyright (c) 2000, 2001, 2002, 2004, 2005 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_src_complain_h(self): test_file = self.get_test_loc('ics/bison-src/complain.h') expected = [ u'Copyright (c) 2000, 2001, 2002 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_src_conflicts_c(self): test_file = self.get_test_loc('ics/bison-src/conflicts.c') expected = [ u'Copyright (c) 1984, 1989, 1992, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_src_conflicts_h(self): test_file = self.get_test_loc('ics/bison-src/conflicts.h') expected = [ u'Copyright (c) 2000, 2001, 2002, 2004 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_src_derives_c(self): test_file = self.get_test_loc('ics/bison-src/derives.c') expected = [ u'Copyright (c) 1984, 1989, 2000, 2001, 2002, 2003, 2005 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_src_files_c(self): test_file = self.get_test_loc('ics/bison-src/files.c') expected = [ u'Copyright (c) 1984, 1986, 1989, 1992, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_src_getargs_c(self): test_file = self.get_test_loc('ics/bison-src/getargs.c') expected = [ u'Copyright (c) 1984, 1986, 1989, 1992, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.', u'(c) d Free Software Foundation, Inc.', ] check_detection(expected, test_file) @expectedFailure def test_ics_bison_src_getargs_c_lead_copy(self): test_file = 
self.get_test_loc('ics/bison-src/getargs.c') expected = [ u'Copyright (c) 1984, 1986, 1989, 1992, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.', u'Copyright (c) d Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_src_getargs_h(self): test_file = self.get_test_loc('ics/bison-src/getargs.h') expected = [ u'Copyright (c) 1984, 1986, 1989, 1992, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_src_gram_c(self): test_file = self.get_test_loc('ics/bison-src/gram.c') expected = [ u'Copyright (c) 1984, 1986, 1989, 2001, 2002, 2003, 2005, 2006 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_src_gram_h(self): test_file = self.get_test_loc('ics/bison-src/gram.h') expected = [ u'Copyright (c) 1984, 1986, 1989, 1992, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_src_lalr_c(self): test_file = self.get_test_loc('ics/bison-src/lalr.c') expected = [ u'Copyright (c) 1984, 1986, 1989, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_src_lalr_h(self): test_file = self.get_test_loc('ics/bison-src/lalr.h') expected = [ u'Copyright (c) 1984, 1986, 1989, 2000, 2002, 2004 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_src_lr0_c(self): test_file = self.get_test_loc('ics/bison-src/LR0.c') expected = [ u'Copyright (c) 1984, 1986, 1989, 2000, 2001, 2002, 2004, 2005 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_src_lr0_h(self): test_file = self.get_test_loc('ics/bison-src/LR0.h') expected = [ u'Copyright 1984, 1986, 1989, 2000, 2001, 2002 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_src_main_c(self): test_file = 
self.get_test_loc('ics/bison-src/main.c') expected = [ u'Copyright (c) 1984, 1986, 1989, 1992, 1995, 2000, 2001, 2002, 2004, 2005 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_src_muscle_tab_c(self): test_file = self.get_test_loc('ics/bison-src/muscle_tab.c') expected = [ u'Copyright (c) 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_src_muscle_tab_h(self): test_file = self.get_test_loc('ics/bison-src/muscle_tab.h') expected = [ u'Copyright (c) 2001, 2002, 2003 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_src_nullable_c(self): test_file = self.get_test_loc('ics/bison-src/nullable.c') expected = [ u'Copyright (c) 1984, 1989, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_src_nullable_h(self): test_file = self.get_test_loc('ics/bison-src/nullable.h') expected = [ u'Copyright (c) 2000, 2002 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_src_output_c(self): test_file = self.get_test_loc('ics/bison-src/output.c') expected = [ u'Copyright (c) 1984, 1986, 1989, 1992, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_src_output_h(self): test_file = self.get_test_loc('ics/bison-src/output.h') expected = [ u'Copyright (c) 2000, 2001, 2002, 2003, 2006 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_src_parse_gram_c(self): test_file = self.get_test_loc('ics/bison-src/parse-gram.c') expected = [ u'Copyright (c) 1984, 1989, 1990, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.', u'Copyright (c) 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_src_parse_gram_h(self): test_file 
= self.get_test_loc('ics/bison-src/parse-gram.h') expected = [ u'Copyright (c) 1984, 1989, 1990, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_src_print_c(self): test_file = self.get_test_loc('ics/bison-src/print.c') expected = [ u'Copyright (c) 1984, 1986, 1989, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_src_print_h(self): test_file = self.get_test_loc('ics/bison-src/print.h') expected = [ u'Copyright 2000 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_src_reader_c(self): test_file = self.get_test_loc('ics/bison-src/reader.c') expected = [ u'Copyright (c) 1984, 1986, 1989, 1992, 1998, 2000, 2001, 2002, 2003, 2005, 2006 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_src_reader_h(self): test_file = self.get_test_loc('ics/bison-src/reader.h') expected = [ u'Copyright (c) 2000, 2001, 2002, 2003, 2005, 2006 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_src_reduce_c(self): test_file = self.get_test_loc('ics/bison-src/reduce.c') expected = [ u'Copyright (c) 1988, 1989, 2000, 2001, 2002, 2003, 2005, 2006 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_src_scan_skel_c(self): test_file = self.get_test_loc('ics/bison-src/scan-skel.c') expected = [ u'Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_src_scan_skel_l(self): test_file = self.get_test_loc('ics/bison-src/scan-skel.l') expected = [ u'Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_src_state_h(self): test_file = self.get_test_loc('ics/bison-src/state.h') expected = [ u'Copyright (c) 1984, 1989, 
2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_src_symtab_c(self): test_file = self.get_test_loc('ics/bison-src/symtab.c') expected = [ u'Copyright (c) 1984, 1989, 2000, 2001, 2002, 2004, 2005, 2006 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_src_symtab_h(self): test_file = self.get_test_loc('ics/bison-src/symtab.h') expected = [ u'Copyright (c) 1984, 1989, 1992, 2000, 2001, 2002, 2004, 2005, 2006 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_src_system_h(self): test_file = self.get_test_loc('ics/bison-src/system.h') expected = [ u'Copyright (c) 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_src_uniqstr_c(self): test_file = self.get_test_loc('ics/bison-src/uniqstr.c') expected = [ u'Copyright (c) 2002, 2003, 2004, 2005 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_src_vcg_h(self): test_file = self.get_test_loc('ics/bison-src/vcg.h') expected = [ u'Copyright (c) 2001, 2002, 2005 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_tests_actions_at(self): test_file = self.get_test_loc('ics/bison-tests/actions.at') expected = [ u'Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_tests_atconfig(self): test_file = self.get_test_loc('ics/bison-tests/atconfig') expected = [ u'Copyright (c) 2000, 2001, 2003 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_tests_atlocal(self): test_file = self.get_test_loc('ics/bison-tests/atlocal') expected = [ u'Copyright (c) 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_tests_c_at(self): test_file = 
self.get_test_loc('ics/bison-tests/c++.at') expected = [ u'Copyright (c) 2004, 2005 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_tests_calc_at(self): test_file = self.get_test_loc('ics/bison-tests/calc.at') expected = [ u'Copyright (c) 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_tests_conflicts_at(self): test_file = self.get_test_loc('ics/bison-tests/conflicts.at') expected = [ u'Copyright (c) 2002, 2003, 2004, 2005 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_tests_cxx_type_at(self): test_file = self.get_test_loc('ics/bison-tests/cxx-type.at') expected = [ u'Copyright (c) 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_tests_existing_at(self): test_file = self.get_test_loc('ics/bison-tests/existing.at') expected = [ u'Copyright (c) 1989, 1990, 1991, 1992, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_tests_glr_regression_at(self): test_file = self.get_test_loc('ics/bison-tests/glr-regression.at') expected = [ u'Copyright (c) 2002, 2003, 2005, 2006 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_tests_headers_at(self): test_file = self.get_test_loc('ics/bison-tests/headers.at') expected = [ u'Copyright (c) 2001, 2002, 2006 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_tests_local_at(self): test_file = self.get_test_loc('ics/bison-tests/local.at') expected = [ u'Copyright (c) 2003, 2004, 2005, 2006 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_tests_makefile_am(self): test_file = self.get_test_loc('ics/bison-tests/Makefile.am') expected = [ u'Copyright (c) 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software 
Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_tests_output_at(self): test_file = self.get_test_loc('ics/bison-tests/output.at') expected = [ u'Copyright (c) 2000, 2001, 2002, 2005 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_tests_sets_at(self): test_file = self.get_test_loc('ics/bison-tests/sets.at') expected = [ u'Copyright (c) 2001, 2002, 2005 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_tests_synclines_at(self): test_file = self.get_test_loc('ics/bison-tests/synclines.at') expected = [ u'Copyright (c) 2002, 2004, 2005 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_tests_testsuite_at(self): test_file = self.get_test_loc('ics/bison-tests/testsuite.at') expected = [ u'Copyright (c) 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bison_tests_torture_at(self): test_file = self.get_test_loc('ics/bison-tests/torture.at') expected = [ u'Copyright (c) 2001, 2002, 2004, 2005 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_blktrace_blkiomon_c(self): test_file = self.get_test_loc('ics/blktrace/blkiomon.c') expected = [ u'Copyright IBM Corp. 2008', ] check_detection(expected, test_file) def test_ics_blktrace_blkiomon_h(self): test_file = self.get_test_loc('ics/blktrace/blkiomon.h') expected = [ u'Copyright IBM Corp. 2008', ] check_detection(expected, test_file) def test_ics_blktrace_blkparse_c(self): test_file = self.get_test_loc('ics/blktrace/blkparse.c') expected = [ u'Copyright (c) 2005 Jens Axboe <axboe@suse.de>', u'Copyright (c) 2006 Jens Axboe <axboe@kernel.dk>', ] check_detection(expected, test_file) def test_ics_blktrace_blkrawverify_c(self): test_file = self.get_test_loc('ics/blktrace/blkrawverify.c') expected = [ u'Copyright (c) 2006 Alan D. 
Brunelle <Alan.Brunelle@hp.com>', ] check_detection(expected, test_file) def test_ics_blktrace_btrace(self): test_file = self.get_test_loc('ics/blktrace/btrace') expected = [ u'Copyright (c) 2005 Silicon Graphics, Inc.', ] check_detection(expected, test_file) def test_ics_blktrace_btrace_spec(self): test_file = self.get_test_loc('ics/blktrace/btrace.spec') expected = [ u'Copyright (c) 2005 SUSE LINUX Products GmbH, Nuernberg, Germany.', ] check_detection(expected, test_file) def test_ics_blktrace_jhash_h(self): test_file = self.get_test_loc('ics/blktrace/jhash.h') expected = [ u'Copyright (c) 2006. Bob Jenkins (bob_jenkins@burtleburtle.net)', u'Copyright (c) 2009 Jozsef Kadlecsik (kadlec@blackhole.kfki.hu)', ] check_detection(expected, test_file) def test_ics_blktrace_notice(self): test_file = self.get_test_loc('ics/blktrace/NOTICE') expected = [ u'Copyright (c) 1997, 2002, 2005 Free Software Foundation, Inc.', u'Copyright (c) 2005 Jens Axboe <axboe@suse.de>', u'Copyright (c) 2006 Alan D. Brunelle <Alan.Brunelle@hp.com>', u'Copyright (c) 2006 Jens Axboe <axboe@kernel.dk>', u'Copyright (c) 2006. Bob Jenkins (bob_jenkins@burtleburtle.net)', u'Copyright (c) 2009 Jozsef Kadlecsik (kadlec@blackhole.kfki.hu)', u'Copyright IBM Corp. 
2008', u'Copyright (c) 2005 SUSE LINUX Products GmbH, Nuernberg, Germany.', u'Copyright (c) 2005 Silicon Graphics, Inc.', u'Copyright (c) 1989, 1991 Free Software Foundation, Inc.', u'copyrighted by the Free Software Foundation', ] check_detection(expected, test_file) def test_ics_blktrace_rbtree_c(self): test_file = self.get_test_loc('ics/blktrace/rbtree.c') expected = [ u'(c) 1999 Andrea Arcangeli <andrea@suse.de>', u'(c) 2002 David Woodhouse <dwmw2@infradead.org>', ] check_detection(expected, test_file) def test_ics_blktrace_rbtree_h(self): test_file = self.get_test_loc('ics/blktrace/rbtree.h') expected = [ u'(c) 1999 Andrea Arcangeli <andrea@suse.de>', ] check_detection(expected, test_file) def test_ics_blktrace_strverscmp_c(self): test_file = self.get_test_loc('ics/blktrace/strverscmp.c') expected = [ u'Copyright (c) 1997, 2002, 2005 Free Software Foundation, Inc.', u'Jean-Francois Bignolles <bignolle@ecoledoc.ibp.fr>, 1997.' ] check_detection(expected, test_file) def test_ics_blktrace_btreplay_btrecord_c(self): test_file = self.get_test_loc('ics/blktrace-btreplay/btrecord.c') expected = [ u'Copyright (c) 2007 Alan D. Brunelle <Alan.Brunelle@hp.com>', ] check_detection(expected, test_file) def test_ics_blktrace_btreplay_btrecord_h(self): test_file = self.get_test_loc('ics/blktrace-btreplay/btrecord.h') expected = [ u'Copyright (c) 2007 Alan D. Brunelle <Alan.Brunelle@hp.com>', ] check_detection(expected, test_file) def test_ics_blktrace_btreplay_doc_abstract_tex(self): test_file = self.get_test_loc('ics/blktrace-btreplay-doc/abstract.tex') expected = [ u'Copyright (c) 2007 Alan D. 
Brunelle <Alan.Brunelle@hp.com>', ] check_detection(expected, test_file) def test_ics_blktrace_btt_bno_plot_py(self): test_file = self.get_test_loc('ics/blktrace-btt/bno_plot.py') expected = [ u'(c) Copyright 2008 Hewlett-Packard Development Company', ] check_detection(expected, test_file) def test_ics_blktrace_btt_btt_plot_py(self): test_file = self.get_test_loc('ics/blktrace-btt/btt_plot.py') expected = [ u'(c) Copyright 2009 Hewlett-Packard Development Company', u'(c) Average Latencies', u'(c) Average Latencies', ] check_detection(expected, test_file) def test_ics_blktrace_btt_notice(self): test_file = self.get_test_loc('ics/blktrace-btt/NOTICE') expected = [ u'(c) Copyright 2007 Hewlett-Packard Development Company', u'(c) Copyright 2008 Hewlett-Packard Development Company', u'Copyright (c) 2006 Alan D. Brunelle <Alan.Brunelle@hp.com>', u'Copyright (c) 2007 Alan D. Brunelle <Alan.Brunelle@hp.com>', u'(c) Copyright 2008 Hewlett-Packard Development Company', u'(c) Copyright 2009 Hewlett-Packard Development Company', u'Copyright (c) 1989, 1991 Free Software Foundation, Inc.', u'copyrighted by the Free Software Foundation', ] check_detection(expected, test_file) def test_ics_blktrace_btt_plat_c(self): test_file = self.get_test_loc('ics/blktrace-btt/plat.c') expected = [ u'(c) Copyright 2008 Hewlett-Packard Development Company', ] check_detection(expected, test_file) def test_ics_blktrace_btt_q2d_c(self): test_file = self.get_test_loc('ics/blktrace-btt/q2d.c') expected = [ u'(c) Copyright 2007 Hewlett-Packard Development Company', ] check_detection(expected, test_file) def test_ics_blktrace_doc_blktrace_tex(self): test_file = self.get_test_loc('ics/blktrace-doc/blktrace.tex') expected = [ u'Copyright (c) 2005, 2006 Alan D. 
    # NOTE(review): this span of the file arrived whitespace-mangled (real
    # newlines collapsed); it is reproduced here reformatted with the code
    # tokens unchanged. Every test follows the same fixture pattern: load a
    # sample file via self.get_test_loc() and assert the exact ordered list
    # of copyright statements that check_detection() must report for it.
    # The first three lines below complete a method whose `def` line lies
    # before this chunk; the dangling `def` at the very end is continued on
    # the next chunk.
            Brunelle <Alan.Brunelle@hp.com>',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_bluez_android_mk(self):
        test_file = self.get_test_loc('ics/bluetooth-bluez/Android.mk')
        expected = [
            u'Copyright (c) 2008 The Android Open Source Project',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_bluez_copying_lib(self):
        test_file = self.get_test_loc('ics/bluetooth-bluez/COPYING.LIB')
        expected = [
            u'Copyright (c) 1991, 1999 Free Software Foundation, Inc.',
            u'copyrighted by the Free Software Foundation',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_bluez_notice(self):
        test_file = self.get_test_loc('ics/bluetooth-bluez/NOTICE')
        expected = [
            u'Copyright (c) 2004-2008 Marcel Holtmann <marcel@holtmann.org>',
            u'Copyright (c) 2004-2009 Marcel Holtmann <marcel@holtmann.org>',
            u'Copyright (c) 2006-2007 Nokia Corporation',
            u'Copyright (c) 2006-2009 Nokia Corporation',
            u'Copyright (c) 2008 Joao Paulo Rechi Vita',
            u'Copyright (c) 2008-2009 Leonid Movshovich <event.riga@gmail.org>',
            u'Copyright (c) 2008-2009 Nokia Corporation',
            u'Copyright (c) 2009 Lennart Poettering',
            u'Copyright (c) 2009 Intel Corporation',
            u'Copyright (c) 2009 Joao Paulo Rechi Vita',
            u'Copyright (c) 2009-2010 Motorola Inc.',
            u'Copyright (c) 2004-2005 Henryk Ploetz <henryk@ploetzli.ch>',
            u'Copyright (c) 2004-2008 Marcel Holtmann <marcel@holtmann.org>',
            u'Copyright (c) 2004-2009 Marcel Holtmann <marcel@holtmann.org>',
            u'Copyright (c) 2005-2006 Brad Midgley <bmidgley@xmission.com>',
            u'Copyright (c) 2005-2008 Brad Midgley <bmidgley@xmission.com>',
            u'Copyright (c) 2006-2007 Nokia Corporation',
            u'Copyright (c) 1989, 1991 Free Software Foundation, Inc.',
            u'copyrighted by the Free Software Foundation',
            u'Copyright (c) 1991, 1999 Free Software Foundation, Inc.',
            u'copyrighted by the Free Software Foundation',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_bluez_readme(self):
        test_file = self.get_test_loc('ics/bluetooth-bluez/README')
        expected = [
            u'Copyright (c) 2000-2001 Qualcomm Incorporated',
            u'Copyright (c) 2002-2003 Maxim Krasnyansky <maxk@qualcomm.com>',
            u'Copyright (c) 2002-2010 Marcel Holtmann <marcel@holtmann.org>',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_bluez_attrib_att_c(self):
        test_file = self.get_test_loc('ics/bluetooth-bluez-attrib/att.c')
        expected = [
            u'Copyright (c) 2010 Nokia Corporation',
            u'Copyright (c) 2010 Marcel Holtmann <marcel@holtmann.org>',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_bluez_attrib_gatttool_h(self):
        test_file = self.get_test_loc('ics/bluetooth-bluez-attrib/gatttool.h')
        expected = [
            u'Copyright (c) 2011 Nokia Corporation',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_bluez_audio_a2dp_codecs_h(self):
        test_file = self.get_test_loc('ics/bluetooth-bluez-audio/a2dp-codecs.h')
        expected = [
            u'Copyright (c) 2006-2010 Nokia Corporation',
            u'Copyright (c) 2004-2010 Marcel Holtmann <marcel@holtmann.org>',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_bluez_audio_android_audio_hw_c(self):
        test_file = self.get_test_loc('ics/bluetooth-bluez-audio/android_audio_hw.c')
        expected = [
            u'Copyright (c) 2008-2011 The Android Open Source Project',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_bluez_audio_ctl_bluetooth_c(self):
        test_file = self.get_test_loc('ics/bluetooth-bluez-audio/ctl_bluetooth.c')
        expected = [
            u'Copyright (c) 2004-2010 Marcel Holtmann <marcel@holtmann.org>',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_bluez_audio_gateway_c(self):
        test_file = self.get_test_loc('ics/bluetooth-bluez-audio/gateway.c')
        expected = [
            u'Copyright (c) 2006-2010 Nokia Corporation',
            u'Copyright (c) 2004-2010 Marcel Holtmann <marcel@holtmann.org>',
            u'Copyright (c) 2008-2009 Leonid Movshovich <event.riga@gmail.org>',
            u'Copyright (c) 2010 ProFUSION',
        ]
        check_detection(expected, test_file)

    # expectedFailure: detection currently truncates the trailing company
    # name ("ProFUSION embedded systems"); this pins the desired behavior.
    @expectedFailure
    def test_ics_bluetooth_bluez_audio_gateway_c_trail_name(self):
        test_file = self.get_test_loc('ics/bluetooth-bluez-audio/gateway.c')
        expected = [
            u'Copyright (c) 2006-2010 Nokia Corporation',
            u'Copyright (c) 2004-2010 Marcel Holtmann <marcel@holtmann.org>',
            u'Copyright (c) 2008-2009 Leonid Movshovich <event.riga@gmail.org>',
            u'Copyright (c) 2010 ProFUSION embedded systems',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_bluez_audio_liba2dp_c(self):
        test_file = self.get_test_loc('ics/bluetooth-bluez-audio/liba2dp.c')
        expected = [
            u'Copyright (c) 2006-2007 Nokia Corporation',
            u'Copyright (c) 2004-2008 Marcel Holtmann <marcel@holtmann.org>',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_bluez_audio_media_c(self):
        test_file = self.get_test_loc('ics/bluetooth-bluez-audio/media.c')
        expected = [
            u'Copyright (c) 2006-2007 Nokia Corporation',
            u'Copyright (c) 2004-2009 Marcel Holtmann <marcel@holtmann.org>',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_bluez_audio_sink_c(self):
        test_file = self.get_test_loc('ics/bluetooth-bluez-audio/sink.c')
        expected = [
            u'Copyright (c) 2006-2007 Nokia Corporation',
            u'Copyright (c) 2004-2009 Marcel Holtmann <marcel@holtmann.org>',
            u'Copyright (c) 2009-2010 Motorola Inc.',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_bluez_audio_source_c(self):
        test_file = self.get_test_loc('ics/bluetooth-bluez-audio/source.c')
        expected = [
            u'Copyright (c) 2006-2010 Nokia Corporation',
            u'Copyright (c) 2004-2010 Marcel Holtmann <marcel@holtmann.org>',
            u'Copyright (c) 2009 Joao Paulo Rechi Vita',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_bluez_audio_telephony_maemo5_c(self):
        test_file = self.get_test_loc('ics/bluetooth-bluez-audio/telephony-maemo5.c')
        expected = [
            u'Copyright (c) 2008-2010 Nokia Corporation',
            u'Copyright (c) 2004-2010 Marcel Holtmann <marcel@holtmann.org>',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_bluez_audio_telephony_ofono_c(self):
        test_file = self.get_test_loc('ics/bluetooth-bluez-audio/telephony-ofono.c')
        expected = [
            u'Copyright (c) 2009-2010 Intel Corporation',
            u'Copyright (c) 2006-2009 Nokia Corporation',
            u'Copyright (c) 2004-2010 Marcel Holtmann <marcel@holtmann.org>',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_bluez_btio_btio_c(self):
        test_file = self.get_test_loc('ics/bluetooth-bluez-btio/btio.c')
        expected = [
            u'Copyright (c) 2009-2010 Marcel Holtmann <marcel@holtmann.org>',
            u'Copyright (c) 2009-2010 Nokia Corporation',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_bluez_common_android_bluez_c(self):
        test_file = self.get_test_loc('ics/bluetooth-bluez-common/android_bluez.c')
        expected = [
            u'Copyright (c) 2004-2009 Marcel Holtmann <marcel@holtmann.org>',
            u'Copyright (c) 2009 The Android Open Source Project',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_bluez_compat_bnep_c(self):
        test_file = self.get_test_loc('ics/bluetooth-bluez-compat/bnep.c')
        expected = [
            u'Copyright (c) 2002-2003 Maxim Krasnyansky <maxk@qualcomm.com>',
            u'Copyright (c) 2002-2010 Marcel Holtmann <marcel@holtmann.org>',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_bluez_compat_fakehid_c(self):
        test_file = self.get_test_loc('ics/bluetooth-bluez-compat/fakehid.c')
        expected = [
            u'Copyright (c) 2003-2010 Marcel Holtmann <marcel@holtmann.org>',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_bluez_doc_adapter_api_txt(self):
        test_file = self.get_test_loc('ics/bluetooth-bluez-doc/adapter-api.txt')
        expected = [
            u'Copyright (c) 2004-2010 Marcel Holtmann <marcel@holtmann.org>',
            u'Copyright (c) 2005-2006 Johan Hedberg <johan.hedberg@nokia.com>',
            u'Copyright (c) 2005-2006 Claudio Takahasi <claudio.takahasi@indt.org.br>',
            u'Copyright (c) 2006-2007 Luiz von Dentz',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_bluez_doc_agent_api_txt(self):
        test_file = self.get_test_loc('ics/bluetooth-bluez-doc/agent-api.txt')
        expected = [
            u'Copyright (c) 2004-2010 Marcel Holtmann <marcel@holtmann.org>',
            u'Copyright (c) 2005-2006 Johan Hedberg <johan.hedberg@nokia.com>',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_bluez_doc_attribute_api_txt(self):
        test_file = self.get_test_loc('ics/bluetooth-bluez-doc/attribute-api.txt')
        expected = [
            u'Copyright (c) 2004-2010 Marcel Holtmann <marcel@holtmann.org>',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_bluez_doc_audio_api_txt(self):
        test_file = self.get_test_loc('ics/bluetooth-bluez-doc/audio-api.txt')
        expected = [
            u'Copyright (c) 2004-2010 Marcel Holtmann <marcel@holtmann.org>',
            u'Copyright (c) 2005-2007 Johan Hedberg <johan.hedberg@nokia.com>',
            u'Copyright (c) 2005-2006 Brad Midgley <bmidgley@xmission.com>',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_bluez_doc_control_api_txt(self):
        test_file = self.get_test_loc('ics/bluetooth-bluez-doc/control-api.txt')
        expected = [
            u'Copyright (c) 2004-2010 Marcel Holtmann <marcel@holtmann.org>',
            u'Copyright (c) 2007-2008 David Stockwell <dstockwell@frequency-one.com>',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_bluez_doc_mgmt_api_txt(self):
        test_file = self.get_test_loc('ics/bluetooth-bluez-doc/mgmt-api.txt')
        expected = [
            u'Copyright (c) 2008-2009 Marcel Holtmann <marcel@holtmann.org>',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_bluez_doc_oob_api_txt(self):
        test_file = self.get_test_loc('ics/bluetooth-bluez-doc/oob-api.txt')
        expected = [
            u'Copyright (c) 2011 Szymon Janc <szymon.janc@tieto.com>',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_bluez_doc_sap_api_txt(self):
        test_file = self.get_test_loc('ics/bluetooth-bluez-doc/sap-api.txt')
        expected = [
            u'Copyright (c) 2010 ST-Ericsson SA',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_bluez_gdbus_gdbus_h(self):
        test_file = self.get_test_loc('ics/bluetooth-bluez-gdbus/gdbus.h')
        expected = [
            u'Copyright (c) 2004-2011 Marcel Holtmann <marcel@holtmann.org>',
        ]
        check_detection(expected, test_file)

    # Truncated at the chunk boundary: the name of this method continues on
    # the next chunk.
    def
    # NOTE(review): reformatted whitespace-mangled span; code tokens are
    # unchanged. Fixture pattern throughout: get_test_loc() loads a sample
    # file and check_detection() must report exactly the listed copyright
    # statements, in order. The first line below is the name/signature of a
    # method whose `def` keyword sits at the end of the previous chunk.
    test_ics_bluetooth_bluez_gdbus_notice(self):
        test_file = self.get_test_loc('ics/bluetooth-bluez-gdbus/NOTICE')
        expected = [
            u'Copyright (c) 2004-2009 Marcel Holtmann <marcel@holtmann.org>',
            u'Copyright (c) 1989, 1991 Free Software Foundation, Inc.',
            u'copyrighted by the Free Software Foundation',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_bluez_health_hdp_c(self):
        test_file = self.get_test_loc('ics/bluetooth-bluez-health/hdp.c')
        expected = [
            u'Copyright (c) 2010 GSyC/LibreSoft, Universidad Rey Juan Carlos. Authors Santiago Carot Nemesio',
        ]
        check_detection(expected, test_file)

    # expectedFailure: detection currently swallows the trailing "Authors"
    # line into the copyright statement; this pins the desired split.
    @expectedFailure
    def test_ics_bluetooth_bluez_health_hdp_c_extra_author(self):
        test_file = self.get_test_loc('ics/bluetooth-bluez-health/hdp.c')
        expected = [
            u'Copyright (c) 2010 GSyC/LibreSoft, Universidad Rey Juan Carlos.',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_bluez_health_mcap_c(self):
        test_file = self.get_test_loc('ics/bluetooth-bluez-health/mcap.c')
        expected = [
            u'Copyright (c) 2010 GSyC/LibreSoft, Universidad Rey Juan Carlos.',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_bluez_health_mcap_h(self):
        test_file = self.get_test_loc('ics/bluetooth-bluez-health/mcap.h')
        expected = [
            u'Copyright (c) 2010 GSyC/LibreSoft, Universidad Rey Juan Carlos.',
            u'Copyright (c) 2010 Signove',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_bluez_lib_bluetooth_c(self):
        test_file = self.get_test_loc('ics/bluetooth-bluez-lib/bluetooth.c')
        expected = [
            u'Copyright (c) 2000-2001 Qualcomm Incorporated',
            u'Copyright (c) 2002-2003 Maxim Krasnyansky <maxk@qualcomm.com>',
            u'Copyright (c) 2002-2010 Marcel Holtmann <marcel@holtmann.org>',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_bluez_lib_notice(self):
        test_file = self.get_test_loc('ics/bluetooth-bluez-lib/NOTICE')
        expected = [
            u'Copyright (c) 2000-2001 Qualcomm Incorporated',
            u'Copyright (c) 2001-2002 Nokia Corporation',
            u'Copyright (c) 2002-2003 Maxim Krasnyansky <maxk@qualcomm.com>',
            u'Copyright (c) 2002-2003 Stephen Crane <steve.crane@rococosoft.com>',
            u'Copyright (c) 2002-2009 Marcel Holtmann <marcel@holtmann.org>',
            u'Copyright (c) 2001-2002 Nokia Corporation',
            u'Copyright (c) 2002-2003 Maxim Krasnyansky <maxk@qualcomm.com>',
            u'Copyright (c) 2002-2009 Marcel Holtmann <marcel@holtmann.org>',
            u'Copyright (c) 2002-2003 Stephen Crane <steve.crane@rococosoft.com>',
            u'Copyright (c) 1989, 1991 Free Software Foundation, Inc.',
            u'copyrighted by the Free Software Foundation',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_bluez_lib_sdp_c(self):
        test_file = self.get_test_loc('ics/bluetooth-bluez-lib/sdp.c')
        expected = [
            u'Copyright (c) 2001-2002 Nokia Corporation',
            u'Copyright (c) 2002-2003 Maxim Krasnyansky <maxk@qualcomm.com>',
            u'Copyright (c) 2002-2010 Marcel Holtmann <marcel@holtmann.org>',
            u'Copyright (c) 2002-2003 Stephen Crane <steve.crane@rococosoft.com>',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_bluez_lib_uuid_c(self):
        test_file = self.get_test_loc('ics/bluetooth-bluez-lib/uuid.c')
        expected = [
            u'Copyright (c) 2011 Nokia Corporation',
            u'Copyright (c) 2011 Marcel Holtmann <marcel@holtmann.org>',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_bluez_lib_bluetooth_cmtp_h(self):
        test_file = self.get_test_loc('ics/bluetooth-bluez-lib-bluetooth/cmtp.h')
        expected = [
            u'Copyright (c) 2002-2010 Marcel Holtmann <marcel@holtmann.org>',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_bluez_plugins_builtin_h(self):
        test_file = self.get_test_loc('ics/bluetooth-bluez-plugins/builtin.h')
        expected = [
            u'Copyright (c) 2004-2009 Marcel Holtmann <marcel@holtmann.org>',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_bluez_plugins_dbusoob_c(self):
        test_file = self.get_test_loc('ics/bluetooth-bluez-plugins/dbusoob.c')
        expected = [
            u'Copyright (c) 2011 ST-Ericsson SA',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_bluez_sap_main_c(self):
        test_file = self.get_test_loc('ics/bluetooth-bluez-sap/main.c')
        expected = [
            u'Copyright (c) 2010 Instituto',
        ]
        check_detection(expected, test_file)

    # expectedFailure: the trailing holder name ("Instituto Nokia de
    # Tecnologia - INdT") is currently truncated after the first word.
    @expectedFailure
    def test_ics_bluetooth_bluez_sap_main_c_trail_institut(self):
        test_file = self.get_test_loc('ics/bluetooth-bluez-sap/main.c')
        expected = [
            u'Copyright (c) 2010 Instituto Nokia de Tecnologia - INdT',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_bluez_sap_sap_h(self):
        test_file = self.get_test_loc('ics/bluetooth-bluez-sap/sap.h')
        expected = [
            u'Copyright (c) 2010 Instituto',
            u'Copyright (c) 2010 ST-Ericsson SA',
        ]
        check_detection(expected, test_file)

    @expectedFailure
    def test_ics_bluetooth_bluez_sap_sap_h_trail_institut(self):
        test_file = self.get_test_loc('ics/bluetooth-bluez-sap/sap.h')
        expected = [
            u'Copyright (c) 2010 Instituto Nokia de Tecnologia - INdT',
            u'Copyright (c) 2010 ST-Ericsson SA',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_bluez_sap_sap_dummy_c(self):
        test_file = self.get_test_loc('ics/bluetooth-bluez-sap/sap-dummy.c')
        expected = [
            u'Copyright (c) 2010 ST-Ericsson SA',
            u'Copyright (c) 2011 Tieto Poland',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_bluez_sap_server_c(self):
        test_file = self.get_test_loc('ics/bluetooth-bluez-sap/server.c')
        expected = [
            u'Copyright (c) 2010 Instituto',
            u'Copyright (c) 2010 ST-Ericsson SA',
            u'Copyright (c) 2011 Tieto Poland',
        ]
        check_detection(expected, test_file)

    @expectedFailure
    def test_ics_bluetooth_bluez_sap_server_c_trail_institut(self):
        test_file = self.get_test_loc('ics/bluetooth-bluez-sap/server.c')
        expected = [
            u'Copyright (c) 2010 Instituto Nokia de Tecnologia - INdT',
            u'Copyright (c) 2010 ST-Ericsson SA',
            u'Copyright (c) 2011 Tieto Poland',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_bluez_sap_server_h(self):
        test_file = self.get_test_loc('ics/bluetooth-bluez-sap/server.h')
        expected = [
            u'Copyright (c) 2010 ST-Ericsson SA',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_bluez_sbc_formats_h(self):
        test_file = self.get_test_loc('ics/bluetooth-bluez-sbc/formats.h')
        expected = [
            u'Copyright (c) 2004-2010 Marcel Holtmann <marcel@holtmann.org>',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_bluez_sbc_sbc_c(self):
        test_file = self.get_test_loc('ics/bluetooth-bluez-sbc/sbc.c')
        expected = [
            u'Copyright (c) 2008-2010 Nokia Corporation',
            u'Copyright (c) 2004-2010 Marcel Holtmann <marcel@holtmann.org>',
            u'Copyright (c) 2004-2005 Henryk Ploetz <henryk@ploetzli.ch>',
            u'Copyright (c) 2005-2008 Brad Midgley <bmidgley@xmission.com>',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_bluez_sbc_sbc_h(self):
        test_file = self.get_test_loc('ics/bluetooth-bluez-sbc/sbc.h')
        expected = [
            u'Copyright (c) 2008-2010 Nokia Corporation',
            u'Copyright (c) 2004-2010 Marcel Holtmann <marcel@holtmann.org>',
            u'Copyright (c) 2004-2005 Henryk Ploetz <henryk@ploetzli.ch>',
            u'Copyright (c) 2005-2006 Brad Midgley <bmidgley@xmission.com>',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_bluez_sbc_sbc_primitives_iwmmxt_c(self):
        test_file = self.get_test_loc('ics/bluetooth-bluez-sbc/sbc_primitives_iwmmxt.c')
        expected = [
            u'Copyright (c) 2010 Keith Mok <ek9852@gmail.com>',
            u'Copyright (c) 2008-2010 Nokia Corporation',
            u'Copyright (c) 2004-2010 Marcel Holtmann <marcel@holtmann.org>',
            u'Copyright (c) 2004-2005 Henryk Ploetz <henryk@ploetzli.ch>',
            u'Copyright (c) 2005-2006 Brad Midgley <bmidgley@xmission.com>',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_bluez_sbc_sbcdec_c(self):
        test_file = self.get_test_loc('ics/bluetooth-bluez-sbc/sbcdec.c')
        expected = [
            u'Copyright (c) 2008-2010 Nokia Corporation',
            u'Copyright (c) 2004-2010 Marcel Holtmann <marcel@holtmann.org>',
            u'(c) 2004-2010 Marcel Holtmann',
        ]
        check_detection(expected, test_file)

    # expectedFailure: a bare "(c)" line should be normalized to a full
    # "Copyright (c)" statement.
    @expectedFailure
    def test_ics_bluetooth_bluez_sbc_sbcdec_c_lead_copy(self):
        test_file = self.get_test_loc('ics/bluetooth-bluez-sbc/sbcdec.c')
        expected = [
            u'Copyright (c) 2008-2010 Nokia Corporation',
            u'Copyright (c) 2004-2010 Marcel Holtmann <marcel@holtmann.org>',
            u'Copyright (c) 2004-2010 Marcel Holtmann',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_bluez_sbc_sbctester_c(self):
        test_file = self.get_test_loc('ics/bluetooth-bluez-sbc/sbctester.c')
        expected = [
            u'Copyright (c) 2008-2010 Nokia Corporation',
            u'Copyright (c) 2007-2010 Marcel Holtmann <marcel@holtmann.org>',
            u'Copyright (c) 2007-2008 Frederic Dalleau <fdalleau@free.fr>',
            u'(c) 2007-2010 Marcel Holtmann',
            u'(c) 2007-2008 Frederic Dalleau',
        ]
        check_detection(expected, test_file)

    @expectedFailure
    def test_ics_bluetooth_bluez_sbc_sbctester_c_lead_copy_lead_copy(self):
        test_file = self.get_test_loc('ics/bluetooth-bluez-sbc/sbctester.c')
        expected = [
            u'Copyright (c) 2008-2010 Nokia Corporation',
            u'Copyright (c) 2007-2010 Marcel Holtmann <marcel@holtmann.org>',
            u'Copyright (c) 2007-2008 Frederic Dalleau <fdalleau@free.fr>',
            u'Copyright (c) 2007-2010 Marcel Holtmann',
            u'Copyright (c) 2007-2008 Frederic Dalleau',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_bluez_src_dbus_common_c(self):
        test_file = self.get_test_loc('ics/bluetooth-bluez-src/dbus-common.c')
        expected = [
            u'Copyright (c) 2006-2010 Nokia Corporation',
            u'Copyright (c) 2004-2010 Marcel Holtmann <marcel@holtmann.org>',
            u'Copyright (c) 2005-2007 Johan Hedberg <johan.hedberg@nokia.com>',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_bluez_src_error_c(self):
        test_file = self.get_test_loc('ics/bluetooth-bluez-src/error.c')
        expected = [
            u'Copyright (c) 2006-2010 Nokia Corporation',
            u'Copyright (c) 2004-2010 Marcel Holtmann <marcel@holtmann.org>',
            u'Copyright (c) 2007-2008 Fabien Chevalier <fabchevalier@free.fr>',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_bluez_src_notice(self):
        test_file = self.get_test_loc('ics/bluetooth-bluez-src/NOTICE')
        expected = [
            u'Copyright (c) 2000-2001 Qualcomm Incorporated',
            u'Copyright (c) 2002-2003 Maxim Krasnyansky <maxk@qualcomm.com>',
            u'Copyright (c) 2002-2009 Marcel Holtmann <marcel@holtmann.org>',
            u'Copyright (c) 1989, 1991 Free Software Foundation, Inc.',
            u'copyrighted by the Free Software Foundation',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_bluez_src_sdp_xml_c(self):
        test_file = self.get_test_loc('ics/bluetooth-bluez-src/sdp-xml.c')
        expected = [
            u'Copyright (c) 2005-2010 Marcel Holtmann <marcel@holtmann.org>',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_bluez_test_attest_c(self):
        test_file = self.get_test_loc('ics/bluetooth-bluez-test/attest.c')
        expected = [
            u'Copyright (c) 2001-2010 Marcel Holtmann <marcel@holtmann.org>',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_bluez_test_avtest_c(self):
        test_file = self.get_test_loc('ics/bluetooth-bluez-test/avtest.c')
        expected = [
            u'Copyright (c) 2007-2010 Marcel Holtmann <marcel@holtmann.org>',
            u'Copyright (c) 2009-2010 Nokia Corporation',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_bluez_test_gaptest_c(self):
        test_file = self.get_test_loc('ics/bluetooth-bluez-test/gaptest.c')
        expected = [
            u'Copyright (c) 2007-2010 Marcel Holtmann <marcel@holtmann.org>',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_bluez_test_hciemu_c(self):
        test_file = self.get_test_loc('ics/bluetooth-bluez-test/hciemu.c')
        expected = [
            u'Copyright (c) 2000-2002 Maxim Krasnyansky <maxk@qualcomm.com>',
            u'Copyright (c) 2003-2010 Marcel Holtmann <marcel@holtmann.org>',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_bluez_test_ipctest_c(self):
        test_file = self.get_test_loc('ics/bluetooth-bluez-test/ipctest.c')
        expected = [
            u'Copyright (c) 2006-2010 Nokia Corporation',
            u'Copyright (c) 2004-2010 Marcel Holtmann <marcel@holtmann.org>',
            u'Copyright (c) 2009 Lennart Poettering',
            u'Copyright (c) 2008 Joao Paulo Rechi Vita',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_bluez_test_notice(self):
        test_file = self.get_test_loc('ics/bluetooth-bluez-test/NOTICE')
        expected = [
            u'Copyright (c) 2000-2001 Qualcomm Incorporated',
            u'Copyright (c) 2000-2002 Maxim Krasnyansky <maxk@qualcomm.com>',
            u'Copyright (c) 2001-2009 Marcel Holtmann <marcel@holtmann.org>',
            u'Copyright (c) 2002-2003 Maxim Krasnyansky <maxk@qualcomm.com>',
            u'Copyright (c) 2002-2009 Marcel Holtmann <marcel@holtmann.org>',
            u'Copyright (c) 2003-2009 Marcel Holtmann <marcel@holtmann.org>',
            u'Copyright (c) 2004-2009 Marcel Holtmann <marcel@holtmann.org>',
            u'Copyright (c) 2005-2009 Marcel Holtmann <marcel@holtmann.org>',
            u'Copyright (c) 2007-2009 Marcel Holtmann <marcel@holtmann.org>',
            u'Copyright (c) 2009 Marcel Holtmann <marcel@holtmann.org>',
            u'Copyright (c) 2009 Nokia Corporation',
            u'Copyright (c) 1989, 1991 Free Software Foundation, Inc.',
            u'copyrighted by the Free Software Foundation',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_bluez_tools_hciattach_ath3k_c(self):
        test_file = self.get_test_loc('ics/bluetooth-bluez-tools/hciattach_ath3k.c')
        expected = [
            u'Copyright (c) 2009-2010 Atheros Communications Inc.',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_bluez_tools_hciattach_qualcomm_c(self):
        test_file = self.get_test_loc('ics/bluetooth-bluez-tools/hciattach_qualcomm.c')
        expected = [
            u'Copyright (c) 2005-2010 Marcel Holtmann <marcel@holtmann.org>',
            u'Copyright (c) 2010, Code Aurora Forum.',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_bluez_tools_hciattach_ti_c(self):
        test_file = self.get_test_loc('ics/bluetooth-bluez-tools/hciattach_ti.c')
        expected = [
            u'Copyright (c) 2007-2008 Texas Instruments, Inc.',
            u'Copyright (c) 2005-2010 Marcel Holtmann <marcel@holtmann.org>',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_bluez_tools_hid2hci_c(self):
        test_file = self.get_test_loc('ics/bluetooth-bluez-tools/hid2hci.c')
        expected = [
            u'Copyright (c) 2003-2010 Marcel Holtmann <marcel@holtmann.org>',
            u'Copyright (c) 2008-2009 Mario Limonciello <mario_limonciello@dell.com>',
            u'Copyright (c) 2009-2011 Kay Sievers <kay.sievers@vrfy.org>',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_bluez_tools_lexer_c(self):
        test_file = self.get_test_loc('ics/bluetooth-bluez-tools/lexer.c')
        expected = [
            u'Copyright (c) 2002-2008 Marcel Holtmann <marcel@holtmann.org>',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_bluez_tools_notice(self):
        test_file = self.get_test_loc('ics/bluetooth-bluez-tools/NOTICE')
        expected = [
            u'Copyright (c) 2000-2001 Qualcomm Incorporated',
            u'Copyright (c) 2001-2002 Nokia Corporation',
            u'Copyright (c) 2002-2003 Jean Tourrilhes <jt@hpl.hp.com>',
            u'Copyright (c) 2002-2003 Maxim Krasnyansky <maxk@qualcomm.com>',
            u'Copyright (c) 2002-2003 Stephen Crane <steve.crane@rococosoft.com>',
            u'Copyright (c) 2002-2009 Marcel Holtmann <marcel@holtmann.org>',
            u'Copyright (c) 2003-2009 Marcel Holtmann <marcel@holtmann.org>',
            u'Copyright (c) 2004-2009 Marcel Holtmann <marcel@holtmann.org>',
            u'Copyright (c) 2005-2009 Marcel Holtmann <marcel@holtmann.org>',
            u'Copyright (c) 2006-2007 Nokia Corporation',
            u'Copyright (c) 2007-2008 Texas Instruments, Inc.',
            u'Copyright (c) 1989, 1991 Free Software Foundation, Inc.',
            u'copyrighted by the Free Software Foundation',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_bluez_tools_sdptool_c(self):
        test_file = self.get_test_loc('ics/bluetooth-bluez-tools/sdptool.c')
        expected = [
            u'Copyright (c) 2001-2002 Nokia Corporation',
            u'Copyright (c) 2002-2003 Maxim Krasnyansky <maxk@qualcomm.com>',
            u'Copyright (c) 2002-2010 Marcel Holtmann <marcel@holtmann.org>',
            u'Copyright (c) 2002-2003 Stephen Crane <steve.crane@rococosoft.com>',
            u'Copyright (c) 2002-2003 Jean Tourrilhes <jt@hpl.hp.com>',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_bluez_tools_ubcsp_c(self):
        test_file = self.get_test_loc('ics/bluetooth-bluez-tools/ubcsp.c')
        expected = [
            u'Copyright (c) 2000-2005 CSR Ltd.',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_glib_acinclude_m4(self):
        test_file = self.get_test_loc('ics/bluetooth-glib/acinclude.m4')
        expected = [
            u'Copyright (c) 2001-2002 Free Software Foundation, Inc.',
            u'Copyright (c) 1999-2003 Free Software Foundation, Inc.',
            u'Copyright (c) 2002 Free Software Foundation, Inc.',
            u'Copyright (c) 2002 Free Software Foundation, Inc.',
            u'Copyright (c) 2003 Free Software Foundation, Inc.',
            u'Copyright (c) 1997-2002 Free Software Foundation, Inc.',
            u'Copyright (c) 1997-2002 Free Software Foundation, Inc.',
            u'Copyright (c) 1997-2002 Free Software Foundation, Inc.',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_glib_copying(self):
        test_file = self.get_test_loc('ics/bluetooth-glib/COPYING')
        expected = [
            u'Copyright (c) 1991 Free Software Foundation, Inc.',
            u'copyrighted by the Free Software Foundation',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_glib_glib_h(self):
        test_file = self.get_test_loc('ics/bluetooth-glib/glib.h')
        expected = [
            u'Copyright (c) 1995-1997 Peter Mattis, Spencer Kimball and Josh MacDonald',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_glib_glib_gettextize_in(self):
        test_file = self.get_test_loc('ics/bluetooth-glib/glib-gettextize.in')
        expected = [
            u'Copyright (c) 1995-1998, 2000, 2001 Free Software Foundation, Inc.',
            u'Copyright (c) 1995-1998, 2000, 2001 Free Software Foundation, Inc.',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_glib_docs_reference_glib_regex_syntax_sgml(self):
        test_file = self.get_test_loc('ics/bluetooth-glib-docs-reference-glib/regex-syntax.sgml')
        expected = [
            u'Copyright (c) 1997-2006 University of Cambridge.',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_glib_gio_gappinfo_c(self):
        test_file = self.get_test_loc('ics/bluetooth-glib-gio/gappinfo.c')
        expected = [
            u'Copyright (c) 2006-2007 Red Hat, Inc.',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_glib_gio_gbufferedinputstream_c(self):
        test_file = self.get_test_loc('ics/bluetooth-glib-gio/gbufferedinputstream.c')
        expected = [
            u'Copyright (c) 2006-2007 Red Hat, Inc.',
            u'Copyright (c) 2007 Jurg Billeter',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_glib_gio_gdatainputstream_c(self):
        test_file = self.get_test_loc('ics/bluetooth-glib-gio/gdatainputstream.c')
        expected = [
            u'Copyright (c) 2006-2007 Red Hat, Inc.',
            u'Copyright (c) 2007 Jurg Billeter',
            u'Copyright (c) 2009 Codethink Limited',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_glib_gio_gdesktopappinfo_c(self):
        test_file = self.get_test_loc('ics/bluetooth-glib-gio/gdesktopappinfo.c')
        expected = [
            u'Copyright (c) 2006-2007 Red Hat, Inc.',
            u'Copyright (c) 2007 Ryan Lortie',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_glib_gio_gemblem_c(self):
        test_file = self.get_test_loc('ics/bluetooth-glib-gio/gemblem.c')
        expected = [
            u'Copyright (c) 2008 Clemens N. Buss <cebuzz@gmail.com>',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_glib_gio_gmount_c(self):
        test_file = self.get_test_loc('ics/bluetooth-glib-gio/gmount.c')
        expected = [
            u'Copyright (c) 2006-2008 Red Hat, Inc.',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_glib_gio_gwin32mount_c(self):
        test_file = self.get_test_loc('ics/bluetooth-glib-gio/gwin32mount.c')
        expected = [
            u'Copyright (c) 2006-2007 Red Hat, Inc.',
            u'Copyright (c) 2008 Hans Breuer',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_glib_gio_fam_fam_module_c(self):
        test_file = self.get_test_loc('ics/bluetooth-glib-gio-fam/fam-module.c')
        expected = [
            u'Copyright (c) 2006-2007 Red Hat, Inc.',
            u'Copyright (c) 2007 Sebastian Droge.',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_glib_gio_fen_fen_data_c(self):
        test_file = self.get_test_loc('ics/bluetooth-glib-gio-fen/fen-data.c')
        expected = [
            u'Copyright (c) 2008 Sun Microsystems, Inc.',
        ]
        check_detection(expected, test_file)

    # Truncated at the chunk boundary: the body of this method continues on
    # the next chunk.
    def test_ics_bluetooth_glib_gio_fen_gfendirectorymonitor_c(self):
        # NOTE(review): reformatted whitespace-mangled span; code tokens are
        # unchanged. The two statements below are the body of a method whose
        # `def` line ends the previous chunk; the dangling `def` at the very
        # end belongs to a method that continues past this chunk.
        test_file = self.get_test_loc('ics/bluetooth-glib-gio-fen/gfendirectorymonitor.c')
        expected = [
            u'Copyright (c) 2006-2007 Red Hat, Inc.',
            u'Copyright (c) 2007 Sebastian Droge.',
            u'Copyright (c) 2008 Sun Microsystems, Inc.',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_glib_gio_inotify_inotify_diag_c(self):
        test_file = self.get_test_loc('ics/bluetooth-glib-gio-inotify/inotify-diag.c')
        expected = [
            u'Copyright (c) 2005 John McCutchan',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_glib_gio_inotify_inotify_diag_h(self):
        test_file = self.get_test_loc('ics/bluetooth-glib-gio-inotify/inotify-diag.h')
        expected = [
            u'Copyright (c) 2006 John McCutchan <john@johnmccutchan.com>',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_glib_gio_inotify_inotify_helper_c(self):
        test_file = self.get_test_loc('ics/bluetooth-glib-gio-inotify/inotify-helper.c')
        expected = [
            u'Copyright (c) 2007 John McCutchan',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_glib_gio_inotify_inotify_path_c(self):
        test_file = self.get_test_loc('ics/bluetooth-glib-gio-inotify/inotify-path.c')
        expected = [
            u'Copyright (c) 2006 John McCutchan',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_glib_gio_tests_buffered_input_stream_c(self):
        test_file = self.get_test_loc('ics/bluetooth-glib-gio-tests/buffered-input-stream.c')
        expected = [
            u'Copyright (c) 2008 Red Hat, Inc. Authors',
        ]
        check_detection(expected, test_file)

    # expectedFailure: the trailing "Authors" label should not be folded
    # into the copyright statement.
    @expectedFailure
    def test_ics_bluetooth_glib_gio_tests_buffered_input_stream_c_extra_author(self):
        test_file = self.get_test_loc('ics/bluetooth-glib-gio-tests/buffered-input-stream.c')
        expected = [
            u'Copyright (c) 2008 Red Hat, Inc.',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_glib_gio_tests_desktop_app_info_c(self):
        test_file = self.get_test_loc('ics/bluetooth-glib-gio-tests/desktop-app-info.c')
        expected = [
            u'Copyright (c) 2008 Red Hat, Inc',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_glib_gio_tests_filter_streams_c(self):
        test_file = self.get_test_loc('ics/bluetooth-glib-gio-tests/filter-streams.c')
        expected = [
            u'Copyright (c) 2009 Codethink Limited',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_glib_gio_tests_memory_input_stream_c(self):
        test_file = self.get_test_loc('ics/bluetooth-glib-gio-tests/memory-input-stream.c')
        expected = [
            u'Copyright (c) 2007 Imendio AB Authors Tim Janik',
        ]
        check_detection(expected, test_file)

    @expectedFailure
    def test_ics_bluetooth_glib_gio_tests_memory_input_stream_c_extra_author(self):
        test_file = self.get_test_loc('ics/bluetooth-glib-gio-tests/memory-input-stream.c')
        expected = [
            u'Copyright (c) 2007 Imendio AB',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_glib_gio_tests_simple_async_result_c(self):
        test_file = self.get_test_loc('ics/bluetooth-glib-gio-tests/simple-async-result.c')
        expected = [
            u'Copyright (c) 2009 Ryan Lortie',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_glib_gio_win32_gwinhttpfile_c(self):
        test_file = self.get_test_loc('ics/bluetooth-glib-gio-win32/gwinhttpfile.c')
        expected = [
            u'Copyright (c) 2006-2007 Red Hat, Inc.',
            u'Copyright (c) 2008 Novell, Inc.',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_glib_gio_win32_winhttp_h(self):
        test_file = self.get_test_loc('ics/bluetooth-glib-gio-win32/winhttp.h')
        expected = [
            u'Copyright (c) 2007 Francois Gouget',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_glib_gio_xdgmime_test_mime_c(self):
        test_file = self.get_test_loc('ics/bluetooth-glib-gio-xdgmime/test-mime.c')
        expected = [
            u'Copyright (c) 2003,2004 Red Hat, Inc.',
            u'Copyright (c) 2003,2004 Jonathan Blandford <jrb@alum.mit.edu>',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_glib_gio_xdgmime_xdgmime_h(self):
        test_file = self.get_test_loc('ics/bluetooth-glib-gio-xdgmime/xdgmime.h')
        expected = [
            u'Copyright (c) 2003 Red Hat, Inc.',
            u'Copyright (c) 2003 Jonathan Blandford <jrb@alum.mit.edu>',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_glib_gio_xdgmime_xdgmimealias_c(self):
        test_file = self.get_test_loc('ics/bluetooth-glib-gio-xdgmime/xdgmimealias.c')
        expected = [
            u'Copyright (c) 2004 Red Hat, Inc.',
            u'Copyright (c) 2004 Matthias Clasen <mclasen@redhat.com>',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_glib_gio_xdgmime_xdgmimealias_h(self):
        test_file = self.get_test_loc('ics/bluetooth-glib-gio-xdgmime/xdgmimealias.h')
        expected = [
            u'Copyright (c) 2004 Red Hat, Inc.',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_glib_gio_xdgmime_xdgmimecache_c(self):
        test_file = self.get_test_loc('ics/bluetooth-glib-gio-xdgmime/xdgmimecache.c')
        expected = [
            u'Copyright (c) 2005 Matthias Clasen <mclasen@redhat.com>',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_glib_gio_xdgmime_xdgmimeicon_c(self):
        test_file = self.get_test_loc('ics/bluetooth-glib-gio-xdgmime/xdgmimeicon.c')
        expected = [
            u'Copyright (c) 2008 Red Hat, Inc.',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_glib_gio_xdgmime_xdgmimemagic_c(self):
        test_file = self.get_test_loc('ics/bluetooth-glib-gio-xdgmime/xdgmimemagic.c')
        expected = [
            u'Copyright (c) 2003 Red Hat, Inc.',
            u'Copyright (c) 2003 Jonathan Blandford <jrb@alum.mit.edu>',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_glib_glib_gatomic_c(self):
        test_file = self.get_test_loc('ics/bluetooth-glib-glib/gatomic.c')
        expected = [
            u'Copyright (c) 1995-1997 Peter Mattis, Spencer Kimball and Josh MacDonald',
            u'Copyright (c) 2003 Sebastian Wilhelmi',
            u'Copyright (c) 2007 Nokia Corporation',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_glib_glib_gatomic_h(self):
        test_file = self.get_test_loc('ics/bluetooth-glib-glib/gatomic.h')
        expected = [
            u'Copyright (c) 1995-1997 Peter Mattis, Spencer Kimball and Josh MacDonald',
            u'Copyright (c) 2003 Sebastian Wilhelmi',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_glib_glib_gbase64_h(self):
        test_file = self.get_test_loc('ics/bluetooth-glib-glib/gbase64.h')
        expected = [
            u'Copyright (c) 2005 Alexander Larsson <alexl@redhat.com>',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_glib_glib_gbookmarkfile_h(self):
        test_file = self.get_test_loc('ics/bluetooth-glib-glib/gbookmarkfile.h')
        expected = [
            u'Copyright (c) 2005-2006 Emmanuele Bassi',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_glib_glib_gbsearcharray_h(self):
        test_file = self.get_test_loc('ics/bluetooth-glib-glib/gbsearcharray.h')
        expected = [
            u'Copyright (c) 2000-2003 Tim Janik',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_glib_glib_gchecksum_h(self):
        test_file = self.get_test_loc('ics/bluetooth-glib-glib/gchecksum.h')
        expected = [
            u'Copyright (c) 2007 Emmanuele Bassi <ebassi@gnome.org>',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_glib_glib_gconvert_c(self):
        test_file = self.get_test_loc('ics/bluetooth-glib-glib/gconvert.c')
        expected = [
            u'Copyright Red Hat Inc., 2000',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_glib_glib_gdataset_c(self):
        test_file = self.get_test_loc('ics/bluetooth-glib-glib/gdataset.c')
        expected = [
            u'Copyright (c) 1995-1997 Peter Mattis, Spencer Kimball and Josh MacDonald',
            u'Copyright (c) 1998 Tim Janik',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_glib_glib_gdatasetprivate_h(self):
        test_file = self.get_test_loc('ics/bluetooth-glib-glib/gdatasetprivate.h')
        expected = [
            u'Copyright (c) 2005 Red Hat',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_glib_glib_gdir_c(self):
        test_file = self.get_test_loc('ics/bluetooth-glib-glib/gdir.c')
        expected = [
            u'Copyright (c) 1995-1997 Peter Mattis, Spencer Kimball and Josh MacDonald',
            u'Copyright 2001 Hans Breuer',
            u'Copyright 2004 Tor Lillqvist',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_glib_glib_gdir_h(self):
        test_file = self.get_test_loc('ics/bluetooth-glib-glib/gdir.h')
        expected = [
            u'Copyright (c) 1995-1997 Peter Mattis, Spencer Kimball and Josh MacDonald',
            u'Copyright 2001 Hans Breuer',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_glib_glib_gerror_h(self):
        test_file = self.get_test_loc('ics/bluetooth-glib-glib/gerror.h')
        expected = [
            u'Copyright 2000 Red Hat, Inc.',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_glib_glib_gfileutils_c(self):
        test_file = self.get_test_loc('ics/bluetooth-glib-glib/gfileutils.c')
        expected = [
            u'Copyright 2000 Red Hat, Inc.',
            u'Copyright (c) 1991,92,93,94,95,96,97,98,99 Free Software Foundation, Inc.',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_glib_glib_gi18n_lib_h(self):
        test_file = self.get_test_loc('ics/bluetooth-glib-glib/gi18n-lib.h')
        expected = [
            u'Copyright (c) 1995-1997, 2002 Peter Mattis, Red Hat, Inc.',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_glib_glib_giochannel_c(self):
        test_file = self.get_test_loc('ics/bluetooth-glib-glib/giochannel.c')
        expected = [
            u'Copyright (c) 1995-1997 Peter Mattis, Spencer Kimball and Josh MacDonald',
            u'Copyright 1998 Owen Taylor',
        ]
        check_detection(expected, test_file)

    def test_ics_bluetooth_glib_glib_gkeyfile_c(self):
        test_file = self.get_test_loc('ics/bluetooth-glib-glib/gkeyfile.c')
        expected = [
            u'Copyright 2004 Red Hat, Inc.',
        ]
        check_detection(expected, test_file)

    # Truncated at the chunk boundary: this method continues past the end of
    # the visible source.
    def
test_ics_bluetooth_glib_glib_gkeyfile_h(self): test_file = self.get_test_loc('ics/bluetooth-glib-glib/gkeyfile.h') expected = [ u'Copyright 2004 Red Hat, Inc.', ] check_detection(expected, test_file) def test_ics_bluetooth_glib_glib_glib_object_h(self): test_file = self.get_test_loc('ics/bluetooth-glib-glib/glib-object.h') expected = [ u'Copyright (c) 1998, 1999, 2000 Tim Janik and Red Hat, Inc.', ] check_detection(expected, test_file) def test_ics_bluetooth_glib_glib_gmain_h(self): test_file = self.get_test_loc('ics/bluetooth-glib-glib/gmain.h') expected = [ u'Copyright (c) 1998-2000 Red Hat, Inc.', ] check_detection(expected, test_file) def test_ics_bluetooth_glib_glib_gmappedfile_h(self): test_file = self.get_test_loc('ics/bluetooth-glib-glib/gmappedfile.h') expected = [ u'Copyright 2005 Matthias Clasen', ] check_detection(expected, test_file) def test_ics_bluetooth_glib_glib_goption_c(self): test_file = self.get_test_loc('ics/bluetooth-glib-glib/goption.c') expected = [ u'Copyright (c) 1999, 2003 Red Hat Software', u'Copyright (c) 2004 Anders Carlsson <andersca@gnome.org>', ] check_detection(expected, test_file) def test_ics_bluetooth_glib_glib_goption_h(self): test_file = self.get_test_loc('ics/bluetooth-glib-glib/goption.h') expected = [ u'Copyright (c) 2004 Anders Carlsson <andersca@gnome.org>', ] check_detection(expected, test_file) def test_ics_bluetooth_glib_glib_gpattern_c(self): test_file = self.get_test_loc('ics/bluetooth-glib-glib/gpattern.c') expected = [ u'Copyright (c) 1995-1997, 1999 Peter Mattis, Red Hat, Inc.', ] check_detection(expected, test_file) def test_ics_bluetooth_glib_glib_gpoll_c(self): test_file = self.get_test_loc('ics/bluetooth-glib-glib/gpoll.c') expected = [ u'Copyright (c) 1995-1997 Peter Mattis, Spencer Kimball and Josh MacDonald', u'Copyright 1998 Owen Taylor', u'Copyright 2008 Red Hat, Inc.', u'Copyright (c) 1994, 1996, 1997 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def 
test_ics_bluetooth_glib_glib_gqsort_c(self): test_file = self.get_test_loc('ics/bluetooth-glib-glib/gqsort.c') expected = [ u'Copyright (c) 1991, 1992, 1996, 1997,1999,2004 Free Software Foundation, Inc.', u'Copyright (c) 2000 Eazel, Inc.', u'Copyright (c) 1995-1997 Peter Mattis, Spencer Kimball and Josh MacDonald', ] check_detection(expected, test_file) def test_ics_bluetooth_glib_glib_gregex_h(self): test_file = self.get_test_loc('ics/bluetooth-glib-glib/gregex.h') expected = [ u'Copyright (c) 1999, 2000 Scott Wimer', u'Copyright (c) 2004, Matthias Clasen <mclasen@redhat.com>', u'Copyright (c) 2005 - 2007, Marco Barisione <marco@barisione.org>', ] check_detection(expected, test_file) def test_ics_bluetooth_glib_glib_gsequence_h(self): test_file = self.get_test_loc('ics/bluetooth-glib-glib/gsequence.h') expected = [ u'Copyright (c) 2002, 2003, 2004, 2005, 2006, 2007 Soeren Sandmann (sandmann@daimi.au.dk)', ] check_detection(expected, test_file) def test_ics_bluetooth_glib_glib_gslice_c(self): test_file = self.get_test_loc('ics/bluetooth-glib-glib/gslice.c') expected = [ u'Copyright (c) 2005 Tim Janik', ] check_detection(expected, test_file) def test_ics_bluetooth_glib_glib_gstdio_c(self): test_file = self.get_test_loc('ics/bluetooth-glib-glib/gstdio.c') expected = [ u'Copyright 2004 Tor Lillqvist', ] check_detection(expected, test_file) def test_ics_bluetooth_glib_glib_gstrfuncs_c(self): test_file = self.get_test_loc('ics/bluetooth-glib-glib/gstrfuncs.c') expected = [ u'Copyright (c) 1995-1997 Peter Mattis, Spencer Kimball and Josh MacDonald', u'Copyright (c) 1991,92,94,95,96,97,98,99,2000,01,02 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bluetooth_glib_glib_gstring_c(self): test_file = self.get_test_loc('ics/bluetooth-glib-glib/gstring.c') expected = [ u'Copyright (c) 1995-1997 Peter Mattis, Spencer Kimball and Josh MacDonald', ] check_detection(expected, test_file) def test_ics_bluetooth_glib_glib_gtestutils_c(self): 
test_file = self.get_test_loc('ics/bluetooth-glib-glib/gtestutils.c') expected = [ u'Copyright (c) 2007 Imendio AB Authors Tim Janik, Sven Herzberg', ] check_detection(expected, test_file) @expectedFailure def test_ics_bluetooth_glib_glib_gtestutils_c_extra_author(self): test_file = self.get_test_loc('ics/bluetooth-glib-glib/gtestutils.c') expected = [ u'Copyright (c) 2007 Imendio AB', ] check_detection(expected, test_file) def test_ics_bluetooth_glib_glib_gthread_c(self): test_file = self.get_test_loc('ics/bluetooth-glib-glib/gthread.c') expected = [ u'Copyright (c) 1995-1997 Peter Mattis, Spencer Kimball and Josh MacDonald', u'Copyright 1998 Sebastian Wilhelmi University of Karlsruhe Owen Taylor', ] check_detection(expected, test_file) def test_ics_bluetooth_glib_glib_gthreadprivate_h(self): test_file = self.get_test_loc('ics/bluetooth-glib-glib/gthreadprivate.h') expected = [ u'Copyright (c) 2003 Sebastian Wilhelmi', ] check_detection(expected, test_file) def test_ics_bluetooth_glib_glib_gunicode_h(self): test_file = self.get_test_loc('ics/bluetooth-glib-glib/gunicode.h') expected = [ u'Copyright (c) 1999, 2000 Tom Tromey', u'Copyright 2000, 2005 Red Hat, Inc.', ] check_detection(expected, test_file) def test_ics_bluetooth_glib_glib_gunicodeprivate_h(self): test_file = self.get_test_loc('ics/bluetooth-glib-glib/gunicodeprivate.h') expected = [ u'Copyright (c) 2003 Noah Levitt', ] check_detection(expected, test_file) def test_ics_bluetooth_glib_glib_gunidecomp_c(self): test_file = self.get_test_loc('ics/bluetooth-glib-glib/gunidecomp.c') expected = [ u'Copyright (c) 1999, 2000 Tom Tromey', u'Copyright 2000 Red Hat, Inc.', ] check_detection(expected, test_file) def test_ics_bluetooth_glib_glib_guniprop_c(self): test_file = self.get_test_loc('ics/bluetooth-glib-glib/guniprop.c') expected = [ u'Copyright (c) 1999 Tom Tromey', u'Copyright (c) 2000 Red Hat, Inc.', ] check_detection(expected, test_file) def test_ics_bluetooth_glib_glib_gutils_c(self): test_file = 
self.get_test_loc('ics/bluetooth-glib-glib/gutils.c') expected = [ u'Copyright (c) 1995-1998 Peter Mattis, Spencer Kimball and Josh MacDonald', u'Copyright (c) 2007 Red Hat Inc.', u'Copyright (c) 1995, 1996, 1997, 1998 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bluetooth_glib_glib_gnulib_asnprintf_c(self): test_file = self.get_test_loc('ics/bluetooth-glib-glib-gnulib/asnprintf.c') expected = [ u'Copyright (c) 1999, 2002 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bluetooth_glib_glib_gnulib_g_gnulib_h(self): test_file = self.get_test_loc('ics/bluetooth-glib-glib-gnulib/g-gnulib.h') expected = [ u'Copyright (c) 2003 Matthias Clasen', ] check_detection(expected, test_file) def test_ics_bluetooth_glib_glib_gnulib_printf_c(self): test_file = self.get_test_loc('ics/bluetooth-glib-glib-gnulib/printf.c') expected = [ u'Copyright (c) 2003 Matthias Clasen', ] check_detection(expected, test_file) def test_ics_bluetooth_glib_glib_gnulib_printf_args_c(self): test_file = self.get_test_loc('ics/bluetooth-glib-glib-gnulib/printf-args.c') expected = [ u'Copyright (c) 1999, 2002-2003 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bluetooth_glib_glib_gnulib_printf_parse_c(self): test_file = self.get_test_loc('ics/bluetooth-glib-glib-gnulib/printf-parse.c') expected = [ u'Copyright (c) 1999-2000, 2002-2003 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bluetooth_glib_glib_gnulib_vasnprintf_h(self): test_file = self.get_test_loc('ics/bluetooth-glib-glib-gnulib/vasnprintf.h') expected = [ u'Copyright (c) 2002-2003 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_bluetooth_glib_gmodule_gmodule_c(self): test_file = self.get_test_loc('ics/bluetooth-glib-gmodule/gmodule.c') expected = [ u'Copyright (c) 1998 Tim Janik', ] check_detection(expected, test_file) def 
test_ics_bluetooth_glib_gmodule_gmodule_rc_in(self): test_file = self.get_test_loc('ics/bluetooth-glib-gmodule/gmodule.rc.in') expected = [ u'Copyright (c) 1998-2000 Tim Janik.', ] check_detection(expected, test_file) def test_ics_bluetooth_glib_gmodule_gmodule_ar_c(self): test_file = self.get_test_loc('ics/bluetooth-glib-gmodule/gmodule-ar.c') expected = [ u'Copyright (c) 1998, 2000 Tim Janik', ] check_detection(expected, test_file) def test_ics_bluetooth_glib_gmodule_gmodule_beos_c(self): test_file = self.get_test_loc('ics/bluetooth-glib-gmodule/gmodule-beos.c') expected = [ u'Copyright (c) 1998, 2000 Tim Janik', u'Copyright (c) 1999 Richard', ] check_detection(expected, test_file) @expectedFailure def test_ics_bluetooth_glib_gmodule_gmodule_beos_c_trail_name(self): test_file = self.get_test_loc('ics/bluetooth-glib-gmodule/gmodule-beos.c') expected = [ u'Copyright (c) 1998, 2000 Tim Janik', u'Copyright (C) 1999 Richard Offer and Shawn T. Amundson (amundson@gtk.org)', ] check_detection(expected, test_file) def test_ics_bluetooth_glib_gmodule_gmodule_dyld_c(self): test_file = self.get_test_loc('ics/bluetooth-glib-gmodule/gmodule-dyld.c') expected = [ u'Copyright (c) 1998, 2000 Tim Janik', u'Copyright (c) 2001 Dan Winship', ] check_detection(expected, test_file) def test_ics_bluetooth_glib_gmodule_gmodule_win32_c(self): test_file = self.get_test_loc('ics/bluetooth-glib-gmodule/gmodule-win32.c') expected = [ u'Copyright (c) 1998, 2000 Tim Janik', u'Copyright (c) 1998 Tor Lillqvist', ] check_detection(expected, test_file) def test_ics_bluetooth_glib_gobject_gboxed_c(self): test_file = self.get_test_loc('ics/bluetooth-glib-gobject/gboxed.c') expected = [ u'Copyright (c) 2000-2001 Red Hat, Inc.', ] check_detection(expected, test_file) def test_ics_bluetooth_glib_gobject_gclosure_c(self): test_file = self.get_test_loc('ics/bluetooth-glib-gobject/gclosure.c') expected = [ u'Copyright (c) 2000-2001 Red Hat, Inc.', u'Copyright (c) 2005 Imendio AB', ] 
check_detection(expected, test_file) def test_ics_bluetooth_glib_gobject_genums_c(self): test_file = self.get_test_loc('ics/bluetooth-glib-gobject/genums.c') expected = [ u'Copyright (c) 1998-1999, 2000-2001 Tim Janik and Red Hat, Inc.', ] check_detection(expected, test_file) def test_ics_bluetooth_glib_gobject_gobject_rc_in(self): test_file = self.get_test_loc('ics/bluetooth-glib-gobject/gobject.rc.in') expected = [ u'Copyright (c) 1998-2004 Tim Janik and Red Hat, Inc.', ] check_detection(expected, test_file) def test_ics_bluetooth_glib_gobject_gparam_c(self): test_file = self.get_test_loc('ics/bluetooth-glib-gobject/gparam.c') expected = [ u'Copyright (c) 1997-1999, 2000-2001 Tim Janik and Red Hat, Inc.', ] check_detection(expected, test_file) def test_ics_bluetooth_glib_gobject_gsourceclosure_c(self): test_file = self.get_test_loc('ics/bluetooth-glib-gobject/gsourceclosure.c') expected = [ u'Copyright (c) 2001 Red Hat, Inc.', ] check_detection(expected, test_file) def test_ics_bluetooth_glib_gobject_gtypemodule_c(self): test_file = self.get_test_loc('ics/bluetooth-glib-gobject/gtypemodule.c') expected = [ u'Copyright (c) 2000 Red Hat, Inc.', ] check_detection(expected, test_file) def test_ics_bluetooth_glib_gobject_makefile_am(self): test_file = self.get_test_loc('ics/bluetooth-glib-gobject/Makefile.am') expected = [ u'Copyright (c) 1997,98,99,2000 Tim Janik and Red Hat, Inc.', ] check_detection(expected, test_file) def test_ics_bluetooth_glib_gobject_tests_threadtests_c(self): test_file = self.get_test_loc('ics/bluetooth-glib-gobject-tests/threadtests.c') expected = [ u'Copyright (c) 2008 Imendio AB Authors Tim Janik', ] check_detection(expected, test_file) @expectedFailure def test_ics_bluetooth_glib_gobject_tests_threadtests_c_extra_author(self): test_file = self.get_test_loc('ics/bluetooth-glib-gobject-tests/threadtests.c') expected = [ u'Copyright (c) 2008 Imendio AB', ] check_detection(expected, test_file) def 
test_ics_bluetooth_glib_gthread_gthread_rc_in(self): test_file = self.get_test_loc('ics/bluetooth-glib-gthread/gthread.rc.in') expected = [ u'Copyright (c) 1995-1997 Peter Mattis, Spencer Kimball', u'Copyright (c) 1998 Sebastian Wilhelmi.', ] check_detection(expected, test_file) @expectedFailure def test_ics_bluetooth_glib_gthread_gthread_rc_in_trail_name(self): test_file = self.get_test_loc('ics/bluetooth-glib-gthread/gthread.rc.in') expected = [ u'Copyright (c) 1995-1997 Peter Mattis, Spencer Kimball and Josh MacDonald.', u'Copyright (c) 1998 Sebastian Wilhelmi.', ] check_detection(expected, test_file) def test_ics_bluetooth_glib_gthread_gthread_win32_c(self): test_file = self.get_test_loc('ics/bluetooth-glib-gthread/gthread-win32.c') expected = [ u'Copyright (c) 1995-1997 Peter Mattis, Spencer Kimball and Josh MacDonald', u'Copyright 1998-2001 Sebastian Wilhelmi University of Karlsruhe', u'Copyright 2001 Hans Breuer', ] check_detection(expected, test_file) def test_ics_bluetooth_glib_m4macros_glib_gettext_m4(self): test_file = self.get_test_loc('ics/bluetooth-glib-m4macros/glib-gettext.m4') expected = [ u'Copyright (c) 1995-2002 Free Software Foundation, Inc.', u'Copyright (c) 2001-2003,2004 Red Hat, Inc.', ] check_detection(expected, test_file) def test_ics_bluetooth_glib_po_makefile_in_in(self): test_file = self.get_test_loc('ics/bluetooth-glib-po/Makefile.in.in') expected = [ u'Copyright (c) 1995, 1996, 1997 by Ulrich Drepper <drepper@gnu.ai.mit.edu>', ] check_detection(expected, test_file) def test_ics_bluetooth_glib_po_po2tbl_sed_in(self): test_file = self.get_test_loc('ics/bluetooth-glib-po/po2tbl.sed.in') expected = [ u'Copyright (c) 1995 Free Software Foundation, Inc. 
Ulrich Drepper <drepper@gnu.ai.mit.edu>, 1995.', ] check_detection(expected, test_file) def test_ics_bluetooth_glib_tests_gio_test_c(self): test_file = self.get_test_loc('ics/bluetooth-glib-tests/gio-test.c') expected = [ u'Copyright (c) 2000 Tor Lillqvist', ] check_detection(expected, test_file) def test_ics_bluetooth_glib_tests_hash_test_c(self): test_file = self.get_test_loc('ics/bluetooth-glib-tests/hash-test.c') expected = [ u'Copyright (c) 1995-1997 Peter Mattis, Spencer Kimball and Josh MacDonald', u'Copyright (c) 1999 The Free Software Foundation', ] check_detection(expected, test_file) def test_ics_bluetooth_glib_tests_mapping_test_c(self): test_file = self.get_test_loc('ics/bluetooth-glib-tests/mapping-test.c') expected = [ u'Copyright (c) 2005 Matthias Clasen', ] check_detection(expected, test_file) def test_ics_bluetooth_glib_tests_markup_collect_c(self): test_file = self.get_test_loc('ics/bluetooth-glib-tests/markup-collect.c') expected = [ u'Copyright (c) 2007 Ryan Lortie', ] check_detection(expected, test_file) def test_ics_bluetooth_glib_tests_onceinit_c(self): test_file = self.get_test_loc('ics/bluetooth-glib-tests/onceinit.c') expected = [ u'Copyright (c) 2007 Tim Janik', ] check_detection(expected, test_file) def test_ics_bluetooth_glib_tests_patterntest_c(self): test_file = self.get_test_loc('ics/bluetooth-glib-tests/patterntest.c') expected = [ u'Copyright (c) 2001 Matthias Clasen <matthiasc@poet.de>', ] check_detection(expected, test_file) def test_ics_bluetooth_glib_tests_regex_test_c(self): test_file = self.get_test_loc('ics/bluetooth-glib-tests/regex-test.c') expected = [ u'Copyright (c) 2005 - 2006, Marco Barisione <marco@barisione.org>', ] check_detection(expected, test_file) def test_ics_bluetooth_glib_tests_scannerapi_c(self): test_file = self.get_test_loc('ics/bluetooth-glib-tests/scannerapi.c') expected = [ u'Copyright (c) 2007 Patrick Hulin', u'Copyright (c) 2007 Imendio AB Authors Tim Janik', ] check_detection(expected, test_file) 
@expectedFailure def test_ics_bluetooth_glib_tests_scannerapi_c_extra_author(self): test_file = self.get_test_loc('ics/bluetooth-glib-tests/scannerapi.c') expected = [ u'Copyright (c) 2007 Patrick Hulin', u'Copyright (c) 2007 Imendio AB', ] check_detection(expected, test_file) def test_ics_bluetooth_glib_tests_slice_concurrent_c(self): test_file = self.get_test_loc('ics/bluetooth-glib-tests/slice-concurrent.c') expected = [ u'Copyright (c) 2006 Stefan Westerfeld', u'Copyright (c) 2007 Tim Janik', ] check_detection(expected, test_file) def test_ics_bluetooth_glib_tests_testingbase64_c(self): test_file = self.get_test_loc('ics/bluetooth-glib-tests/testingbase64.c') expected = [ u'Copyright (c) 2008 Asbjoern Pettersen', ] check_detection(expected, test_file) def test_ics_bluetooth_glib_tests_gobject_accumulator_c(self): test_file = self.get_test_loc('ics/bluetooth-glib-tests-gobject/accumulator.c') expected = [ u'Copyright (c) 2001, 2003 Red Hat, Inc.', ] check_detection(expected, test_file) def test_ics_bluetooth_glib_tests_gobject_deftype_c(self): test_file = self.get_test_loc('ics/bluetooth-glib-tests-gobject/deftype.c') expected = [ u'Copyright (c) 2006 Behdad Esfahbod', ] check_detection(expected, test_file) def test_ics_bluetooth_glib_tests_gobject_override_c(self): test_file = self.get_test_loc('ics/bluetooth-glib-tests-gobject/override.c') expected = [ u'Copyright (c) 2001, James Henstridge', u'Copyright (c) 2003, Red Hat, Inc.', ] check_detection(expected, test_file) def test_ics_bluetooth_glib_tests_gobject_references_c(self): test_file = self.get_test_loc('ics/bluetooth-glib-tests-gobject/references.c') expected = [ u'Copyright (c) 2005 Red Hat, Inc.', ] check_detection(expected, test_file) def test_ics_bluetooth_glib_tests_gobject_singleton_c(self): test_file = self.get_test_loc('ics/bluetooth-glib-tests-gobject/singleton.c') expected = [ u'Copyright (c) 2006 Imendio AB', ] check_detection(expected, test_file) def 
test_ics_bluetooth_glib_tests_gobject_testcommon_h(self): test_file = self.get_test_loc('ics/bluetooth-glib-tests-gobject/testcommon.h') expected = [ u'Copyright (c) 2003 Red Hat, Inc.', ] check_detection(expected, test_file) def test_ics_bluetooth_glib_tests_refcount_closures_c(self): test_file = self.get_test_loc('ics/bluetooth-glib-tests-refcount/closures.c') expected = [ u'Copyright (c) 2005 Imendio AB', ] check_detection(expected, test_file) def test_ics_bluetooth_hcidump_readme(self): test_file = self.get_test_loc('ics/bluetooth-hcidump/README') expected = [ u'Copyright (c) 2000-2002 Maxim Krasnyansky <maxk@qualcomm.com>', u'Copyright (c) 2003-2011 Marcel Holtmann <marcel@holtmann.org>', ] check_detection(expected, test_file) def test_ics_bluetooth_hcidump_parser_att_c(self): test_file = self.get_test_loc('ics/bluetooth-hcidump-parser/att.c') expected = [ u'Copyright (c) 2011 Andre Dieb Martins <andre.dieb@gmail.com>', ] check_detection(expected, test_file) def test_ics_bluetooth_hcidump_parser_bnep_c(self): test_file = self.get_test_loc('ics/bluetooth-hcidump-parser/bnep.c') expected = [ u'Copyright (c) 2002-2003 Takashi Sasai <sasai@sm.sony.co.jp>', u'Copyright (c) 2003-2011 Marcel Holtmann <marcel@holtmann.org>', ] check_detection(expected, test_file) def test_ics_bluetooth_hcidump_parser_cmtp_c(self): test_file = self.get_test_loc('ics/bluetooth-hcidump-parser/cmtp.c') expected = [ u'Copyright (c) 2002-2011 Marcel Holtmann <marcel@holtmann.org>', ] check_detection(expected, test_file) def test_ics_bluetooth_hcidump_parser_hci_c(self): test_file = self.get_test_loc('ics/bluetooth-hcidump-parser/hci.c') expected = [ u'Copyright (c) 2000-2002 Maxim Krasnyansky <maxk@qualcomm.com>', u'Copyright (c) 2003-2011 Marcel Holtmann <marcel@holtmann.org>', ] check_detection(expected, test_file) def test_ics_bluetooth_hcidump_parser_hidp_c(self): test_file = self.get_test_loc('ics/bluetooth-hcidump-parser/hidp.c') expected = [ u'Copyright (c) 2003-2011 Marcel Holtmann 
<marcel@holtmann.org>', ] check_detection(expected, test_file) def test_ics_bluetooth_hcidump_parser_rfcomm_c(self): test_file = self.get_test_loc('ics/bluetooth-hcidump-parser/rfcomm.c') expected = [ u'Copyright (c) 2001-2002 Wayne Lee <waynelee@qualcomm.com>', u'Copyright (c) 2003-2011 Marcel Holtmann <marcel@holtmann.org>', ] check_detection(expected, test_file) def test_ics_bluetooth_hcidump_parser_sdp_c(self): test_file = self.get_test_loc('ics/bluetooth-hcidump-parser/sdp.c') expected = [ u'Copyright (c) 2001-2002 Ricky Yuen <ryuen@qualcomm.com>', u'Copyright (c) 2003-2011 Marcel Holtmann <marcel@holtmann.org>', ] check_detection(expected, test_file) def test_ics_bouncycastle_notice(self): test_file = self.get_test_loc('ics/bouncycastle/NOTICE') expected = [ u'Copyright (c) 2000-2010 The Legion Of The Bouncy Castle', ] check_detection(expected, test_file) def test_ics_bouncycastle_src_main_java_org_bouncycastle_crypto_digests_openssldigest_java(self): test_file = self.get_test_loc('ics/bouncycastle-src-main-java-org-bouncycastle-crypto-digests/OpenSSLDigest.java') expected = [ u'Copyright (c) 2008 The Android Open Source Project', ] check_detection(expected, test_file) def test_ics_bsdiff_bsdiff_1(self): test_file = self.get_test_loc('ics/bsdiff/bsdiff.1') expected = [ u'Copyright 2003-2005 Colin Percival', ] check_detection(expected, test_file) def test_ics_bsdiff_bsdiff_c(self): test_file = self.get_test_loc('ics/bsdiff/bsdiff.c') expected = [ u'Copyright 2003-2005 Colin Percival', ] check_detection(expected, test_file) def test_ics_bzip2_blocksort_c(self): test_file = self.get_test_loc('ics/bzip2/blocksort.c') expected = [ u'Copyright (c) 1996-2010 Julian Seward <jseward@bzip.org>', ] check_detection(expected, test_file) def test_ics_bzip2_bzip2_c(self): test_file = self.get_test_loc('ics/bzip2/bzip2.c') expected = [ u'Copyright (c) 1996-2010 Julian Seward <jseward@bzip.org>', u'Copyright (c) 1996-2010 by Julian Seward.', ] check_detection(expected, 
test_file) def test_ics_bzip2_license(self): test_file = self.get_test_loc('ics/bzip2/LICENSE') expected = [ u'copyright (c) 1996-2010 Julian R Seward.', ] check_detection(expected, test_file) def test_ics_bzip2_makefile(self): test_file = self.get_test_loc('ics/bzip2/Makefile') expected = [ u'Copyright (c) 1996-2010 Julian Seward <jseward@bzip.org>', ] check_detection(expected, test_file) def test_ics_bzip2_manual_html(self): test_file = self.get_test_loc('ics/bzip2/manual.html') expected = [ u'Copyright (c) 1996-2010 Julian Seward', u'copyright (c) 1996-2010 Julian Seward.', ] check_detection(expected, test_file) def test_ics_bzip2_xmlproc_sh(self): test_file = self.get_test_loc('ics/bzip2/xmlproc.sh') expected = [ u'Copyright (c) 1996-2010 Julian Seward <jseward@bzip.org>', ] check_detection(expected, test_file) def test_ics_chromium_license(self): test_file = self.get_test_loc('ics/chromium/LICENSE') expected = [ u'Copyright (c) 2010 The Chromium Authors.', ] check_detection(expected, test_file) def test_ics_chromium_android_execinfo_cc(self): test_file = self.get_test_loc('ics/chromium-android/execinfo.cc') expected = [ u'Copyright (c) 2011 The Chromium Authors.', ] check_detection(expected, test_file) def test_ics_chromium_android_prefix_h(self): test_file = self.get_test_loc('ics/chromium-android/prefix.h') expected = [ u'Copyright 2010, The Android Open Source Project', ] check_detection(expected, test_file) def test_ics_chromium_android_jni_jni_utils_cc(self): test_file = self.get_test_loc('ics/chromium-android-jni/jni_utils.cc') expected = [ u'Copyright (c) 2010 The Chromium Authors.', ] check_detection(expected, test_file) def test_ics_chromium_android_ui_base_l10n_l10n_util_cc(self): test_file = self.get_test_loc('ics/chromium-android-ui-base-l10n/l10n_util.cc') expected = [ u'Copyright 2010, The Android Open Source Project', ] check_detection(expected, test_file) def test_ics_chromium_app_sql_init_status_h(self): test_file = 
self.get_test_loc('ics/chromium-app-sql/init_status.h') expected = [ u'Copyright (c) 2009 The Chromium Authors.', ] check_detection(expected, test_file) def test_ics_chromium_base_atomicops_internals_x86_gcc_cc(self): test_file = self.get_test_loc('ics/chromium-base/atomicops_internals_x86_gcc.cc') expected = [ u'Copyright (c) 2006-2008 The Chromium Authors.', ] check_detection(expected, test_file) def test_ics_chromium_base_atomicops_internals_x86_gcc_h(self): test_file = self.get_test_loc('ics/chromium-base/atomicops_internals_x86_gcc.h') expected = [ u'Copyright (c) 2006-2008 The Chromium Authors.', ] check_detection(expected, test_file) def test_ics_chromium_base_base_gyp(self): test_file = self.get_test_loc('ics/chromium-base/base.gyp') expected = [ u'Copyright (c) 2011 The Chromium Authors.', ] check_detection(expected, test_file) def test_ics_chromium_base_compat_execinfo_h(self): test_file = self.get_test_loc('ics/chromium-base/compat_execinfo.h') expected = [ u'Copyright (c) 2006-2009 The Chromium Authors.', ] check_detection(expected, test_file) def test_ics_chromium_base_file_version_info_h(self): test_file = self.get_test_loc('ics/chromium-base/file_version_info.h') expected = [ u'Copyright (c) 2011 The Chromium Authors.', ] check_detection(expected, test_file) def test_ics_chromium_base_file_version_info_mac_mm(self): test_file = self.get_test_loc('ics/chromium-base/file_version_info_mac.mm') expected = [ u'Copyright (c) 2011 The Chromium Authors.', ] check_detection(expected, test_file) def test_ics_chromium_base_foundation_utils_mac_h(self): test_file = self.get_test_loc('ics/chromium-base/foundation_utils_mac.h') expected = [ u'Copyright (c) 2008 The Chromium Authors.', ] check_detection(expected, test_file) def test_ics_chromium_base_md5_cc(self): test_file = self.get_test_loc('ics/chromium-base/md5.cc') expected = [ u'Copyright 2006 Google Inc.', ] check_detection(expected, test_file) def test_ics_chromium_base_string_tokenizer_h(self): test_file 
= self.get_test_loc('ics/chromium-base/string_tokenizer.h') expected = [ u'Copyright (c) 2010 The Chromium Authors.', ] check_detection(expected, test_file) def test_ics_chromium_base_allocator_allocator_gyp(self): test_file = self.get_test_loc('ics/chromium-base-allocator/allocator.gyp') expected = [ u'Copyright (c) 2009 The Chromium Authors.', ] check_detection(expected, test_file) def test_ics_chromium_base_i18n_icu_string_conversions_cc(self): test_file = self.get_test_loc('ics/chromium-base-i18n/icu_string_conversions.cc') expected = [ u'Copyright (c) 2009 The Chromium Authors.', u'Copyright (c) 1995-2006 International Business Machines Corporation', ] check_detection(expected, test_file) @expectedFailure def test_ics_chromium_base_i18n_icu_string_conversions_cc_trail_other(self): test_file = self.get_test_loc('ics/chromium-base-i18n/icu_string_conversions.cc') expected = [ u'Copyright (c) 2009 The Chromium Authors.', u'Copyright (c) 1995-2006 International Business Machines Corporation and others', ] check_detection(expected, test_file) def test_ics_chromium_base_third_party_dmg_fp_dtoa_cc(self): test_file = self.get_test_loc('ics/chromium-base-third_party-dmg_fp/dtoa.cc') expected = [ u'Copyright (c) 1991, 2000, 2001 by Lucent Technologies.', ] check_detection(expected, test_file) def test_ics_chromium_base_third_party_dmg_fp_g_fmt_cc(self): test_file = self.get_test_loc('ics/chromium-base-third_party-dmg_fp/g_fmt.cc') expected = [ u'Copyright (c) 1991, 1996 by Lucent Technologies.', ] check_detection(expected, test_file) def test_ics_chromium_base_third_party_dmg_fp_license(self): test_file = self.get_test_loc('ics/chromium-base-third_party-dmg_fp/LICENSE') expected = [ u'Copyright (c) 1991, 2000, 2001 by Lucent Technologies.', ] check_detection(expected, test_file) def test_ics_chromium_base_third_party_dmg_fp_thirdpartyproject_prop(self): test_file = self.get_test_loc('ics/chromium-base-third_party-dmg_fp/ThirdPartyProject.prop') expected = [ u'Copyright 
2011 Google Inc.', ] check_detection(expected, test_file) def test_ics_chromium_base_third_party_dynamic_annotations_dynamic_annotations_c(self): test_file = self.get_test_loc('ics/chromium-base-third_party-dynamic_annotations/dynamic_annotations.c') expected = [ u'Copyright (c) 2008-2009, Google Inc.', ] check_detection(expected, test_file) def test_ics_chromium_base_third_party_dynamic_annotations_dynamic_annotations_gyp(self): test_file = self.get_test_loc('ics/chromium-base-third_party-dynamic_annotations/dynamic_annotations.gyp') expected = [ u'Copyright (c) 2010 The Chromium Authors.', ] check_detection(expected, test_file) def test_ics_chromium_base_third_party_icu_icu_utf_cc(self): test_file = self.get_test_loc('ics/chromium-base-third_party-icu/icu_utf.cc') expected = [ u'Copyright (c) 1999-2006, International Business Machines Corporation', ] check_detection(expected, test_file) @expectedFailure def test_ics_chromium_base_third_party_icu_icu_utf_cc_trail_other(self): test_file = self.get_test_loc('ics/chromium-base-third_party-icu/icu_utf.cc') expected = [ u'Copyright (c) 1999-2006, International Business Machines Corporation and others', ] check_detection(expected, test_file) def test_ics_chromium_base_third_party_icu_icu_utf_h(self): test_file = self.get_test_loc('ics/chromium-base-third_party-icu/icu_utf.h') expected = [ u'Copyright (c) 1999-2004, International Business Machines Corporation', ] check_detection(expected, test_file) @expectedFailure def test_ics_chromium_base_third_party_icu_icu_utf_h_trail_other(self): test_file = self.get_test_loc('ics/chromium-base-third_party-icu/icu_utf.h') expected = [ u'Copyright (c) 1999-2004, International Business Machines Corporation and others', ] check_detection(expected, test_file) def test_ics_chromium_base_third_party_icu_license(self): test_file = self.get_test_loc('ics/chromium-base-third_party-icu/LICENSE') expected = [ u'Copyright (c) 1995-2009 International Business Machines Corporation', ] 
check_detection(expected, test_file) @expectedFailure def test_ics_chromium_base_third_party_icu_license_trail_other(self): test_file = self.get_test_loc('ics/chromium-base-third_party-icu/LICENSE') expected = [ u'Copyright (c) 1995-2009 International Business Machines Corporation and others', ] check_detection(expected, test_file) def test_ics_chromium_base_third_party_nspr_license(self): test_file = self.get_test_loc('ics/chromium-base-third_party-nspr/LICENSE') expected = [ u'Copyright (c) 1998-2000 the Initial Developer.', ] check_detection(expected, test_file) def test_ics_chromium_base_third_party_nspr_prcpucfg_h(self): test_file = self.get_test_loc('ics/chromium-base-third_party-nspr/prcpucfg.h') expected = [ u'Copyright 2008, Google Inc.', ] check_detection(expected, test_file) def test_ics_chromium_base_third_party_nspr_prtime_cc(self): test_file = self.get_test_loc('ics/chromium-base-third_party-nspr/prtime.cc') expected = [ u'Copyright (c) 2011 Google Inc', u'Copyright (c) 1998-2000 the Initial Developer.', ] check_detection(expected, test_file) def test_ics_chromium_build_branding_value_sh(self): test_file = self.get_test_loc('ics/chromium-build/branding_value.sh') expected = [ u'Copyright (c) 2008 The Chromium Authors.', ] check_detection(expected, test_file) def test_ics_chromium_build_install_build_deps_sh(self): test_file = self.get_test_loc('ics/chromium-build/install-build-deps.sh') expected = [ u'Copyright (c) 2011 The Chromium Authors.', u'Copyright 2006, 2007, 2008, 2009, 2010 Free Software Foundation, Inc.', u'Copyright 2006, 2007, 2008, 2009, 2010, 2011 Free Software Foundation, Inc.', u'Copyright 2006, 2007, 2008, 2009, 2010 Free Software Foundation, Inc.', u'Copyright 2006, 2007, 2008, 2009, 2010, 2011 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_chromium_build_whitespace_file_txt(self): test_file = self.get_test_loc('ics/chromium-build/whitespace_file.txt') expected = [ u'Copyright (c) 2011 The 
Chromium Authors.', ] check_detection(expected, test_file) def test_ics_chromium_build_mac_strip_from_xcode(self): test_file = self.get_test_loc('ics/chromium-build-mac/strip_from_xcode') expected = [ u'Copyright (c) 2008 The Chromium Authors.', ] check_detection(expected, test_file) def test_ics_chromium_chrome_browser_nacl_loader_sb(self): test_file = self.get_test_loc('ics/chromium-chrome-browser/nacl_loader.sb') expected = [ u'Copyright (c) 2011 The Chromium Authors.', ] check_detection(expected, test_file) def test_ics_chromium_chrome_browser_chromeos_panels_panel_scroller_container_cc(self): test_file = self.get_test_loc('ics/chromium-chrome-browser-chromeos-panels/panel_scroller_container.cc') expected = [ u'Copyright (c) 2009 The Chromium Authors.', ] check_detection(expected, test_file) def test_ics_chromium_chrome_browser_cocoa_authorization_util_mm(self): test_file = self.get_test_loc('ics/chromium-chrome-browser-cocoa/authorization_util.mm') expected = [ u'Copyright (c) 2009 The Chromium Authors.', ] check_detection(expected, test_file) def test_ics_chromium_chrome_browser_download_download_extensions_cc(self): test_file = self.get_test_loc('ics/chromium-chrome-browser-download/download_extensions.cc') expected = [ u'Copyright (c) 2010 The Chromium Authors.', u'Copyright (c) 1998-1999 the Initial Developer.', ] check_detection(expected, test_file) def test_ics_chromium_chrome_browser_importer_firefox_profile_lock_cc(self): test_file = self.get_test_loc('ics/chromium-chrome-browser-importer/firefox_profile_lock.cc') expected = [ u'Copyright (c) 2010 The Chromium Authors.', u'Copyright (c) 2002 the Initial Developer.', ] check_detection(expected, test_file) def test_ics_chromium_chrome_browser_importer_firefox_profile_lock_posix_cc(self): test_file = self.get_test_loc('ics/chromium-chrome-browser-importer/firefox_profile_lock_posix.cc') expected = [ u'Copyright (c) 2009 The Chromium Authors.', u'Copyright (c) 2002 the Initial Developer.', ] 
check_detection(expected, test_file) def test_ics_chromium_chrome_browser_importer_firefox_profile_lock_win_cc(self): test_file = self.get_test_loc('ics/chromium-chrome-browser-importer/firefox_profile_lock_win.cc') expected = [ u'Copyright (c) 2008 The Chromium Authors.', u'Copyright (c) 2002 the Initial Developer.', ] check_detection(expected, test_file) def test_ics_chromium_chrome_browser_importer_mork_reader_cc(self): test_file = self.get_test_loc('ics/chromium-chrome-browser-importer/mork_reader.cc') expected = [ u'Copyright (c) 2006 the Initial Developer.', ] check_detection(expected, test_file) def test_ics_chromium_chrome_browser_importer_nss_decryptor_cc(self): test_file = self.get_test_loc('ics/chromium-chrome-browser-importer/nss_decryptor.cc') expected = [ u'Copyright (c) 2011 The Chromium Authors.', u'Copyright (c) 1994-2000 the Initial Developer.', ] check_detection(expected, test_file) def test_ics_chromium_chrome_browser_importer_nss_decryptor_mac_h(self): test_file = self.get_test_loc('ics/chromium-chrome-browser-importer/nss_decryptor_mac.h') expected = [ u'Copyright (c) 2010 The Chromium Authors.', u'Copyright (c) 1994-2000 the Initial Developer.', ] check_detection(expected, test_file) def test_ics_chromium_chrome_browser_importer_nss_decryptor_win_h(self): test_file = self.get_test_loc('ics/chromium-chrome-browser-importer/nss_decryptor_win.h') expected = [ u'Copyright (c) 2009 The Chromium Authors.', u'Copyright (c) 1994-2000 the Initial Developer.', ] check_detection(expected, test_file) def test_ics_chromium_chrome_browser_metrics_system_metrics_proto(self): test_file = self.get_test_loc('ics/chromium-chrome-browser-metrics/system_metrics.proto') expected = [ u'Copyright (c) 2009 The Chromium Authors.', ] check_detection(expected, test_file) def test_ics_chromium_chrome_browser_renderer_host_render_widget_host_view_mac_mm(self): test_file = self.get_test_loc('ics/chromium-chrome-browser-renderer_host/render_widget_host_view_mac.mm') 
expected = [ u'Copyright (c) 2011 The Chromium Authors.', u'Copyright (c) 2005, 2006, 2007, 2008, 2009 Apple Inc.', u'(c) 2006, 2007 Graham Dennis (graham.dennis@gmail.com)', ] check_detection(expected, test_file) def test_ics_chromium_chrome_browser_resources_about_credits_html(self): test_file = self.get_test_loc('ics/chromium-chrome-browser-resources/about_credits.html') expected = [ u'Copyright (c) 1991, 2000, 2001 by Lucent Technologies.', u'Copyright (c) 2008-2009, Google Inc.', u'Copyright (c) 1998-2000 the Initial Developer.', u'Copyright (c) 1994-2000 the Initial Developer.', u'(c) Copyright IBM Corporation. 2006, 2006.', u'Copyright (c) 2006, Google Inc.', u'Copyright (c) 2000-2008 Julian Seward.', u'Copyright (c) 2007 Red Hat, inc', u'Copyright 2003-2005 Colin Percival', u'Copyright (c) 2000 the Initial Developer.', u'Copyright 1993 by OpenVision Technologies, Inc.', u'Copyright 2007 Google Inc.', u'Copyright (c) 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007 Alexander Kellett, Alexey Proskuryakov, Alex Mathews, Allan Sandfeld Jensen, Alp Toker, Anders Carlsson, Andrew Wellington, Antti', u'Copyright (c) 1991 Free Software Foundation, Inc.', u'copyrighted by the Free Software Foundation', u'Copyright (c) 1991, 1999 Free Software Foundation, Inc.', u'copyrighted by the Free Software Foundation', u'Copyright (c) 2002-2010 The ANGLE Project', u'Copyright (c) 2009 Apple Inc.', u'Portions Copyright (c) 1999-2007 Apple Inc.', u'copyright (c) 1996-2010 Julian R Seward.', u'Copyright (c) 2010 The Chromium Authors.', u'Copyright (c) 1998-1999 Netscape Communications Corporation.', u'Copyright (c) 1998, 1999, 2000 Thai Open Source Software Center Ltd and Clark Cooper', u'Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006 Expat maintainers.', u'Copyright (c) 2008 The Khronos Group Inc.', u'Copyright (c) 1997, 1998, 1999, 2000, 2001, 2002 by Remco Treffkorn', u'Copyright (c) 2005 by Eric S. 
Raymond.', u'Copyright (c) 2007, 2010 Linux Foundation', u'Copyright (c) 2006 IBM Corporation', u'Copyright (c) 2000, 2006 Sun Microsystems, Inc.', u'copyright (c) 1991-1998, Thomas G. Lane.', u'Copyright (c) 1995-2009 International Business Machines Corporation', u'(c) 1999 TaBE Project.', u'Copyright (c) 1999 Pai-Hsiang Hsiao.', u'Copyright (c) 1999 Computer', u'Copyright 1996 Chih-Hao Tsai Beckman Institute, University of Illinois', u'Copyright 2000, 2001, 2002, 2003 Nara', u'Copyright (c) 2002 the Initial Developer.', u'Copyright (c) 2006-2008 Jason Evans', u'COPYRIGHT HOLDER(S) AS', u'Copyright (c) International Business Machines Corp., 2002,2007', u'Copyright 2000-2007 Niels Provos', u'Copyright 2007-2009 Niels Provos and Nick Mathewson', u'Copyright (c) 2004 2005, Google Inc.', u'copyright (c) 1991-1998, Thomas G. Lane.', u'copyright by the Free Software Foundation', u'Copyright (c) 1998-2005 Julian Smart, Robert Roebling', u'Copyright (c) 2004, 2006-2009 Glenn Randers-Pehrson', u'Copyright (c) 2000-2002 Glenn Randers-Pehrson', u'Copyright (c) 1998, 1999 Glenn Randers-Pehrson', u'Copyright (c) 1996, 1997 Andreas Dilger', u'Copyright (c) 1995, 1996 Guy Eric Schalnat, Group 42, Inc.', u'Copyright (c) 2001-2006 Cisco Systems, Inc.', u'Copyright (c) 2010, Google Inc.', u'Copyright (c) 2010, Google Inc.', u'Copyright (c) 1998-2003 Daniel Veillard.', u'Copyright (c) 2001-2002 Daniel Veillard.', u'Copyright (c) 2001-2002 Thomas Broyer, Charlie Bozeman and Daniel Veillard.', u'Copyright (c) 1991 Free Software Foundation, Inc.', u'copyrighted by the Free Software Foundation', u'Copyright (c) 2005, 2006 Nick Galbreath', u'Copyright 2008 MolokoCacao', u'Copyright (c) 2004-2009 Sergey Lyubka', u'Portions Copyright (c) 2009 Gilbert Wellisch', u'Copyright (c) 2002 the Initial Developer.', u'Copyright (c) 1998 the Initial Developer.', u'Copyright (c) 2004-2009 by Mulle Kybernetik.', u'Copyright (c) 2008 The Khronos Group Inc.', u'Copyright (c) 1998-2008 The OpenSSL 
Project.', u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)', u'Copyright (c) 2009 The Chromium Authors.', u'Copyright 2007 Google Inc.', u'Copyright (c) 2010 The Chromium Authors.', u'Copyright 2008, Google Inc.', u'Copyright (c) 2007 Giampaolo Rodola', u'Copyright 2009, Google Inc.', u'Copyright (c) 2009 Mozilla Corporation', u'Copyright (c) 1998-2007 Marti Maria', u'Copyright (c) 1994-1996 SunSoft, Inc.', u'Copyright 2009 Google Inc.', u'Copyright (c) 2006 Bob Ippolito', u'Copyright 2002-2008 Xiph.org', u'Copyright 2002-2008 Jean-Marc Valin', u'Copyright 2005-2007 Analog Devices Inc.', u'Copyright 2005-2008 Commonwealth', u'Copyright 1993, 2002, 2006 David Rowe', u'Copyright 2003 EpicGames', u'Copyright 1992-1994 Jutta Degener, Carsten Bormann', u'Copyright (c) 1995-1998 The University of Utah and the Regents of the University of California', u'Copyright (c) 1998-2005 University of Chicago.', u'Copyright (c) 2005-2006 Arizona Board of Regents (University of Arizona).', u'Copyright (c) Andrew Tridgell 2004-2005', u'Copyright (c) Stefan Metzmacher 2006', u'Copyright (c) 2005, Google Inc.', u'Copyright (c) 2007 Free Software Foundation, Inc.', u'Copyright (c) 1998-1999 Netscape Communications Corporation.', u'Copyright (c) 2001-2010 Peter Johnson and other Yasm', # developers. u'Copyright (c) 1995-2010 Jean-loup Gailly and Mark Adler', u'Copyright (c) 1994-2006 Sun Microsystems Inc.', ] check_detection(expected, test_file) @expectedFailure def test_ics_chromium_chrome_browser_resources_about_credits_corrected(self): test_file = self.get_test_loc('ics/chromium-chrome-browser-resources/about_credits.html') expected = [ u'Copyright (c) 1991, 2000, 2001 by Lucent Technologies.', u'Copyright (c) 2008-2009, Google Inc.', u'Copyright (c) 1998-2000 the Initial Developer.', u'Copyright (c) 1994-2000 the Initial Developer.', u'(c) Copyright IBM Corporation. 
2006, 2006.', u'Copyright (c) 2006, Google Inc.', u'Copyright (c) 2000-2008 Julian Seward.', u'Copyright (c) 2007 Red Hat, inc', u'Copyright 2003-2005 Colin Percival', u'Copyright (c) 2000 the Initial Developer.', u'Copyright 1993 by OpenVision Technologies, Inc.', u'Copyright 2007 Google Inc.', u'Copyright (c) 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007 Alexander Kellett, Alexey Proskuryakov, Alex Mathews, Allan Sandfeld Jensen, Alp Toker, Anders Carlsson, Andrew Wellington, Antti', u'Copyright (c) 1991 Free Software Foundation, Inc.', u'copyrighted by the Free Software Foundation', u'Copyright (c) 1991, 1999 Free Software Foundation, Inc.', u'copyrighted by the Free Software Foundation', u'Copyright (c) 2002-2010 The ANGLE Project', u'Copyright (c) 2009 Apple Inc.', u'Copyright (c) 1999-2007 Apple Inc.', u'copyright (c) 1996-2010 Julian R Seward.', u'Copyright (c) 2010 The Chromium Authors.', u'Copyright (c) 1998-1999 Netscape Communications Corporation.', u'Copyright (c) 1998, 1999, 2000 Thai Open Source Software Center Ltd and Clark Cooper', u'Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006 Expat maintainers.', u'Copyright (c) 2008 The Khronos Group Inc.', u'Copyright (c) 1997, 1998, 1999, 2000, 2001, 2002 by Remco Treffkorn', u'Copyright (c) 2005 by Eric S. Raymond.', u'Copyright (c) 2007, 2010 Linux Foundation', u'Copyright (c) 2006 IBM Corporation', u'Copyright (c) 2000, 2006 Sun Microsystems, Inc.', u'copyright (c) 1991-1998, Thomas G. Lane.', u'Copyright (c) 1995-2009 International Business Machines Corporation and others', u'(c) 1999 TaBE Project.', u'Copyright (c) 1999 Pai-Hsiang Hsiao.', u'Copyright (c) 1999 Computer Systems and Communication Lab, Institute of Information Science, Academia Sinica.', u'Copyright 1996 Chih-Hao Tsai Beckman Institute, University of Illinois', u'Copyright 2000, 2001, 2002, 2003 Nara Institute of Science and Technology. 
', u'Copyright (c) 2002 the Initial Developer.', u'Copyright (c) 2006-2008 Jason Evans', u'COPYRIGHT HOLDER(S) AS', u'Copyright (c) International Business Machines Corp., 2002,2007', u'Copyright 2000-2007 Niels Provos', u'Copyright 2007-2009 Niels Provos and Nick Mathewson', u'Copyright (c) 2004 2005, Google Inc.', u'copyright (c) 1991-1998, Thomas G. Lane.', u'copyright by the Free Software Foundation', u'Copyright (c) 1998-2005 Julian Smart, Robert Roebling', u'Copyright (c) 2004, 2006-2009 Glenn Randers-Pehrson', u'Copyright (c) 2000-2002 Glenn Randers-Pehrson', u'Copyright (c) 1998, 1999 Glenn Randers-Pehrson', u'Copyright (c) 1996, 1997 Andreas Dilger', u'Copyright (c) 1995, 1996 Guy Eric Schalnat, Group 42, Inc.', u'Copyright (c) 2001-2006 Cisco Systems, Inc.', u'Copyright (c) 2010, Google Inc.', u'Copyright (c) 2010, Google Inc.', u'Copyright (c) 1998-2003 Daniel Veillard.', u'Copyright (c) 2001-2002 Daniel Veillard.', u'Copyright (c) 2001-2002 Thomas Broyer, Charlie Bozeman and Daniel Veillard.', u'Copyright (c) 1991 Free Software Foundation, Inc.', u'copyrighted by the Free Software Foundation', u'Copyright (c) 2005, 2006 Nick Galbreath', u'Copyright 2008 MolokoCacao', u'Copyright (c) 2004-2009 Sergey Lyubka', u'Copyright (c) 2009 Gilbert Wellisch', u'Copyright (c) 2002 the Initial Developer.', u'Copyright (c) 1998 the Initial Developer.', u'Copyright (c) 2004-2009 by Mulle Kybernetik.', u'Copyright (c) 2008 The Khronos Group Inc.', u'Copyright (c) 1998-2008 The OpenSSL Project.', u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)', u'Copyright (c) 2009 The Chromium Authors.', u'Copyright 2007 Google Inc.', u'Copyright (c) 2010 The Chromium Authors.', u'Copyright 2008, Google Inc.', u'Copyright (c) 2007 Giampaolo Rodola', u'Copyright 2009, Google Inc.', u'Copyright (c) 2009 Mozilla Corporation', u'Copyright (c) 1998-2007 Marti Maria', u'Copyright (c) 1994-1996 SunSoft, Inc.', u'Copyright 2009 Google Inc.', u'Copyright (c) 2006 Bob Ippolito', 
u'Copyright 2002-2008 Xiph.org', u'Copyright 2002-2008 Jean-Marc Valin', u'Copyright 2005-2007 Analog Devices Inc.', u'Copyright 2005-2008 Commonwealth Scientific and Industrial Research Organisation (CSIRO)', u'Copyright 1993, 2002, 2006 David Rowe', u'Copyright 2003 EpicGames', u'Copyright 1992-1994 Jutta Degener, Carsten Bormann', u'Copyright (c) 1995-1998 The University of Utah and the Regents of the University of California', u'Copyright (c) 1998-2005 University of Chicago.', u'Copyright (c) 2005-2006 Arizona Board of Regents (University of Arizona).', u'Copyright (c) Andrew Tridgell 2004-2005', u'Copyright (c) Stefan Metzmacher 2006', u'Copyright (c) 2005, Google Inc.', u'Copyright (c) 2007 Free Software Foundation, Inc.', u'Copyright (c) 1998-1999 Netscape Communications Corporation.', u'Copyright (c) 2001-2010 Peter Johnson', u'Copyright (c) 1995-2010 Jean-loup Gailly and Mark Adler', u'Copyright (c) 1994-2006 Sun Microsystems Inc.', ] check_detection(expected, test_file) def test_ics_chromium_chrome_browser_resources_gpu_internals_html(self): test_file = self.get_test_loc('ics/chromium-chrome-browser-resources/gpu_internals.html') expected = [ u'Copyright (c) 2010 The Chromium Authors.', ] check_detection(expected, test_file) def test_ics_chromium_chrome_browser_resources_keyboard_overlay_js(self): test_file = self.get_test_loc('ics/chromium-chrome-browser-resources/keyboard_overlay.js') expected = [ u'Copyright (c) 2011 The Chromium Authors.', ] check_detection(expected, test_file) def test_ics_chromium_chrome_browser_resources_file_manager_harness_html(self): test_file = self.get_test_loc('ics/chromium-chrome-browser-resources-file_manager/harness.html') expected = [ u'Copyright (c) 2011 The Chromium Authors.', ] check_detection(expected, test_file) def test_ics_chromium_chrome_browser_resources_file_manager_css_file_manager_css(self): test_file = self.get_test_loc('ics/chromium-chrome-browser-resources-file_manager-css/file_manager.css') expected = [ 
u'Copyright (c) 2011 The Chromium Authors.', ] check_detection(expected, test_file) def test_ics_chromium_chrome_browser_sync_engine_change_reorder_buffer_cc(self): test_file = self.get_test_loc('ics/chromium-chrome-browser-sync-engine/change_reorder_buffer.cc') expected = [ u'Copyright (c) 2006-2009 The Chromium Authors.', ] check_detection(expected, test_file) def test_ics_chromium_chrome_browser_sync_engine_clear_data_command_h(self): test_file = self.get_test_loc('ics/chromium-chrome-browser-sync-engine/clear_data_command.h') expected = [ u'Copyright (c) 2006-2010 The Chromium Authors.', ] check_detection(expected, test_file) def test_ics_chromium_chrome_browser_ui_cocoa_applescript_examples_advanced_tab_manipulation_applescript(self): test_file = self.get_test_loc('ics/chromium-chrome-browser-ui-cocoa-applescript-examples/advanced_tab_manipulation.applescript') expected = [ u'Copyright (c) 2010 The Chromium Authors.', ] check_detection(expected, test_file) def test_ics_chromium_chrome_browser_userfeedback_proto_annotations_proto(self): test_file = self.get_test_loc('ics/chromium-chrome-browser-userfeedback-proto/annotations.proto') expected = [ u'Copyright 2009 Google Inc.', ] check_detection(expected, test_file) def test_ics_chromium_chrome_browser_userfeedback_proto_chrome_proto(self): test_file = self.get_test_loc('ics/chromium-chrome-browser-userfeedback-proto/chrome.proto') expected = [ u'Copyright 2010 Google Inc.', ] check_detection(expected, test_file) def test_ics_chromium_chrome_common_extensions_docs_examples_api_i18n_cld_background_html(self): test_file = self.get_test_loc('ics/chromium-chrome-common-extensions-docs-examples-api-i18n-cld/background.html') expected = [ u'Copyright (c) 2009 The Chromium Authors.', ] check_detection(expected, test_file) def test_ics_chromium_chrome_common_extensions_docs_examples_api_notifications_background_html(self): test_file = 
self.get_test_loc('ics/chromium-chrome-common-extensions-docs-examples-api-notifications/background.html') expected = [ u'Copyright 2010 the Chromium Authors', ] check_detection(expected, test_file) def test_ics_chromium_chrome_common_extensions_docs_examples_apps_hello_java_hellolicenseservlet_java(self): test_file = self.get_test_loc('ics/chromium-chrome-common-extensions-docs-examples-apps-hello-java/HelloLicenseServlet.java') expected = [ u'Copyright 2010 the Chromium Authors', ] check_detection(expected, test_file) def test_ics_chromium_chrome_common_extensions_docs_examples_apps_hello_php_notice(self): test_file = self.get_test_loc('ics/chromium-chrome-common-extensions-docs-examples-apps-hello-php/NOTICE') expected = [ u'Copyright 2009 Google Inc.', u'Copyright (c) 2010 John Resig', u'Copyright (c) 2007 Andy Smith', u'Copyright (c) 2010, Mewp', ] check_detection(expected, test_file) def test_ics_chromium_chrome_common_extensions_docs_examples_apps_hello_php_popuplib_js(self): test_file = self.get_test_loc('ics/chromium-chrome-common-extensions-docs-examples-apps-hello-php/popuplib.js') expected = [ u'Copyright 2009 Google Inc.', ] check_detection(expected, test_file) def test_ics_chromium_chrome_common_extensions_docs_examples_apps_hello_php_lib_oauth_license_txt(self): test_file = self.get_test_loc('ics/chromium-chrome-common-extensions-docs-examples-apps-hello-php-lib-oauth/LICENSE.txt') expected = [ u'Copyright (c) 2007 Andy Smith', ] check_detection(expected, test_file) def test_ics_chromium_chrome_common_extensions_docs_examples_apps_hello_python_notice(self): test_file = self.get_test_loc('ics/chromium-chrome-common-extensions-docs-examples-apps-hello-python/NOTICE') expected = [ u'Copyright (c) 2007 Leah Culver', ] check_detection(expected, test_file) def test_ics_chromium_chrome_common_extensions_docs_examples_apps_hello_python_httplib2_init_py(self): test_file = 
self.get_test_loc('ics/chromium-chrome-common-extensions-docs-examples-apps-hello-python-httplib2/__init__.py') expected = [ u'Copyright 2006, Joe Gregorio contributors', ] check_detection(expected, test_file) @expectedFailure def test_ics_chromium_chrome_common_extensions_docs_examples_apps_hello_python_httplib2_init_py_extra_contributors(self): test_file = self.get_test_loc('ics/chromium-chrome-common-extensions-docs-examples-apps-hello-python-httplib2/__init__.py') expected = [ u'Copyright 2006, Joe Gregorio', ] check_detection(expected, test_file) def test_ics_chromium_chrome_common_extensions_docs_examples_apps_hello_python_oauth2_init_py(self): test_file = self.get_test_loc('ics/chromium-chrome-common-extensions-docs-examples-apps-hello-python-oauth2/__init__.py') expected = [ u'Copyright (c) 2007-2010 Leah Culver, Joe Stump, Mark Paschal, Vic Fryzel', ] check_detection(expected, test_file) def test_ics_chromium_chrome_common_extensions_docs_examples_extensions_benchmark_jquery_jquery_1_4_2_min_js(self): test_file = self.get_test_loc('ics/chromium-chrome-common-extensions-docs-examples-extensions-benchmark-jquery/jquery-1.4.2.min.js') expected = [ u'Copyright 2010, John Resig', u'Copyright 2010, The Dojo Foundation', ] check_detection(expected, test_file) def test_ics_chromium_chrome_common_extensions_docs_examples_extensions_benchmark_jst_jsevalcontext_js(self): test_file = self.get_test_loc('ics/chromium-chrome-common-extensions-docs-examples-extensions-benchmark-jst/jsevalcontext.js') expected = [ u'Copyright 2006 Google Inc.', ] check_detection(expected, test_file) def test_ics_chromium_chrome_common_extensions_docs_examples_extensions_benchmark_util_sorttable_js(self): test_file = self.get_test_loc('ics/chromium-chrome-common-extensions-docs-examples-extensions-benchmark-util/sorttable.js') expected = [ u'Copyright 2006, Dean Edwards', ] check_detection(expected, test_file) def 
test_ics_chromium_chrome_common_extensions_docs_examples_extensions_gdocs_chrome_ex_oauthsimple_js(self): test_file = self.get_test_loc('ics/chromium-chrome-common-extensions-docs-examples-extensions-gdocs/chrome_ex_oauthsimple.js') expected = [ u'copyright unitedHeroes.net', u'Copyright (c) 2009, unitedHeroes.net', ] check_detection(expected, test_file) def test_ics_chromium_chrome_common_extensions_docs_examples_extensions_imageinfo_notice(self): test_file = self.get_test_loc('ics/chromium-chrome-common-extensions-docs-examples-extensions-imageinfo/NOTICE') expected = [ u'Copyright (c) 2008 Jacob Seidelin, jseidelin@nihilogic.dk', ] check_detection(expected, test_file) def test_ics_chromium_chrome_common_extensions_docs_examples_extensions_imageinfo_imageinfo_binaryajax_js(self): test_file = self.get_test_loc('ics/chromium-chrome-common-extensions-docs-examples-extensions-imageinfo-imageinfo/binaryajax.js') expected = [ u'Copyright (c) 2008 Jacob Seidelin, cupboy@gmail.com', ] check_detection(expected, test_file) def test_ics_chromium_chrome_common_extensions_docs_examples_extensions_imageinfo_imageinfo_imageinfo_js(self): test_file = self.get_test_loc('ics/chromium-chrome-common-extensions-docs-examples-extensions-imageinfo-imageinfo/imageinfo.js') expected = [ u'Copyright (c) 2008 Jacob Seidelin, jseidelin@nihilogic.dk', ] check_detection(expected, test_file) def test_ics_chromium_chrome_common_extensions_docs_examples_extensions_oauth_contacts_notice(self): test_file = self.get_test_loc('ics/chromium-chrome-common-extensions-docs-examples-extensions-oauth_contacts/NOTICE') expected = [ u'copyright unitedHeroes.net', u'Copyright (c) 2009, unitedHeroes.net', u'Copyright Paul Johnston 2000 - 2002.', ] check_detection(expected, test_file) def test_ics_chromium_chrome_common_extensions_docs_examples_extensions_proxy_configuration_test_jsunittest_js(self): test_file = 
self.get_test_loc('ics/chromium-chrome-common-extensions-docs-examples-extensions-proxy_configuration-test/jsunittest.js') expected = [ u'(c) 2008 Dr Nic Williams', ] check_detection(expected, test_file) def test_ics_chromium_chrome_common_extensions_docs_examples_extensions_wave_background_html(self): test_file = self.get_test_loc('ics/chromium-chrome-common-extensions-docs-examples-extensions-wave/background.html') expected = [ u'Copyright 2010 Google', ] check_detection(expected, test_file) def test_ics_chromium_crypto_third_party_nss_blapi_h(self): test_file = self.get_test_loc('ics/chromium-crypto-third_party-nss/blapi.h') expected = [ u'Copyright (c) 1994-2000 the Initial Developer.', ] check_detection(expected, test_file) def test_ics_chromium_crypto_third_party_nss_sha256_h(self): test_file = self.get_test_loc('ics/chromium-crypto-third_party-nss/sha256.h') expected = [ u'Copyright (c) 2002 the Initial Developer.', ] check_detection(expected, test_file) def test_ics_chromium_googleurl_license_txt(self): test_file = self.get_test_loc('ics/chromium-googleurl/LICENSE.txt') expected = [ u'Copyright 2007, Google Inc.', u'Copyright (c) 1998 the Initial Developer.', ] check_detection(expected, test_file) def test_ics_chromium_googleurl_base_basictypes_h(self): test_file = self.get_test_loc('ics/chromium-googleurl-base/basictypes.h') expected = [ u'Copyright 2001 - 2003 Google Inc.', ] check_detection(expected, test_file) def test_ics_chromium_googleurl_base_logging_cc(self): test_file = self.get_test_loc('ics/chromium-googleurl-base/logging.cc') expected = [ u'Copyright 2007, Google Inc.', ] check_detection(expected, test_file) def test_ics_chromium_googleurl_base_logging_h(self): test_file = self.get_test_loc('ics/chromium-googleurl-base/logging.h') expected = [ u'Copyright 2006 Google Inc.', ] check_detection(expected, test_file) def test_ics_chromium_googleurl_base_scoped_ptr_h(self): test_file = self.get_test_loc('ics/chromium-googleurl-base/scoped_ptr.h') 
expected = [ u'(c) Copyright Greg Colvin and Beman Dawes 1998, 1999.', u'Copyright (c) 2001, 2002 Peter Dimov', ] check_detection(expected, test_file) def test_ics_chromium_googleurl_src_gurl_unittest_cc(self): test_file = self.get_test_loc('ics/chromium-googleurl-src/gurl_unittest.cc') expected = [ u'Copyright 2007 Google Inc.', ] check_detection(expected, test_file) def test_ics_chromium_googleurl_src_url_canon_ip_cc(self): test_file = self.get_test_loc('ics/chromium-googleurl-src/url_canon_ip.cc') expected = [ u'Copyright 2009, Google Inc.', ] check_detection(expected, test_file) def test_ics_chromium_googleurl_src_url_common_h(self): test_file = self.get_test_loc('ics/chromium-googleurl-src/url_common.h') expected = [ u'Copyright 2010, Google Inc.', ] check_detection(expected, test_file) def test_ics_chromium_googleurl_src_url_parse_cc(self): test_file = self.get_test_loc('ics/chromium-googleurl-src/url_parse.cc') expected = [ u'Copyright (c) 1998 the Initial Developer.', ] check_detection(expected, test_file) def test_ics_chromium_googleurl_src_url_test_utils_h(self): test_file = self.get_test_loc('ics/chromium-googleurl-src/url_test_utils.h') expected = [ u'Copyright 2007 Google Inc.', ] check_detection(expected, test_file) def test_ics_chromium_net_base_cookie_monster_cc(self): test_file = self.get_test_loc('ics/chromium-net-base/cookie_monster.cc') expected = [ u'Copyright (c) 2011 The Chromium Authors.', u'Copyright (c) 2003 the Initial Developer.', ] check_detection(expected, test_file) def test_ics_chromium_net_base_effective_tld_names_dat(self): test_file = self.get_test_loc('ics/chromium-net-base/effective_tld_names.dat') expected = [ u'Copyright (c) 2007 the Initial Developer.', ] check_detection(expected, test_file) def test_ics_chromium_net_base_ssl_false_start_blacklist_process_cc(self): test_file = self.get_test_loc('ics/chromium-net-base/ssl_false_start_blacklist_process.cc') expected = [ u'Copyright (c) 2010 The Chromium Authors.', u'Copyright 
(c) 2010 The Chromium Authors.', ] check_detection(expected, test_file) def test_ics_chromium_net_base_x509_cert_types_mac_unittest_cc(self): test_file = self.get_test_loc('ics/chromium-net-base/x509_cert_types_mac_unittest.cc') expected = [ u'Copyright (c) 2010 The Chromium Authors.', u'(c) Kasm 2005', u'(c) 1999 Entrust.net', u'(c) Kasm 2005', u"(c) 1999 Entrust.net Limited', entrust.organization_unit_names", ] check_detection(expected, test_file) @expectedFailure def test_ics_chromium_net_base_x509_cert_types_mac_unittest_cc_trail_limited_extra_junk(self): test_file = self.get_test_loc('ics/chromium-net-base/x509_cert_types_mac_unittest.cc') expected = [ u'Copyright (c) 2010 The Chromium Authors.', u'(c) Kasm 2005', u'(c) 1999 Entrust.net Limited', u'(c) Kasm 2005', u'(c) 1999 Entrust.net', ] check_detection(expected, test_file) def test_ics_chromium_net_base_x509_certificate_unittest_cc(self): test_file = self.get_test_loc('ics/chromium-net-base/x509_certificate_unittest.cc') expected = [ u'Copyright (c) 2011 The Chromium Authors.', u"(c) 06', issuer.organization_unit_names", ] check_detection(expected, test_file) def test_ics_chromium_net_data_proxy_resolver_perftest_no_ads_pac(self): test_file = self.get_test_loc('ics/chromium-net-data-proxy_resolver_perftest/no-ads.pac') expected = [ u'Copyright 1996-2004, John', ] check_detection(expected, test_file) @expectedFailure def test_ics_chromium_net_data_proxy_resolver_perftest_no_ads_pac_trail_name(self): test_file = self.get_test_loc('ics/chromium-net-data-proxy_resolver_perftest/no-ads.pac') expected = [ u'Copyright 1996-2004, John LoVerso.', ] check_detection(expected, test_file) def test_ics_chromium_net_disk_cache_sparse_control_cc(self): test_file = self.get_test_loc('ics/chromium-net-disk_cache/sparse_control.cc') expected = [ u'Copyright (c) 2009-2010 The Chromium Authors.', ] check_detection(expected, test_file) def test_ics_chromium_net_ftp_ftp_network_layer_cc(self): test_file = 
self.get_test_loc('ics/chromium-net-ftp/ftp_network_layer.cc') expected = [ u'Copyright (c) 2008 The Chromium Authors.', ] check_detection(expected, test_file) def test_ics_chromium_net_http_des_cc(self): test_file = self.get_test_loc('ics/chromium-net-http/des.cc') expected = [ u'Copyright (c) 2011 The Chromium Authors.', u'Copyright (c) 2003 IBM Corporation.', ] check_detection(expected, test_file) def test_ics_chromium_net_http_http_auth_handler_ntlm_portable_cc(self): test_file = self.get_test_loc('ics/chromium-net-http/http_auth_handler_ntlm_portable.cc') expected = [ u'Copyright (c) 2010 The Chromium Authors.', u'Copyright (c) 2003 IBM Corporation.', ] check_detection(expected, test_file) def test_ics_chromium_net_http_http_chunked_decoder_cc(self): test_file = self.get_test_loc('ics/chromium-net-http/http_chunked_decoder.cc') expected = [ u'Copyright (c) 2010 The Chromium Authors.', u'Copyright (c) 2001 the Initial Developer.', ] check_detection(expected, test_file) def test_ics_chromium_net_http_md4_cc(self): test_file = self.get_test_loc('ics/chromium-net-http/md4.cc') expected = [ u'Copyright (c) 2003 IBM Corporation.', ] check_detection(expected, test_file) def test_ics_chromium_net_socket_ssl_client_socket_nss_cc(self): test_file = self.get_test_loc('ics/chromium-net-socket/ssl_client_socket_nss.cc') expected = [ u'Copyright (c) 2011 The Chromium Authors.', u'Copyright (c) 2000 the Initial Developer.', ] check_detection(expected, test_file) def test_ics_chromium_net_third_party_gssapi_gssapi_h(self): test_file = self.get_test_loc('ics/chromium-net-third_party-gssapi/gssapi.h') expected = [ u'Copyright 1993 by OpenVision Technologies, Inc.', ] check_detection(expected, test_file) def test_ics_chromium_net_third_party_gssapi_license(self): test_file = self.get_test_loc('ics/chromium-net-third_party-gssapi/LICENSE') expected = [ u'Copyright 1993 by OpenVision Technologies, Inc.', ] check_detection(expected, test_file) def 
test_ics_chromium_net_tools_spdyshark_makefile_am(self): test_file = self.get_test_loc('ics/chromium-net-tools-spdyshark/Makefile.am') expected = [ u'Copyright 1998 Gerald Combs', ] check_detection(expected, test_file) def test_ics_chromium_net_tools_spdyshark_packet_spdy_c(self): test_file = self.get_test_loc('ics/chromium-net-tools-spdyshark/packet-spdy.c') expected = [ u'Copyright 2010, Google Inc. Eric Shienbrood <ers@google.com>', u'Copyright 1998 Gerald Combs', ] check_detection(expected, test_file) def test_ics_chromium_net_tools_spdyshark_plugin_rc_in(self): test_file = self.get_test_loc('ics/chromium-net-tools-spdyshark/plugin.rc.in') expected = [ u'Copyright (c) 1998 Gerald Combs <gerald@wireshark.org>, Gilbert Ramirez <gram@alumni.rice.edu>', ] check_detection(expected, test_file) def test_ics_chromium_net_tools_testserver_chromiumsync_py(self): test_file = self.get_test_loc('ics/chromium-net-tools-testserver/chromiumsync.py') expected = [ u'Copyright (c) 2010 The Chromium Authors.', ] check_detection(expected, test_file) def test_ics_chromium_net_tools_tld_cleanup_tld_cleanup_cc(self): test_file = self.get_test_loc('ics/chromium-net-tools-tld_cleanup/tld_cleanup.cc') expected = [ u'Copyright (c) 2006-2008 The Chromium Authors.', u'Copyright (c) 2009 The Chromium Authors.', ] check_detection(expected, test_file) def test_ics_chromium_sdch_open_vcdiff_aclocal_m4(self): test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff/aclocal.m4') expected = [ u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 Free Software Foundation, Inc.', u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 Free Software Foundation, Inc.', u'Copyright (c) 2002, 2003, 2005, 2006, 2007 Free Software Foundation, Inc.', u'Copyright (c) 2001, 2003, 2005 Free Software Foundation, Inc.', u'Copyright (c) 1997, 2000, 2001, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.', u'Copyright (c) 1999, 
2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.', u'Copyright (c) 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.', u'Copyright (c) 1996, 1997, 2000, 2001, 2003, 2005 Free Software Foundation, Inc.', u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2008 Free Software Foundation, Inc.', u'Copyright (c) 2001, 2003, 2005 Free Software Foundation, Inc.', u'Copyright (c) 2003, 2005 Free Software Foundation, Inc.', u'Copyright (c) 2001, 2002, 2003, 2005 Free Software Foundation, Inc.', u'Copyright (c) 1997, 1999, 2000, 2001, 2003, 2004, 2005 Free Software Foundation, Inc.', u'Copyright (c) 2003, 2004, 2005, 2006 Free Software Foundation, Inc.', u'Copyright (c) 2001, 2002, 2003, 2005 Free Software Foundation, Inc.', u'Copyright (c) 1996, 1997, 2000, 2001, 2003, 2005 Free Software Foundation, Inc.', u'Copyright (c) 2001, 2003, 2005 Free Software Foundation, Inc.', u'Copyright (c) 2006 Free Software Foundation, Inc.', u'Copyright (c) 2004, 2005 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_chromium_sdch_open_vcdiff_compile(self): test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff/compile') expected = [ u'Copyright 1999, 2000 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_chromium_sdch_open_vcdiff_configure(self): test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff/configure') expected = [ u'Copyright (c) 1992, 1993, 1994, 1995, 1996, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.', u'Copyright (c) 1992, 1993, 1994, 1995, 1996, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.', u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 Free Software Foundation, Inc.', u'Copyright (c) 2006 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def 
test_ics_chromium_sdch_open_vcdiff_copying(self): test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff/COPYING') expected = [ u'Copyright (c) 2008, Google Inc.', ] check_detection(expected, test_file) def test_ics_chromium_sdch_open_vcdiff_depcomp(self): test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff/depcomp') expected = [ u'Copyright (c) 1999, 2000, 2003, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_chromium_sdch_open_vcdiff_install(self): test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff/INSTALL') expected = [ u'Copyright (c) 1994, 1995, 1996, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_chromium_sdch_open_vcdiff_ltmain_sh(self): test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff/ltmain.sh') expected = [ u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005, 2006, 2007, 2008 Free Software Foundation, Inc.', u'Gordon Matzigkeit <gord@gnu.ai.mit.edu>, 1996', u'Copyright (c) 2008 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_chromium_sdch_open_vcdiff_missing(self): test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff/missing') expected = [ u'Copyright (c) 1996, 1997, 1999, 2000, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_chromium_sdch_open_vcdiff_man_vcdiff_1(self): test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff-man/vcdiff.1') expected = [ u'Copyright (c) 2008, Google Inc.', ] check_detection(expected, test_file) def test_ics_chromium_sdch_open_vcdiff_src_addrcache_cc(self): test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff-src/addrcache.cc') expected = [ u'Copyright 2007 Google Inc. 
Author Lincoln Smith', ] check_detection(expected, test_file) @expectedFailure def test_ics_chromium_sdch_open_vcdiff_src_addrcache_cc_extra_author(self): test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff-src/addrcache.cc') expected = [ u'Copyright 2007 Google Inc.', ] check_detection(expected, test_file) def test_ics_chromium_sdch_open_vcdiff_src_adler32_c(self): test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff-src/adler32.c') expected = [ u'Copyright (c) 1995-2004 Mark Adler', ] check_detection(expected, test_file) def test_ics_chromium_sdch_open_vcdiff_src_blockhash_cc(self): test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff-src/blockhash.cc') expected = [ u'Copyright 2006, 2008 Google Inc. Authors Chandra Chereddi, Lincoln Smith', ] check_detection(expected, test_file) @expectedFailure def test_ics_chromium_sdch_open_vcdiff_src_blockhash_cc_extra_author(self): test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff-src/blockhash.cc') expected = [ u'Copyright 2006, 2008 Google Inc.', ] check_detection(expected, test_file) def test_ics_chromium_sdch_open_vcdiff_src_blockhash_test_cc(self): test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff-src/blockhash_test.cc') expected = [ u'Copyright 2008 Google Inc. 
Author Lincoln Smith', ] check_detection(expected, test_file) @expectedFailure def test_ics_chromium_sdch_open_vcdiff_src_blockhash_test_cc_extra_author(self): test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff-src/blockhash_test.cc') expected = [ u'Copyright 2008 Google Inc.', ] check_detection(expected, test_file) def test_ics_chromium_sdch_open_vcdiff_src_codetablewriter_interface_h(self): test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff-src/codetablewriter_interface.h') expected = [ u'Copyright 2008 Google Inc.', ] check_detection(expected, test_file) def test_ics_chromium_sdch_open_vcdiff_src_gflags_cc(self): test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff-src/gflags.cc') expected = [ u'Copyright (c) 2006, Google Inc.', ] check_detection(expected, test_file) def test_ics_chromium_sdch_open_vcdiff_src_mutex_h(self): test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff-src/mutex.h') expected = [ u'Copyright (c) 2007, Google Inc.', ] check_detection(expected, test_file) def test_ics_chromium_sdch_open_vcdiff_src_rolling_hash_h(self): test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff-src/rolling_hash.h') expected = [ u'Copyright 2007, 2008 Google Inc. Authors Jeff Dean, Sanjay Ghemawat, Lincoln Smith', ] check_detection(expected, test_file) @expectedFailure def test_ics_chromium_sdch_open_vcdiff_src_rolling_hash_h_extra_author(self): test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff-src/rolling_hash.h') expected = [ u'Copyright 2007, 2008 Google Inc.', ] check_detection(expected, test_file) def test_ics_chromium_sdch_open_vcdiff_src_vcdiff_test_sh(self): test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff-src/vcdiff_test.sh') expected = [ u'Copyright 2008 Google Inc. 
Author Lincoln Smith', ] check_detection(expected, test_file) @expectedFailure def test_ics_chromium_sdch_open_vcdiff_src_vcdiff_test_sh_extra_author(self): test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff-src/vcdiff_test.sh') expected = [ u'Copyright 2008 Google Inc.', ] check_detection(expected, test_file) def test_ics_chromium_sdch_open_vcdiff_src_zconf_h(self): test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff-src/zconf.h') expected = [ u'Copyright (c) 1995-2005 Jean-loup Gailly.', ] check_detection(expected, test_file) def test_ics_chromium_sdch_open_vcdiff_src_zlib_h(self): test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff-src/zlib.h') expected = [ u'Copyright (c) 1995-2005 Jean-loup Gailly and Mark Adler', ] check_detection(expected, test_file) def test_ics_chromium_sdch_open_vcdiff_src_google_output_string_h(self): test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff-src-google/output_string.h') expected = [ u'Copyright 2008 Google Inc. Author Lincoln Smith', ] check_detection(expected, test_file) @expectedFailure def test_ics_chromium_sdch_open_vcdiff_src_google_output_string_h_extra_author(self): test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff-src-google/output_string.h') expected = [ u'Copyright 2008 Google Inc.', ] check_detection(expected, test_file) def test_ics_chromium_sdch_open_vcdiff_src_gtest_gtest_cc(self): test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff-src-gtest/gtest.cc') expected = [ u'Copyright 2005, Google Inc.', ] check_detection(expected, test_file) def test_ics_chromium_sdch_open_vcdiff_src_gtest_gtest_main_cc(self): test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff-src-gtest/gtest_main.cc') expected = [ u'Copyright 2006, Google Inc.', ] check_detection(expected, test_file) def test_ics_chromium_sdch_open_vcdiff_vsprojects_vcdiff_test_bat(self): test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff-vsprojects/vcdiff_test.bat') expected = [ u'Copyright 
2008 Google Inc. Author Lincoln Smith', ] check_detection(expected, test_file) @expectedFailure def test_ics_chromium_sdch_open_vcdiff_vsprojects_vcdiff_test_bat_extra_author(self): test_file = self.get_test_loc('ics/chromium-sdch-open-vcdiff-vsprojects/vcdiff_test.bat') expected = [ u'Copyright 2008 Google Inc.', ] check_detection(expected, test_file) def test_ics_chromium_testing_generate_gmock_mutant_py(self): test_file = self.get_test_loc('ics/chromium-testing/generate_gmock_mutant.py') expected = [ u'Copyright (c) 2009 The Chromium Authors.', u'Copyright (c) 2009 The Chromium Authors.', ] check_detection(expected, test_file) def test_ics_chromium_testing_gmock_copying(self): test_file = self.get_test_loc('ics/chromium-testing-gmock/COPYING') expected = [ u'Copyright 2008, Google Inc.', ] check_detection(expected, test_file) def test_ics_chromium_testing_gmock_include_gmock_gmock_cardinalities_h(self): test_file = self.get_test_loc('ics/chromium-testing-gmock-include-gmock/gmock-cardinalities.h') expected = [ u'Copyright 2007, Google Inc.', ] check_detection(expected, test_file) def test_ics_chromium_testing_gmock_scripts_fuse_gmock_files_py(self): test_file = self.get_test_loc('ics/chromium-testing-gmock-scripts/fuse_gmock_files.py') expected = [ u'Copyright 2009, Google Inc.', ] check_detection(expected, test_file) def test_ics_chromium_testing_gmock_scripts_gmock_doctor_py(self): test_file = self.get_test_loc('ics/chromium-testing-gmock-scripts/gmock_doctor.py') expected = [ u'Copyright 2008, Google Inc.', ] check_detection(expected, test_file) def test_ics_chromium_testing_gmock_scripts_upload_py(self): test_file = self.get_test_loc('ics/chromium-testing-gmock-scripts/upload.py') expected = [ u'Copyright 2007 Google Inc.', ] check_detection(expected, test_file) def test_ics_chromium_testing_gmock_scripts_generator_gmock_gen_py(self): test_file = self.get_test_loc('ics/chromium-testing-gmock-scripts-generator/gmock_gen.py') expected = [ u'Copyright 2008 
Google Inc.', ] check_detection(expected, test_file) def test_ics_chromium_testing_gmock_scripts_generator_cpp_ast_py(self): test_file = self.get_test_loc('ics/chromium-testing-gmock-scripts-generator-cpp/ast.py') expected = [ u'Copyright 2007 Neal Norwitz', u'Portions Copyright 2007 Google Inc.', ] check_detection(expected, test_file) def test_ics_chromium_testing_gmock_scripts_generator_cpp_gmock_class_test_py(self): test_file = self.get_test_loc('ics/chromium-testing-gmock-scripts-generator-cpp/gmock_class_test.py') expected = [ u'Copyright 2009 Neal Norwitz', u'Portions Copyright 2009 Google Inc.', ] check_detection(expected, test_file) def test_ics_chromium_testing_gmock_test_gmock_test_utils_py(self): test_file = self.get_test_loc('ics/chromium-testing-gmock-test/gmock_test_utils.py') expected = [ u'Copyright 2006, Google Inc.', ] check_detection(expected, test_file) def test_ics_chromium_testing_gtest_include_gtest_internal_gtest_linked_ptr_h(self): test_file = self.get_test_loc('ics/chromium-testing-gtest-include-gtest-internal/gtest-linked_ptr.h') expected = [ u'Copyright 2003 Google Inc.', ] check_detection(expected, test_file) def test_ics_chromium_testing_gtest_include_gtest_internal_gtest_tuple_h(self): test_file = self.get_test_loc('ics/chromium-testing-gtest-include-gtest-internal/gtest-tuple.h') expected = [ u'Copyright 2009 Google Inc.', ] check_detection(expected, test_file) def test_ics_chromium_testing_gtest_samples_sample10_unittest_cc(self): test_file = self.get_test_loc('ics/chromium-testing-gtest-samples/sample10_unittest.cc') expected = [ u'Copyright 2009 Google Inc.', ] check_detection(expected, test_file) def test_ics_chromium_testing_gtest_scripts_gen_gtest_pred_impl_py(self): test_file = self.get_test_loc('ics/chromium-testing-gtest-scripts/gen_gtest_pred_impl.py') expected = [ u'Copyright 2006, Google Inc.', u'Copyright 2006, Google Inc.', u'Copyright 2006, Google Inc.', ] check_detection(expected, test_file) def 
test_ics_chromium_testing_gtest_src_gtest_port_cc(self): test_file = self.get_test_loc('ics/chromium-testing-gtest-src/gtest-port.cc') expected = [ u'Copyright 2008, Google Inc.', ] check_detection(expected, test_file) def test_ics_chromium_testing_gtest_test_gtest_catch_exceptions_test_py(self): test_file = self.get_test_loc('ics/chromium-testing-gtest-test/gtest_catch_exceptions_test.py') expected = [ u'Copyright 2010 Google Inc.', ] check_detection(expected, test_file) def test_ics_chromium_testing_gtest_test_gtest_filter_unittest_py(self): test_file = self.get_test_loc('ics/chromium-testing-gtest-test/gtest_filter_unittest.py') expected = [ u'Copyright 2005 Google Inc.', ] check_detection(expected, test_file) def test_ics_chromium_testing_gtest_test_gtest_shuffle_test_py(self): test_file = self.get_test_loc('ics/chromium-testing-gtest-test/gtest_shuffle_test.py') expected = [ u'Copyright 2009 Google Inc.', ] check_detection(expected, test_file) def test_ics_chromium_testing_gtest_test_gtest_linked_ptr_test_cc(self): test_file = self.get_test_loc('ics/chromium-testing-gtest-test/gtest-linked_ptr_test.cc') expected = [ u'Copyright 2003, Google Inc.', ] check_detection(expected, test_file) def test_ics_chromium_third_party_libevent_buffer_c(self): test_file = self.get_test_loc('ics/chromium-third_party-libevent/buffer.c') expected = [ u'Copyright (c) 2002, 2003 Niels Provos <provos@citi.umich.edu>', ] check_detection(expected, test_file) def test_ics_chromium_third_party_libevent_config_guess(self): test_file = self.get_test_loc('ics/chromium-third_party-libevent/config.guess') expected = [ u'Copyright (c) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 Free Software Foundation, Inc.', u'Copyright (c) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def 
test_ics_chromium_third_party_libevent_configure(self): test_file = self.get_test_loc('ics/chromium-third_party-libevent/configure') expected = [ u'Copyright (c) 1992, 1993, 1994, 1995, 1996, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 Free Software Foundation, Inc.', u'Copyright (c) 1992, 1993, 1994, 1995, 1996, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 Free Software Foundation, Inc.', u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 Free Software Foundation, Inc.', u'Copyright (c) 2008 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_chromium_third_party_libevent_devpoll_c(self): test_file = self.get_test_loc('ics/chromium-third_party-libevent/devpoll.c') expected = [ u'Copyright 2000-2004 Niels Provos <provos@citi.umich.edu>', ] check_detection(expected, test_file) def test_ics_chromium_third_party_libevent_epoll_c(self): test_file = self.get_test_loc('ics/chromium-third_party-libevent/epoll.c') expected = [ u'Copyright 2000-2003 Niels Provos <provos@citi.umich.edu>', ] check_detection(expected, test_file) def test_ics_chromium_third_party_libevent_epoll_sub_c(self): test_file = self.get_test_loc('ics/chromium-third_party-libevent/epoll_sub.c') expected = [ u'Copyright 2003 Niels Provos <provos@citi.umich.edu>', ] check_detection(expected, test_file) def test_ics_chromium_third_party_libevent_evbuffer_c(self): test_file = self.get_test_loc('ics/chromium-third_party-libevent/evbuffer.c') expected = [ u'Copyright (c) 2002-2004 Niels Provos <provos@citi.umich.edu>', ] check_detection(expected, test_file) def test_ics_chromium_third_party_libevent_evdns_3(self): test_file = self.get_test_loc('ics/chromium-third_party-libevent/evdns.3') expected = [ u'Copyright (c) 2006 Niels Provos <provos@citi.umich.edu>', ] check_detection(expected, test_file) def test_ics_chromium_third_party_libevent_evdns_h(self): test_file = 
self.get_test_loc('ics/chromium-third_party-libevent/evdns.h') expected = [ u'Copyright (c) 2006 Niels Provos <provos@citi.umich.edu>', ] check_detection(expected, test_file) def test_ics_chromium_third_party_libevent_event_3(self): test_file = self.get_test_loc('ics/chromium-third_party-libevent/event.3') expected = [ u'Copyright (c) 2000 Artur Grabowski <art@openbsd.org>', ] check_detection(expected, test_file) def test_ics_chromium_third_party_libevent_event_h(self): test_file = self.get_test_loc('ics/chromium-third_party-libevent/event.h') expected = [ u'Copyright (c) 2000-2007 Niels Provos <provos@citi.umich.edu>', ] check_detection(expected, test_file) def test_ics_chromium_third_party_libevent_event_rpcgen_py(self): test_file = self.get_test_loc('ics/chromium-third_party-libevent/event_rpcgen.py') expected = [ u'Copyright (c) 2005 Niels Provos <provos@citi.umich.edu>', ] check_detection(expected, test_file) def test_ics_chromium_third_party_libevent_event_tagging_c(self): test_file = self.get_test_loc('ics/chromium-third_party-libevent/event_tagging.c') expected = [ u'Copyright (c) 2003, 2004 Niels Provos <provos@citi.umich.edu>', ] check_detection(expected, test_file) def test_ics_chromium_third_party_libevent_event_internal_h(self): test_file = self.get_test_loc('ics/chromium-third_party-libevent/event-internal.h') expected = [ u'Copyright (c) 2000-2004 Niels Provos <provos@citi.umich.edu>', ] check_detection(expected, test_file) def test_ics_chromium_third_party_libevent_evport_c(self): test_file = self.get_test_loc('ics/chromium-third_party-libevent/evport.c') expected = [ u'Copyright (c) 2007 Sun Microsystems.', ] check_detection(expected, test_file) def test_ics_chromium_third_party_libevent_evsignal_h(self): test_file = self.get_test_loc('ics/chromium-third_party-libevent/evsignal.h') expected = [ u'Copyright 2000-2002 Niels Provos <provos@citi.umich.edu>', ] check_detection(expected, test_file) def 
test_ics_chromium_third_party_libevent_evutil_c(self): test_file = self.get_test_loc('ics/chromium-third_party-libevent/evutil.c') expected = [ u'Copyright (c) 2007 Niels Provos <provos@citi.umich.edu>', ] check_detection(expected, test_file) def test_ics_chromium_third_party_libevent_http_c(self): test_file = self.get_test_loc('ics/chromium-third_party-libevent/http.c') expected = [ u'Copyright (c) 2002-2006 Niels Provos <provos@citi.umich.edu>', ] check_detection(expected, test_file) def test_ics_chromium_third_party_libevent_http_internal_h(self): test_file = self.get_test_loc('ics/chromium-third_party-libevent/http-internal.h') expected = [ u'Copyright 2001 Niels Provos <provos@citi.umich.edu>', ] check_detection(expected, test_file) def test_ics_chromium_third_party_libevent_license(self): test_file = self.get_test_loc('ics/chromium-third_party-libevent/LICENSE') expected = [ u'Copyright 2000-2007 Niels Provos <provos@citi.umich.edu>', u'Copyright 2007-2009 Niels Provos and Nick Mathewson', ] check_detection(expected, test_file) def test_ics_chromium_third_party_libevent_log_c(self): test_file = self.get_test_loc('ics/chromium-third_party-libevent/log.c') expected = [ u'Copyright (c) 2005 Nick Mathewson <nickm@freehaven.net>', u'Copyright (c) 2000 Dug Song <dugsong@monkey.org>', u'Copyright (c) 1993 The Regents of the University of California.', ] check_detection(expected, test_file) def test_ics_chromium_third_party_libevent_min_heap_h(self): test_file = self.get_test_loc('ics/chromium-third_party-libevent/min_heap.h') expected = [ u'Copyright (c) 2006 Maxim Yegorushkin <maxim.yegorushkin@gmail.com>', ] check_detection(expected, test_file) def test_ics_chromium_third_party_libevent_missing(self): test_file = self.get_test_loc('ics/chromium-third_party-libevent/missing') expected = [ u'Copyright (c) 1996, 1997, 1999, 2000, 2002, 2003 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def 
test_ics_chromium_third_party_libevent_strlcpy_c(self): test_file = self.get_test_loc('ics/chromium-third_party-libevent/strlcpy.c') expected = [ u'Copyright (c) 1998 Todd C. Miller <Todd.Miller@courtesan.com>', ] check_detection(expected, test_file) def test_ics_chromium_third_party_libevent_compat_sys_libevent_time_h(self): test_file = self.get_test_loc('ics/chromium-third_party-libevent-compat-sys/_libevent_time.h') expected = [ u'Copyright (c) 1982, 1986, 1993 The Regents of the University of California.', ] check_detection(expected, test_file) def test_ics_chromium_third_party_libevent_compat_sys_queue_h(self): test_file = self.get_test_loc('ics/chromium-third_party-libevent-compat-sys/queue.h') expected = [ u'Copyright (c) 1991, 1993 The Regents of the University of California.', ] check_detection(expected, test_file) def test_ics_chromium_third_party_libevent_test_regress_dns_c(self): test_file = self.get_test_loc('ics/chromium-third_party-libevent-test/regress_dns.c') expected = [ u'Copyright (c) 2003-2006 Niels Provos <provos@citi.umich.edu>', ] check_detection(expected, test_file) def test_ics_chromium_third_party_libjingle_overrides_talk_base_logging_h(self): test_file = self.get_test_loc('ics/chromium-third_party-libjingle-overrides-talk-base/logging.h') expected = [ u'Copyright 2004 2005, Google Inc.', ] check_detection(expected, test_file) def test_ics_chromium_third_party_libjingle_source_copying(self): test_file = self.get_test_loc('ics/chromium-third_party-libjingle-source/COPYING') expected = [ u'Copyright (c) 2004 2005, Google Inc.', ] check_detection(expected, test_file) def test_ics_chromium_third_party_libjingle_source_talk_base_asyncfile_cc(self): test_file = self.get_test_loc('ics/chromium-third_party-libjingle-source-talk-base/asyncfile.cc') expected = [ u'Copyright 2010, Google Inc.', ] check_detection(expected, test_file) def test_ics_chromium_third_party_libjingle_source_talk_base_asyncfile_h(self): test_file = 
self.get_test_loc('ics/chromium-third_party-libjingle-source-talk-base/asyncfile.h') expected = [ u'Copyright 2004 2010, Google Inc.', ] check_detection(expected, test_file) def test_ics_chromium_third_party_libjingle_source_talk_base_base64_cc(self): test_file = self.get_test_loc('ics/chromium-third_party-libjingle-source-talk-base/base64.cc') expected = [ u'Copyright (c) 1999, Bob Withers', ] check_detection(expected, test_file) def test_ics_chromium_third_party_libjingle_source_talk_base_base64_h(self): test_file = self.get_test_loc('ics/chromium-third_party-libjingle-source-talk-base/base64.h') expected = [ u'Copyright (c) 1999, Bob Withers', ] check_detection(expected, test_file) def test_ics_chromium_third_party_libjingle_source_talk_base_basicpacketsocketfactory_cc(self): test_file = self.get_test_loc('ics/chromium-third_party-libjingle-source-talk-base/basicpacketsocketfactory.cc') expected = [ u'Copyright 2011, Google Inc.', ] check_detection(expected, test_file) def test_ics_chromium_third_party_libjingle_source_talk_base_buffer_h(self): test_file = self.get_test_loc('ics/chromium-third_party-libjingle-source-talk-base/buffer.h') expected = [ u'Copyright 2004-2010, Google Inc.', ] check_detection(expected, test_file) def test_ics_chromium_third_party_libjingle_source_talk_base_event_cc(self): test_file = self.get_test_loc('ics/chromium-third_party-libjingle-source-talk-base/event.cc') expected = [ u'Copyright 2004 2008, Google Inc.', ] check_detection(expected, test_file) def test_ics_chromium_third_party_libjingle_source_talk_base_fileutils_cc(self): test_file = self.get_test_loc('ics/chromium-third_party-libjingle-source-talk-base/fileutils.cc') expected = [ u'Copyright 2004 2006, Google Inc.', ] check_detection(expected, test_file) def test_ics_chromium_third_party_libjingle_source_talk_base_httpbase_cc(self): test_file = self.get_test_loc('ics/chromium-third_party-libjingle-source-talk-base/httpbase.cc') expected = [ u'Copyright 2004 2005, Google 
Inc.', u'Copyright 2005 Google Inc.', ] check_detection(expected, test_file) def test_ics_chromium_third_party_libjingle_source_talk_base_macconversion_cc(self): test_file = self.get_test_loc('ics/chromium-third_party-libjingle-source-talk-base/macconversion.cc') expected = [ u'Copyright 2004 2009, Google Inc.', ] check_detection(expected, test_file) def test_ics_chromium_third_party_libjingle_source_talk_base_macutils_cc(self): test_file = self.get_test_loc('ics/chromium-third_party-libjingle-source-talk-base/macutils.cc') expected = [ u'Copyright 2007 2009, Google Inc.', ] check_detection(expected, test_file) def test_ics_chromium_third_party_libjingle_source_talk_base_socketstream_h(self): test_file = self.get_test_loc('ics/chromium-third_party-libjingle-source-talk-base/socketstream.h') expected = [ u'Copyright 2005 2010, Google Inc.', ] check_detection(expected, test_file) def test_ics_chromium_third_party_libjingle_source_talk_base_stringutils_cc(self): test_file = self.get_test_loc('ics/chromium-third_party-libjingle-source-talk-base/stringutils.cc') expected = [ u'Copyright 2004 2005, Google Inc.', ] check_detection(expected, test_file) def test_ics_chromium_third_party_libjingle_source_talk_session_phone_call_cc(self): test_file = self.get_test_loc('ics/chromium-third_party-libjingle-source-talk-session-phone/call.cc') expected = [ u'Copyright 2004 2007, Google Inc.', ] check_detection(expected, test_file) def test_ics_chromium_third_party_libjingle_source_talk_session_phone_codec_h(self): test_file = self.get_test_loc('ics/chromium-third_party-libjingle-source-talk-session-phone/codec.h') expected = [ u'Copyright 2004 2007, Google Inc.', ] check_detection(expected, test_file) def test_ics_chromium_third_party_libjingle_source_talk_session_phone_mediamonitor_cc(self): test_file = self.get_test_loc('ics/chromium-third_party-libjingle-source-talk-session-phone/mediamonitor.cc') expected = [ u'Copyright 2005 2007, Google Inc.', ] check_detection(expected, 
test_file) def test_ics_chromium_third_party_libjingle_source_talk_session_phone_mediamonitor_h(self): test_file = self.get_test_loc('ics/chromium-third_party-libjingle-source-talk-session-phone/mediamonitor.h') expected = [ u'Copyright 2005 2008, Google Inc.', ] check_detection(expected, test_file) def test_ics_chromium_third_party_libjingle_source_talk_session_phone_srtpfilter_h(self): test_file = self.get_test_loc('ics/chromium-third_party-libjingle-source-talk-session-phone/srtpfilter.h') expected = [ u'Copyright 2009, Google Inc.', ] check_detection(expected, test_file) def test_ics_chromium_third_party_libjingle_source_talk_session_phone_v4llookup_cc(self): test_file = self.get_test_loc('ics/chromium-third_party-libjingle-source-talk-session-phone/v4llookup.cc') expected = [ u'Copyright 2009, Google Inc. Author lexnikitin@google.com', ] check_detection(expected, test_file) @expectedFailure def test_ics_chromium_third_party_libjingle_source_talk_session_phone_v4llookup_cc_extra_author(self): test_file = self.get_test_loc('ics/chromium-third_party-libjingle-source-talk-session-phone/v4llookup.cc') expected = [ u'Copyright 2009, Google Inc.', ] check_detection(expected, test_file) def test_ics_chromium_third_party_libjingle_source_talk_session_phone_videocommon_h(self): test_file = self.get_test_loc('ics/chromium-third_party-libjingle-source-talk-session-phone/videocommon.h') expected = [ u'Copyright 2011, Google Inc.', ] check_detection(expected, test_file) def test_ics_chromium_third_party_libjingle_source_talk_third_party_libudev_libudev_h(self): test_file = self.get_test_loc('ics/chromium-third_party-libjingle-source-talk-third_party-libudev/libudev.h') expected = [ u'Copyright (c) 2008-2010 Kay Sievers <kay.sievers@vrfy.org>', ] check_detection(expected, test_file) def test_ics_chromium_third_party_modp_b64_license(self): test_file = self.get_test_loc('ics/chromium-third_party-modp_b64/LICENSE') expected = [ u'Copyright (c) 2005, 2006 Nick Galbreath', ] 
check_detection(expected, test_file) def test_ics_chromium_third_party_modp_b64_modp_b64_cc(self): test_file = self.get_test_loc('ics/chromium-third_party-modp_b64/modp_b64.cc') expected = [ u'Copyright (c) 2005, 2006 Nick Galbreath', ] check_detection(expected, test_file) def test_ics_chromium_third_party_modp_b64_modp_b64_h(self): test_file = self.get_test_loc('ics/chromium-third_party-modp_b64/modp_b64.h') expected = [ u'Copyright (c) 2005, 2006, Nick Galbreath', ] check_detection(expected, test_file) def test_ics_chromium_webkit_glue_inspector_strings_grd(self): test_file = self.get_test_loc('ics/chromium-webkit-glue/inspector_strings.grd') expected = [ u'Copyright (c) 2007, 2008 Apple Inc.', ] check_detection(expected, test_file) def test_ics_chromium_webkit_glue_multipart_response_delegate_h(self): test_file = self.get_test_loc('ics/chromium-webkit-glue/multipart_response_delegate.h') expected = [ u'Copyright (c) 2006-2009 The Chromium Authors.', u'Copyright (c) 1998 the Initial Developer.', ] check_detection(expected, test_file) def test_ics_chromium_webkit_glue_webcursor_gtk_data_h(self): test_file = self.get_test_loc('ics/chromium-webkit-glue/webcursor_gtk_data.h') expected = [ u'Copyright (c) 2001 Tim Copperfield <timecop@network.email.ne.jp>', u'Copyright (c) 2007 Christian Dywan <christian@twotoasts.de>', ] check_detection(expected, test_file) def test_ics_chromium_webkit_glue_webkit_strings_grd(self): test_file = self.get_test_loc('ics/chromium-webkit-glue/webkit_strings.grd') expected = [ u'Copyright (c) 2007 Apple Inc.', u'Copyright (c) 2001 the Initial Developer.', ] check_detection(expected, test_file) def test_ics_chromium_webkit_glue_resources_readme_txt(self): test_file = self.get_test_loc('ics/chromium-webkit-glue-resources/README.txt') expected = [ u'Copyright (c) 1998 the Initial Developer.', u'Copyright (c) 2005 Apple Computer, Inc.', ] check_detection(expected, test_file) def test_ics_clang_notice(self): test_file = 
self.get_test_loc('ics/clang/NOTICE') expected = [ u'Copyright (c) 2007-2011 University of Illinois', ] check_detection(expected, test_file) @expectedFailure def test_ics_clang_notice_trail_place(self): test_file = self.get_test_loc('ics/clang/NOTICE') expected = [ u'Copyright (c) 2007-2011 University of Illinois at Urbana-Champaign.', ] check_detection(expected, test_file) def test_ics_clang_docs_block_abi_apple_txt(self): test_file = self.get_test_loc('ics/clang-docs/Block-ABI-Apple.txt') expected = [ u'Copyright 2008-2010 Apple, Inc.', ] check_detection(expected, test_file) def test_ics_clang_docs_blocklanguagespec_txt(self): test_file = self.get_test_loc('ics/clang-docs/BlockLanguageSpec.txt') expected = [ u'Copyright 2008-2009 Apple, Inc.', ] check_detection(expected, test_file) def test_ics_clang_include_clang_basic_convertutf_h(self): test_file = self.get_test_loc('ics/clang-include-clang-Basic/ConvertUTF.h') expected = [ u'Copyright 2001-2004 Unicode, Inc.', ] check_detection(expected, test_file) def test_ics_clang_lib_headers_iso646_h(self): test_file = self.get_test_loc('ics/clang-lib-Headers/iso646.h') expected = [ u'Copyright (c) 2008 Eli Friedman', ] check_detection(expected, test_file) def test_ics_clang_lib_headers_limits_h(self): test_file = self.get_test_loc('ics/clang-lib-Headers/limits.h') expected = [ u'Copyright (c) 2009 Chris Lattner', ] check_detection(expected, test_file) def test_ics_clang_lib_headers_tgmath_h(self): test_file = self.get_test_loc('ics/clang-lib-Headers/tgmath.h') expected = [ u'Copyright (c) 2009 Howard Hinnant', ] check_detection(expected, test_file) def test_ics_collada_license_txt(self): test_file = self.get_test_loc('ics/collada/license.txt') expected = [ u'Copyright 2006 Sony Computer Entertainment Inc.', ] check_detection(expected, test_file) def test_ics_collada_include_dae_h(self): test_file = self.get_test_loc('ics/collada-include/dae.h') expected = [ u'Copyright 2006 Sony Computer Entertainment Inc.', ] 
check_detection(expected, test_file) def test_ics_collada_include_dae_daezaeuncompresshandler_h(self): test_file = self.get_test_loc('ics/collada-include-dae/daeZAEUncompressHandler.h') expected = [ u'Copyright 2008 Netallied Systems GmbH.', ] check_detection(expected, test_file) def test_ics_collada_src_1_4_dom_domasset_cpp(self): test_file = self.get_test_loc('ics/collada-src-1.4-dom/domAsset.cpp') expected = [ u'Copyright 2006 Sony Computer Entertainment Inc.', ] check_detection(expected, test_file) def test_ics_dbus_acinclude_m4(self): test_file = self.get_test_loc('ics/dbus/acinclude.m4') expected = [ u'Copyright (c) 2004 Scott James Remnant <scott@netsplit.com>.', u'(c) 2003, 2004, 2005 Thomas Vander Stichele', ] check_detection(expected, test_file) def test_ics_dbus_configure_in(self): test_file = self.get_test_loc('ics/dbus/configure.in') expected = [ u'Copyright (c) 2000-2002, 2004 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_dbus_copying(self): test_file = self.get_test_loc('ics/dbus/COPYING') expected = [ u'Copyright (c) 2003-2004 Lawrence E. 
Rosen.', u'Copyright (c) 1989, 1991 Free Software Foundation, Inc.', u'copyrighted by the Free Software Foundation', ] check_detection(expected, test_file) def test_ics_dbus_bus_activation_c(self): test_file = self.get_test_loc('ics/dbus-bus/activation.c') expected = [ u'Copyright (c) 2003 CodeFactory AB', u'Copyright (c) 2003 Red Hat, Inc.', u'Copyright (c) 2004 Imendio HB', ] check_detection(expected, test_file) def test_ics_dbus_bus_activation_h(self): test_file = self.get_test_loc('ics/dbus-bus/activation.h') expected = [ u'Copyright (c) 2003 CodeFactory AB', ] check_detection(expected, test_file) def test_ics_dbus_bus_activation_exit_codes_h(self): test_file = self.get_test_loc('ics/dbus-bus/activation-exit-codes.h') expected = [ u'Copyright (c) 2007 Red Hat, Inc.', ] check_detection(expected, test_file) def test_ics_dbus_bus_bus_c(self): test_file = self.get_test_loc('ics/dbus-bus/bus.c') expected = [ u'Copyright (c) 2003, 2004 Red Hat, Inc.', ] check_detection(expected, test_file) def test_ics_dbus_bus_config_parser_trivial_c(self): test_file = self.get_test_loc('ics/dbus-bus/config-parser-trivial.c') expected = [ u'Copyright (c) 2003, 2004, 2007 Red Hat, Inc.', ] check_detection(expected, test_file) def test_ics_dbus_bus_connection_c(self): test_file = self.get_test_loc('ics/dbus-bus/connection.c') expected = [ u'Copyright (c) 2003 Red Hat, Inc.', ] check_detection(expected, test_file) def test_ics_dbus_bus_connection_h(self): test_file = self.get_test_loc('ics/dbus-bus/connection.h') expected = [ u'Copyright (c) 2003, 2004 Red Hat, Inc.', ] check_detection(expected, test_file) def test_ics_dbus_bus_dbus_daemon_1_in(self): test_file = self.get_test_loc('ics/dbus-bus/dbus-daemon.1.in') expected = [ u'Copyright (c) 2003,2008 Red Hat, Inc.', ] check_detection(expected, test_file) def test_ics_dbus_bus_desktop_file_c(self): test_file = self.get_test_loc('ics/dbus-bus/desktop-file.c') expected = [ u'Copyright (c) 2003 CodeFactory AB', u'Copyright (c) 2003 Red 
Hat Inc.', ] check_detection(expected, test_file) def test_ics_dbus_bus_dir_watch_inotify_c(self): test_file = self.get_test_loc('ics/dbus-bus/dir-watch-inotify.c') expected = [ u'Copyright (c) 2003 Red Hat, Inc.', u'(c) 2006 Mandriva', ] check_detection(expected, test_file) def test_ics_dbus_bus_dispatch_c(self): test_file = self.get_test_loc('ics/dbus-bus/dispatch.c') expected = [ u'Copyright (c) 2003 CodeFactory AB', u'Copyright (c) 2003, 2004, 2005 Red Hat, Inc.', u'Copyright (c) 2004 Imendio HB', ] check_detection(expected, test_file) def test_ics_dbus_bus_driver_c(self): test_file = self.get_test_loc('ics/dbus-bus/driver.c') expected = [ u'Copyright (c) 2003 CodeFactory AB', u'Copyright (c) 2003, 2004, 2005 Red Hat, Inc.', ] check_detection(expected, test_file) def test_ics_dbus_bus_main_c(self): test_file = self.get_test_loc('ics/dbus-bus/main.c') expected = [ u'Copyright (c) 2003 Red Hat, Inc.', u'Copyright (c) 2002, 2003 Red Hat, Inc., CodeFactory AB', ] check_detection(expected, test_file) @expectedFailure def test_ics_dbus_bus_main_c_trail_other(self): test_file = self.get_test_loc('ics/dbus-bus/main.c') expected = [ u'Copyright (c) 2003 Red Hat, Inc.', u'Copyright (C) 2002, 2003 Red Hat, Inc., CodeFactory AB, and others', ] check_detection(expected, test_file) def test_ics_dbus_bus_messagebus_config_in(self): test_file = self.get_test_loc('ics/dbus-bus/messagebus-config.in') expected = [ u'Copyright 2009 Yaakov Selkowitz', ] check_detection(expected, test_file) def test_ics_dbus_bus_services_c(self): test_file = self.get_test_loc('ics/dbus-bus/services.c') expected = [ u'Copyright (c) 2003 Red Hat, Inc.', u'Copyright (c) 2003 CodeFactory AB', ] check_detection(expected, test_file) def test_ics_dbus_bus_signals_c(self): test_file = self.get_test_loc('ics/dbus-bus/signals.c') expected = [ u'Copyright (c) 2003, 2005 Red Hat, Inc.', ] check_detection(expected, test_file) def test_ics_dbus_bus_utils_c(self): test_file = 
self.get_test_loc('ics/dbus-bus/utils.c') expected = [ u'Copyright (c) 2003 CodeFactory AB', u'Copyright (c) 2003 Red Hat, Inc.', ] check_detection(expected, test_file) def test_ics_dbus_cmake_bus_dbus_daemon_xml(self): test_file = self.get_test_loc('ics/dbus-cmake-bus/dbus-daemon.xml') expected = [ u'Copyright (c) 2003 Red Hat, Inc.', ] check_detection(expected, test_file) def test_ics_dbus_cmake_modules_win32macros_cmake(self): test_file = self.get_test_loc('ics/dbus-cmake-modules/Win32Macros.cmake') expected = [ u'Copyright (c) 2006-2007, Ralf Habacker', ] check_detection(expected, test_file) def test_ics_dbus_dbus_dbus_address_c(self): test_file = self.get_test_loc('ics/dbus-dbus/dbus-address.c') expected = [ u'Copyright (c) 2003 CodeFactory AB', u'Copyright (c) 2004,2005 Red Hat, Inc.', ] check_detection(expected, test_file) def test_ics_dbus_dbus_dbus_auth_h(self): test_file = self.get_test_loc('ics/dbus-dbus/dbus-auth.h') expected = [ u'Copyright (c) 2002 Red Hat Inc.', ] check_detection(expected, test_file) def test_ics_dbus_dbus_dbus_auth_util_c(self): test_file = self.get_test_loc('ics/dbus-dbus/dbus-auth-util.c') expected = [ u'Copyright (c) 2002, 2003, 2004 Red Hat Inc.', ] check_detection(expected, test_file) def test_ics_dbus_dbus_dbus_connection_c(self): test_file = self.get_test_loc('ics/dbus-dbus/dbus-connection.c') expected = [ u'Copyright (c) 2002-2006 Red Hat Inc.', ] check_detection(expected, test_file) def test_ics_dbus_dbus_dbus_connection_h(self): test_file = self.get_test_loc('ics/dbus-dbus/dbus-connection.h') expected = [ u'Copyright (c) 2002, 2003 Red Hat Inc.', ] check_detection(expected, test_file) def test_ics_dbus_dbus_dbus_credentials_util_c(self): test_file = self.get_test_loc('ics/dbus-dbus/dbus-credentials-util.c') expected = [ u'Copyright (c) 2007 Red Hat Inc.', ] check_detection(expected, test_file) def test_ics_dbus_dbus_dbus_errors_c(self): test_file = self.get_test_loc('ics/dbus-dbus/dbus-errors.c') expected = [ u'Copyright 
(c) 2002, 2004 Red Hat Inc.', u'Copyright (c) 2003 CodeFactory AB', ] check_detection(expected, test_file) def test_ics_dbus_dbus_dbus_errors_h(self): test_file = self.get_test_loc('ics/dbus-dbus/dbus-errors.h') expected = [ u'Copyright (c) 2002 Red Hat Inc.', u'Copyright (c) 2003 CodeFactory AB', ] check_detection(expected, test_file) def test_ics_dbus_dbus_dbus_file_h(self): test_file = self.get_test_loc('ics/dbus-dbus/dbus-file.h') expected = [ u'Copyright (c) 2002, 2003 Red Hat, Inc.', u'Copyright (c) 2003 CodeFactory AB', ] check_detection(expected, test_file) def test_ics_dbus_dbus_dbus_file_unix_c(self): test_file = self.get_test_loc('ics/dbus-dbus/dbus-file-unix.c') expected = [ u'Copyright (c) 2002, 2003, 2006 Red Hat, Inc.', u'Copyright (c) 2003 CodeFactory AB', ] check_detection(expected, test_file) def test_ics_dbus_dbus_dbus_hash_c(self): test_file = self.get_test_loc('ics/dbus-dbus/dbus-hash.c') expected = [ u'Copyright (c) 2002 Red Hat, Inc.', u'Copyright (c) 1991-1993 The Regents of the University of California.', u'Copyright (c) 1994 Sun Microsystems, Inc.', u'Copyright (c) 1991-1993 The Regents of the University of California.', u'Copyright (c) 1994 Sun Microsystems, Inc.', ] check_detection(expected, test_file) def test_ics_dbus_dbus_dbus_hash_h(self): test_file = self.get_test_loc('ics/dbus-dbus/dbus-hash.h') expected = [ u'Copyright (c) 2002 Red Hat, Inc.', ] check_detection(expected, test_file) def test_ics_dbus_dbus_dbus_internals_c(self): test_file = self.get_test_loc('ics/dbus-dbus/dbus-internals.c') expected = [ u'Copyright (c) 2002, 2003 Red Hat, Inc.', ] check_detection(expected, test_file) def test_ics_dbus_dbus_dbus_internals_h(self): test_file = self.get_test_loc('ics/dbus-dbus/dbus-internals.h') expected = [ u'Copyright (c) 2002, 2003 Red Hat, Inc.', ] check_detection(expected, test_file) def test_ics_dbus_dbus_dbus_keyring_c(self): test_file = self.get_test_loc('ics/dbus-dbus/dbus-keyring.c') expected = [ u'Copyright (c) 2003, 2004 
Red Hat Inc.', ] check_detection(expected, test_file) def test_ics_dbus_dbus_dbus_keyring_h(self): test_file = self.get_test_loc('ics/dbus-dbus/dbus-keyring.h') expected = [ u'Copyright (c) 2003 Red Hat Inc.', ] check_detection(expected, test_file) def test_ics_dbus_dbus_dbus_marshal_basic_c(self): test_file = self.get_test_loc('ics/dbus-dbus/dbus-marshal-basic.c') expected = [ u'Copyright (c) 2002 CodeFactory AB', u'Copyright (c) 2003, 2004, 2005 Red Hat, Inc.', ] check_detection(expected, test_file) def test_ics_dbus_dbus_dbus_marshal_basic_h(self): test_file = self.get_test_loc('ics/dbus-dbus/dbus-marshal-basic.h') expected = [ u'Copyright (c) 2002 CodeFactory AB', u'Copyright (c) 2004, 2005 Red Hat, Inc.', ] check_detection(expected, test_file) def test_ics_dbus_dbus_dbus_marshal_recursive_util_c(self): test_file = self.get_test_loc('ics/dbus-dbus/dbus-marshal-recursive-util.c') expected = [ u'Copyright (c) 2004, 2005 Red Hat, Inc.', ] check_detection(expected, test_file) def test_ics_dbus_dbus_dbus_md5_c(self): test_file = self.get_test_loc('ics/dbus-dbus/dbus-md5.c') expected = [ u'Copyright (c) 2003 Red Hat Inc.', u'Copyright (c) 1999, 2000 Aladdin Enterprises.', ] check_detection(expected, test_file) def test_ics_dbus_dbus_dbus_memory_c(self): test_file = self.get_test_loc('ics/dbus-dbus/dbus-memory.c') expected = [ u'Copyright (c) 2002, 2003 Red Hat Inc.', ] check_detection(expected, test_file) def test_ics_dbus_dbus_dbus_message_h(self): test_file = self.get_test_loc('ics/dbus-dbus/dbus-message.h') expected = [ u'Copyright (c) 2002, 2003, 2005 Red Hat Inc.', ] check_detection(expected, test_file) def test_ics_dbus_dbus_dbus_message_factory_c(self): test_file = self.get_test_loc('ics/dbus-dbus/dbus-message-factory.c') expected = [ u'Copyright (c) 2005 Red Hat Inc.', ] check_detection(expected, test_file) def test_ics_dbus_dbus_dbus_message_private_h(self): test_file = self.get_test_loc('ics/dbus-dbus/dbus-message-private.h') expected = [ u'Copyright (c) 
2005 Red Hat Inc.', ] check_detection(expected, test_file) def test_ics_dbus_dbus_dbus_message_util_c(self): test_file = self.get_test_loc('ics/dbus-dbus/dbus-message-util.c') expected = [ u'Copyright (c) 2002, 2003, 2004, 2005 Red Hat Inc.', u'Copyright (c) 2002, 2003 CodeFactory AB', ] check_detection(expected, test_file) def test_ics_dbus_dbus_dbus_misc_c(self): test_file = self.get_test_loc('ics/dbus-dbus/dbus-misc.c') expected = [ u'Copyright (c) 2006 Red Hat, Inc.', ] check_detection(expected, test_file) def test_ics_dbus_dbus_dbus_nonce_c(self): test_file = self.get_test_loc('ics/dbus-dbus/dbus-nonce.c') expected = [ u'Copyright (c) 2009 Klaralvdalens Datakonsult AB, a KDAB Group company', ] check_detection(expected, test_file) def test_ics_dbus_dbus_dbus_nonce_h(self): test_file = self.get_test_loc('ics/dbus-dbus/dbus-nonce.h') expected = [ u'Copyright (c) 2009 Klaralvdalens Datakonsult AB, a KDAB Group company', ] check_detection(expected, test_file) def test_ics_dbus_dbus_dbus_object_tree_c(self): test_file = self.get_test_loc('ics/dbus-dbus/dbus-object-tree.c') expected = [ u'Copyright (c) 2003, 2005 Red Hat Inc.', ] check_detection(expected, test_file) def test_ics_dbus_dbus_dbus_protocol_h(self): test_file = self.get_test_loc('ics/dbus-dbus/dbus-protocol.h') expected = [ u'Copyright (c) 2002, 2003 CodeFactory AB', u'Copyright (c) 2004, 2005 Red Hat, Inc.', ] check_detection(expected, test_file) def test_ics_dbus_dbus_dbus_server_c(self): test_file = self.get_test_loc('ics/dbus-dbus/dbus-server.c') expected = [ u'Copyright (c) 2002, 2003, 2004, 2005 Red Hat Inc.', ] check_detection(expected, test_file) def test_ics_dbus_dbus_dbus_server_debug_pipe_c(self): test_file = self.get_test_loc('ics/dbus-dbus/dbus-server-debug-pipe.c') expected = [ u'Copyright (c) 2003 CodeFactory AB', u'Copyright (c) 2003, 2004 Red Hat, Inc.', ] check_detection(expected, test_file) def test_ics_dbus_dbus_dbus_server_socket_c(self): test_file = 
self.get_test_loc('ics/dbus-dbus/dbus-server-socket.c') expected = [ u'Copyright (c) 2002, 2003, 2004, 2006 Red Hat Inc.', ] check_detection(expected, test_file) def test_ics_dbus_dbus_dbus_server_socket_h(self): test_file = self.get_test_loc('ics/dbus-dbus/dbus-server-socket.h') expected = [ u'Copyright (c) 2002, 2006 Red Hat Inc.', ] check_detection(expected, test_file) def test_ics_dbus_dbus_dbus_server_win_c(self): test_file = self.get_test_loc('ics/dbus-dbus/dbus-server-win.c') expected = [ u'Copyright (c) 2002, 2003, 2004 Red Hat Inc.', u'Copyright (c) 2007 Ralf Habacker <ralf.habacker@freenet.de>', ] check_detection(expected, test_file) def test_ics_dbus_dbus_dbus_server_win_h(self): test_file = self.get_test_loc('ics/dbus-dbus/dbus-server-win.h') expected = [ u'Copyright (c) 2002 Red Hat Inc.', u'Copyright (c) 2007 Ralf Habacker <ralf.habacker@freenet.de>', ] check_detection(expected, test_file) def test_ics_dbus_dbus_dbus_sha_c(self): test_file = self.get_test_loc('ics/dbus-dbus/dbus-sha.c') expected = [ u'Copyright (c) 2003 Red Hat Inc.', u'Copyright (c) 1995 A. M. Kuchling', u'Copyright (c) 1995, A.M.', ] check_detection(expected, test_file) @expectedFailure def test_ics_dbus_dbus_dbus_sha_c_trail_name(self): test_file = self.get_test_loc('ics/dbus-dbus/dbus-sha.c') expected = [ u'Copyright (c) 2003 Red Hat Inc.', u'Copyright (c) 1995 A. M. Kuchling', u'Copyright (c) 1995 A. M. 
Kuchling', ] check_detection(expected, test_file) def test_ics_dbus_dbus_dbus_sockets_win_h(self): test_file = self.get_test_loc('ics/dbus-dbus/dbus-sockets-win.h') expected = [ u'Copyright (c) 2005 Novell, Inc.', ] check_detection(expected, test_file) def test_ics_dbus_dbus_dbus_spawn_c(self): test_file = self.get_test_loc('ics/dbus-dbus/dbus-spawn.c') expected = [ u'Copyright (c) 2002, 2003, 2004 Red Hat, Inc.', u'Copyright (c) 2003 CodeFactory AB', ] check_detection(expected, test_file) def test_ics_dbus_dbus_dbus_spawn_win_c(self): test_file = self.get_test_loc('ics/dbus-dbus/dbus-spawn-win.c') expected = [ u'Copyright (c) 2002, 2003, 2004 Red Hat, Inc.', u'Copyright (c) 2003 CodeFactory AB', u'Copyright (c) 2005 Novell, Inc.', ] check_detection(expected, test_file) def test_ics_dbus_dbus_dbus_string_h(self): test_file = self.get_test_loc('ics/dbus-dbus/dbus-string.h') expected = [ u'Copyright (c) 2002, 2003 Red Hat, Inc.', u'Copyright (c) 2006 Ralf Habacker <ralf.habacker@freenet.de>', ] check_detection(expected, test_file) def test_ics_dbus_dbus_dbus_string_util_c(self): test_file = self.get_test_loc('ics/dbus-dbus/dbus-string-util.c') expected = [ u'Copyright (c) 2002, 2003, 2004, 2005 Red Hat, Inc.', u'Copyright (c) 2006 Ralf Habacker <ralf.habacker@freenet.de>', ] check_detection(expected, test_file) def test_ics_dbus_dbus_dbus_sysdeps_pthread_c(self): test_file = self.get_test_loc('ics/dbus-dbus/dbus-sysdeps-pthread.c') expected = [ u'Copyright (c) 2002, 2003, 2006 Red Hat, Inc.', ] check_detection(expected, test_file) def test_ics_dbus_dbus_dbus_sysdeps_util_unix_c(self): test_file = self.get_test_loc('ics/dbus-dbus/dbus-sysdeps-util-unix.c') expected = [ u'Copyright (c) 2002, 2003, 2004, 2005 Red Hat, Inc.', u'Copyright (c) 2003 CodeFactory AB', ] check_detection(expected, test_file) def test_ics_dbus_dbus_dbus_sysdeps_util_win_c(self): test_file = self.get_test_loc('ics/dbus-dbus/dbus-sysdeps-util-win.c') expected = [ u'Copyright (c) 2002, 2003, 2004, 
2005 Red Hat, Inc.', u'Copyright (c) 2003 CodeFactory AB', u'Copyright (c) 2000 Werner Almesberger', ] check_detection(expected, test_file) def test_ics_dbus_dbus_dbus_sysdeps_win_c(self): test_file = self.get_test_loc('ics/dbus-dbus/dbus-sysdeps-win.c') expected = [ u'Copyright (c) 2002, 2003 Red Hat, Inc.', u'Copyright (c) 2003 CodeFactory AB', u'Copyright (c) 2005 Novell, Inc.', u'Copyright (c) 2006 Ralf Habacker <ralf.habacker@freenet.de>', u'Copyright (c) 2006 Peter Kummel <syntheticpp@gmx.net>', u'Copyright (c) 2006 Christian Ehrlicher <ch.ehrlicher@gmx.de>', u'Copyright (c) 2002, 2003 Red Hat, Inc.', u'Copyright (c) 2003 CodeFactory AB', u'Copyright (c) 2005 Novell, Inc.', u'Copyright 2004 Eric Poech', u'Copyright 2004 Robert Shearman', ] check_detection(expected, test_file) def test_ics_dbus_dbus_dbus_sysdeps_win_h(self): test_file = self.get_test_loc('ics/dbus-dbus/dbus-sysdeps-win.h') expected = [ u'Copyright (c) 2002, 2003 Red Hat, Inc.', u'Copyright (c) 2003 CodeFactory AB', u'Copyright (c) 2005 Novell, Inc.', ] check_detection(expected, test_file) def test_ics_dbus_dbus_dbus_sysdeps_wince_glue_c(self): test_file = self.get_test_loc('ics/dbus-dbus/dbus-sysdeps-wince-glue.c') expected = [ u'Copyright (c) 2002, 2003 Red Hat, Inc.', u'Copyright (c) 2003 CodeFactory AB', u'Copyright (c) 2005 Novell, Inc.', u'Copyright (c) 2006 Ralf Habacker <ralf.habacker@freenet.de>', u'Copyright (c) 2006 Peter Kummel <syntheticpp@gmx.net>', u'Copyright (c) 2006 Christian Ehrlicher <ch.ehrlicher@gmx.de>', ] check_detection(expected, test_file) def test_ics_dbus_dbus_dbus_threads_c(self): test_file = self.get_test_loc('ics/dbus-dbus/dbus-threads.c') expected = [ u'Copyright (c) 2002, 2003, 2006 Red Hat Inc.', ] check_detection(expected, test_file) def test_ics_dbus_dbus_dbus_threads_internal_h(self): test_file = self.get_test_loc('ics/dbus-dbus/dbus-threads-internal.h') expected = [ u'Copyright (c) 2002, 2005 Red Hat Inc.', ] check_detection(expected, test_file) def 
test_ics_dbus_dbus_dbus_transport_protected_h(self): test_file = self.get_test_loc('ics/dbus-dbus/dbus-transport-protected.h') expected = [ u'Copyright (c) 2002, 2004 Red Hat Inc.', ] check_detection(expected, test_file) def test_ics_dbus_dbus_dbus_userdb_util_c(self): test_file = self.get_test_loc('ics/dbus-dbus/dbus-userdb-util.c') expected = [ u'Copyright (c) 2003, 2004, 2005 Red Hat, Inc.', ] check_detection(expected, test_file) def test_ics_dbus_dbus_sd_daemon_c(self): test_file = self.get_test_loc('ics/dbus-dbus/sd-daemon.c') expected = [ u'Copyright 2010 Lennart Poettering', ] check_detection(expected, test_file) def test_ics_dbus_dbus_versioninfo_rc_in(self): test_file = self.get_test_loc('ics/dbus-dbus/versioninfo.rc.in') expected = [ u'Copyright (c) 2005 g10 Code GmbH', u'Copyright (c) 2009 FreeDesktop.org', ] check_detection(expected, test_file) def test_ics_dbus_doc_introspect_dtd(self): test_file = self.get_test_loc('ics/dbus-doc/introspect.dtd') expected = [ u'(c) 2005-02-02 David A. 
Wheeler', ] check_detection(expected, test_file) def test_ics_dbus_doc_introspect_xsl(self): test_file = self.get_test_loc('ics/dbus-doc/introspect.xsl') expected = [ u'Copyright (c) 2005 Lennart Poettering.', ] check_detection(expected, test_file) def test_ics_dbus_test_decode_gcov_c(self): test_file = self.get_test_loc('ics/dbus-test/decode-gcov.c') expected = [ u'Copyright (c) 2003 Red Hat Inc.', u'Copyright (c) 1990, 1991, 1992, 1993, 1994, 1996, 1997, 1998, 1999, 2000, 2001, 2002 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_dbus_tools_dbus_cleanup_sockets_1(self): test_file = self.get_test_loc('ics/dbus-tools/dbus-cleanup-sockets.1') expected = [ u'Copyright (c) 2003 Red Hat, Inc.', ] check_detection(expected, test_file) def test_ics_dbus_tools_dbus_cleanup_sockets_c(self): test_file = self.get_test_loc('ics/dbus-tools/dbus-cleanup-sockets.c') expected = [ u'Copyright (c) 2003 Red Hat, Inc.', u'Copyright (c) 2002 Michael Meeks', u'Copyright (c) 2003 Red Hat, Inc.', u'Copyright (c) 2002 Michael Meeks', ] check_detection(expected, test_file) def test_ics_dbus_tools_dbus_launch_c(self): test_file = self.get_test_loc('ics/dbus-tools/dbus-launch.c') expected = [ u'Copyright (c) 2003, 2006 Red Hat, Inc.', u'Copyright (c) 2006 Thiago Macieira <thiago@kde.org>', u'Copyright (c) 2003 Red Hat, Inc.', ] check_detection(expected, test_file) def test_ics_dbus_tools_dbus_launch_win_c(self): test_file = self.get_test_loc('ics/dbus-tools/dbus-launch-win.c') expected = [ u'Copyright (c) 2007 Ralf Habacker <ralf.habacker@freenet.de>', ] check_detection(expected, test_file) def test_ics_dbus_tools_dbus_launch_x11_c(self): test_file = self.get_test_loc('ics/dbus-tools/dbus-launch-x11.c') expected = [ u'Copyright (c) 2006 Thiago Macieira <thiago@kde.org>', ] check_detection(expected, test_file) def test_ics_dbus_tools_dbus_monitor_c(self): test_file = self.get_test_loc('ics/dbus-tools/dbus-monitor.c') expected = [ u'Copyright (c) 2003 Philip 
Blundell <philb@gnu.org>', ] check_detection(expected, test_file) def test_ics_dbus_tools_dbus_print_message_c(self): test_file = self.get_test_loc('ics/dbus-tools/dbus-print-message.c') expected = [ u'Copyright (c) 2003 Philip Blundell <philb@gnu.org>', u'Copyright (c) 2003 Red Hat, Inc.', ] check_detection(expected, test_file) def test_ics_dbus_tools_dbus_uuidgen_1(self): test_file = self.get_test_loc('ics/dbus-tools/dbus-uuidgen.1') expected = [ u'Copyright (c) 2006 Red Hat, Inc.', ] check_detection(expected, test_file) def test_ics_dbus_tools_dbus_uuidgen_c(self): test_file = self.get_test_loc('ics/dbus-tools/dbus-uuidgen.c') expected = [ u'Copyright (c) 2006 Red Hat, Inc.', u'Copyright (c) 2006 Red Hat, Inc.', ] check_detection(expected, test_file) def test_ics_dbus_tools_dbus_viewer_c(self): test_file = self.get_test_loc('ics/dbus-tools/dbus-viewer.c') expected = [ u'Copyright (c) 2003 Red Hat, Inc.', u'Copyright (c) 2003 Red Hat, Inc.', ] check_detection(expected, test_file) def test_ics_dbus_tools_strtoll_c(self): test_file = self.get_test_loc('ics/dbus-tools/strtoll.c') expected = [ u'Copyright (c) 1992, 1993 The Regents of the University of California.', ] check_detection(expected, test_file) def test_ics_dhcpcd_arp_c(self): test_file = self.get_test_loc('ics/dhcpcd/arp.c') expected = [ u'Copyright (c) 2006-2008 Roy Marples <roy@marples.name>', ] check_detection(expected, test_file) def test_ics_dhcpcd_bind_c(self): test_file = self.get_test_loc('ics/dhcpcd/bind.c') expected = [ u'Copyright (c) 2006-2010 Roy Marples <roy@marples.name>', ] check_detection(expected, test_file) def test_ics_dhcpcd_bpf_filter_h(self): test_file = self.get_test_loc('ics/dhcpcd/bpf-filter.h') expected = [ u'Copyright (c) 2006-2008 Roy Marples <roy@marples.name>', u'Copyright (c) 2004,2007 by Internet Systems Consortium, Inc.', u'Copyright (c) 1996-2003 by Internet Software Consortium', ] check_detection(expected, test_file) def test_ics_dhcpcd_client_c(self): test_file = 
self.get_test_loc('ics/dhcpcd/client.c') expected = [ u'Copyright 2006-2008 Roy Marples <roy@marples.name>', ] check_detection(expected, test_file) def test_ics_dhcpcd_common_c(self): test_file = self.get_test_loc('ics/dhcpcd/common.c') expected = [ u'Copyright (c) 2006-2009 Roy Marples <roy@marples.name>', ] check_detection(expected, test_file) def test_ics_dhcpcd_dhcpcd_8(self): test_file = self.get_test_loc('ics/dhcpcd/dhcpcd.8') expected = [ u'Copyright (c) 2006-2010 Roy Marples', ] check_detection(expected, test_file) def test_ics_dhcpcd_dhcpcd_c(self): test_file = self.get_test_loc('ics/dhcpcd/dhcpcd.c') expected = [ u'Copyright (c) 2006-2010 Roy Marples <roy@marples.name>', u'Copyright (c) 2006-2010 Roy Marples', ] check_detection(expected, test_file) def test_ics_dhcpcd_ifaddrs_c(self): test_file = self.get_test_loc('ics/dhcpcd/ifaddrs.c') expected = [ u'Copyright 2011, The Android Open Source Project', ] check_detection(expected, test_file) def test_ics_dhcpcd_if_linux_wireless_c(self): test_file = self.get_test_loc('ics/dhcpcd/if-linux-wireless.c') expected = [ u'Copyright (c) 2009-2010 Roy Marples <roy@marples.name>', ] check_detection(expected, test_file) def test_ics_dhcpcd_notice(self): test_file = self.get_test_loc('ics/dhcpcd/NOTICE') expected = [ u'Copyright 2006-2008 Roy Marples <roy@marples.name>', u'Copyright (c) 2004,2007 by Internet Systems Consortium, Inc.', u'Copyright (c) 1996-2003 by Internet Software Consortium', ] check_detection(expected, test_file) def test_ics_dhcpcd_readme(self): test_file = self.get_test_loc('ics/dhcpcd/README') expected = [ u'Copyright (c) 2006-2010 Roy Marples <roy@marples.name>', ] check_detection(expected, test_file) def test_ics_dhcpcd_compat_arc4random_c(self): test_file = self.get_test_loc('ics/dhcpcd-compat/arc4random.c') expected = [ u'Copyright 1996 David Mazieres <dm@lcs.mit.edu>.', ] check_detection(expected, test_file) def test_ics_dhcpcd_compat_linkaddr_c(self): test_file = 
self.get_test_loc('ics/dhcpcd-compat/linkaddr.c') expected = [ u'Copyright (c) 1990, 1993 The Regents of the University of California.', ] check_detection(expected, test_file) def test_ics_dhcpcd_mk_cc_mk(self): test_file = self.get_test_loc('ics/dhcpcd-mk/cc.mk') expected = [ u'Copyright 2008 Roy Marples <roy@marples.name>', ] check_detection(expected, test_file) def test_ics_dhcpcd_mk_dist_mk(self): test_file = self.get_test_loc('ics/dhcpcd-mk/dist.mk') expected = [ u'Copyright 2008-2009 Roy Marples <roy@marples.name>', ] check_detection(expected, test_file) def test_ics_dnsmasq_copying_v3(self): test_file = self.get_test_loc('ics/dnsmasq/COPYING-v3') expected = [ u'Copyright (c) 2007 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_dnsmasq_makefile(self): test_file = self.get_test_loc('ics/dnsmasq/Makefile') expected = [ u'Copyright (c) 2000-2009 Simon Kelley', ] check_detection(expected, test_file) def test_ics_dnsmasq_contrib_suse_dnsmasq_suse_spec(self): test_file = self.get_test_loc('ics/dnsmasq-contrib-Suse/dnsmasq-suse.spec') expected = [ u'Copyright GPL Group', ] check_detection(expected, test_file) def test_ics_dnsmasq_contrib_wrt_dhcp_lease_time_c(self): test_file = self.get_test_loc('ics/dnsmasq-contrib-wrt/dhcp_lease_time.c') expected = [ u'Copyright (c) 2007 Simon Kelley', ] check_detection(expected, test_file) def test_ics_dnsmasq_contrib_wrt_dhcp_release_c(self): test_file = self.get_test_loc('ics/dnsmasq-contrib-wrt/dhcp_release.c') expected = [ u'Copyright (c) 2006 Simon Kelley', ] check_detection(expected, test_file) def test_ics_dnsmasq_contrib_wrt_lease_update_sh(self): test_file = self.get_test_loc('ics/dnsmasq-contrib-wrt/lease_update.sh') expected = [ u'Copyright (c) 2006 Simon Kelley', ] check_detection(expected, test_file) def test_ics_dnsmasq_src_bpf_c(self): test_file = self.get_test_loc('ics/dnsmasq-src/bpf.c') expected = [ u'Copyright (c) 2000-2009 Simon Kelley', ] check_detection(expected, 
test_file) def test_ics_dnsmasq_src_dnsmasq_h(self): test_file = self.get_test_loc('ics/dnsmasq-src/dnsmasq.h') expected = [ u'Copyright (c) 2000-2009 Simon Kelley', u'Copyright (c) 2000-2009 Simon Kelley', ] check_detection(expected, test_file) def test_ics_dnsmasq_src_nameser_h(self): test_file = self.get_test_loc('ics/dnsmasq-src/nameser.h') expected = [ u'Copyright (c) 1983, 1989, 1993 The Regents of the University of California.', u'Portions Copyright (c) 1993 by Digital Equipment Corporation.', u'Portions Copyright (c) 1995 by International Business Machines, Inc.', ] check_detection(expected, test_file) def test_ics_doclava_notice(self): test_file = self.get_test_loc('ics/doclava/NOTICE') expected = [ u'Copyright (c) 2010 Google Inc.', u'Copyright (c) 2008 John Resig', u'Copyright (c) 2009 John Resig', ] check_detection(expected, test_file) def test_ics_doclava_res_assets_templates_assets_jquery_history_js(self): test_file = self.get_test_loc('ics/doclava-res-assets-templates-assets/jquery-history.js') expected = [ u'Copyright (c) 2008 Tom Rodenberg', ] check_detection(expected, test_file) def test_ics_doclava_res_assets_templates_assets_jquery_resizable_min_js(self): test_file = self.get_test_loc('ics/doclava-res-assets-templates-assets/jquery-resizable.min.js') expected = [ u'Copyright (c) 2009 John Resig', u'Copyright 2009, The Dojo Foundation', u'Copyright (c) 2009 AUTHORS.txt (http://jqueryui.com/about)', u'Copyright (c) 2009 AUTHORS.txt (http://jqueryui.com/about)', ] check_detection(expected, test_file) def test_ics_doclava_src_com_google_doclava_annotationinstanceinfo_java(self): test_file = self.get_test_loc('ics/doclava-src-com-google-doclava/AnnotationInstanceInfo.java') expected = [ u'Copyright (c) 2010 Google Inc.', ] check_detection(expected, test_file) def test_ics_doclava_src_com_google_doclava_doclava2_java(self): test_file = self.get_test_loc('ics/doclava-src-com-google-doclava/Doclava2.java') expected = [ u'Copyright (c) 2011 Google Inc.', 
] check_detection(expected, test_file) def test_ics_doclava_src_com_google_doclava_parser_java_g(self): test_file = self.get_test_loc('ics/doclava-src-com-google-doclava-parser/Java.g') expected = [ u'Copyright (c) 2007-2008 Terence Parr', ] check_detection(expected, test_file) def test_ics_dropbear_agentfwd_h(self): test_file = self.get_test_loc('ics/dropbear/agentfwd.h') expected = [ u'Copyright (c) 2002,2003 Matt Johnston', ] check_detection(expected, test_file) def test_ics_dropbear_atomicio_c(self): test_file = self.get_test_loc('ics/dropbear/atomicio.c') expected = [ u'Copyright (c) 1995,1999 Theo de Raadt.', ] check_detection(expected, test_file) def test_ics_dropbear_circbuffer_c(self): test_file = self.get_test_loc('ics/dropbear/circbuffer.c') expected = [ u'Copyright (c) 2002-2004 Matt Johnston', ] check_detection(expected, test_file) def test_ics_dropbear_cli_algo_c(self): test_file = self.get_test_loc('ics/dropbear/cli-algo.c') expected = [ u'Copyright (c) 2002,2003 Matt Johnston', u'Copyright (c) 2004 by Mihnea Stoenescu', ] check_detection(expected, test_file) def test_ics_dropbear_cli_authinteract_c(self): test_file = self.get_test_loc('ics/dropbear/cli-authinteract.c') expected = [ u'Copyright (c) 2005 Matt Johnston', ] check_detection(expected, test_file) def test_ics_dropbear_cli_kex_c(self): test_file = self.get_test_loc('ics/dropbear/cli-kex.c') expected = [ u'Copyright (c) 2002-2004 Matt Johnston', u'Copyright (c) 2004 by Mihnea Stoenescu', ] check_detection(expected, test_file) def test_ics_dropbear_common_kex_c(self): test_file = self.get_test_loc('ics/dropbear/common-kex.c') expected = [ u'Copyright (c) 2002-2004 Matt Johnston', u'Portions Copyright (c) 2004 by Mihnea Stoenescu', ] check_detection(expected, test_file) def test_ics_dropbear_compat_c(self): test_file = self.get_test_loc('ics/dropbear/compat.c') expected = [ u'Copyright (c) 2002,2003 Matt Johnston', u'Copyright (c) 1998 Todd C. 
Miller <Todd.Miller@courtesan.com>', u'Copyright (c) 1990, 1993 The Regents of the University of California.', ] check_detection(expected, test_file) def test_ics_dropbear_configure(self): test_file = self.get_test_loc('ics/dropbear/configure') expected = [ u'Copyright (c) 2003 Free Software Foundation, Inc.', u'Copyright (c) 2003 Free Software Foundation, Inc.', u'Copyright (c) 2003 Free Software Foundation, Inc.', u'Copyright (c) 2003 Free Software Foundation, Inc.', u'Copyright (c) 2003 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_dropbear_dbutil_c(self): test_file = self.get_test_loc('ics/dropbear/dbutil.c') expected = [ u'Copyright (c) 2002,2003 Matt Johnston', u'Copyright (c) 1998 Todd C. Miller <Todd.Miller@courtesan.com>', ] check_detection(expected, test_file) def test_ics_dropbear_fake_rfc2553_c(self): test_file = self.get_test_loc('ics/dropbear/fake-rfc2553.c') expected = [ u'Copyright (c) 2000-2003 Damien Miller.', u'Copyright (c) 1999 WIDE Project.', ] check_detection(expected, test_file) def test_ics_dropbear_install_sh(self): test_file = self.get_test_loc('ics/dropbear/install-sh') expected = [ u'Copyright 1991 by the Massachusetts Institute of Technology', ] check_detection(expected, test_file) def test_ics_dropbear_keyimport_c_trail_name(self): test_file = self.get_test_loc('ics/dropbear/keyimport.c') expected = [ u'copyright 2003 Matt Johnston', u'copyright 1997-2003 Simon Tatham.', u'Portions copyright Robert de Bath, Joris van Rantwijk, Delian Delchev, Andreas Schultz, Jeroen Massar, Wez Furlong, Nicolas Barry, Justin Bradford, and CORE SDI S.A.', ] check_detection(expected, test_file) def test_ics_dropbear_license_extra_portion_trail_name(self): test_file = self.get_test_loc('ics/dropbear/LICENSE') expected = [ u'(c) 2004 Mihnea Stoenescu', u'Copyright (c) 2002-2006 Matt Johnston', u'Portions copyright (c) 2004 Mihnea Stoenescu', u'Copyright (c) 1995 Tatu Ylonen <ylo@cs.hut.fi>, Espoo, Finland', u'(c) 
Todd C. Miller', u'copyright 1997-2003 Simon Tatham.', u'Portions copyright Robert de Bath, Joris van Rantwijk, Delian Delchev, Andreas Schultz, Jeroen Massar, Wez Furlong, Nicolas Barry, Justin Bradford, and CORE SDI S.A.', ] check_detection(expected, test_file) def test_ics_dropbear_loginrec_c_extra_portion_extra_portion(self): test_file = self.get_test_loc('ics/dropbear/loginrec.c') expected = [ u'Copyright (c) 2000 Andre Lucas.', u'Portions copyright (c) 1998 Todd C. Miller', u'Portions copyright (c) 1996 Jason Downs', u'Portions copyright (c) 1996 Theo de Raadt', ] check_detection(expected, test_file) def test_ics_dropbear_loginrec_h(self): test_file = self.get_test_loc('ics/dropbear/loginrec.h') expected = [ u'Copyright (c) 2000 Andre Lucas.', ] check_detection(expected, test_file) def test_ics_dropbear_netbsd_getpass_c(self): test_file = self.get_test_loc('ics/dropbear/netbsd_getpass.c') expected = [ u'Copyright (c) 1988, 1993 The Regents of the University of California.', ] check_detection(expected, test_file) def test_ics_dropbear_progressmeter_c(self): test_file = self.get_test_loc('ics/dropbear/progressmeter.c') expected = [ u'Copyright (c) 2003 Nils Nordman.', ] check_detection(expected, test_file) def test_ics_dropbear_progressmeter_h(self): test_file = self.get_test_loc('ics/dropbear/progressmeter.h') expected = [ u'Copyright (c) 2002 Nils Nordman.', ] check_detection(expected, test_file) def test_ics_dropbear_scp_c(self): test_file = self.get_test_loc('ics/dropbear/scp.c') expected = [ u'Copyright (c) 1999 Theo de Raadt.', u'Copyright (c) 1999 Aaron Campbell.', u'Copyright (c) 1983, 1990, 1992, 1993, 1995 The Regents of the University of California.', ] check_detection(expected, test_file) def test_ics_dropbear_scpmisc_c(self): test_file = self.get_test_loc('ics/dropbear/scpmisc.c') expected = [ u'Copyright (c) 2000 Markus Friedl.', u'Copyright (c) 1995 Tatu Ylonen <ylo@cs.hut.fi>, Espoo, Finland', ] check_detection(expected, test_file) def 
test_ics_dropbear_scpmisc_h(self): test_file = self.get_test_loc('ics/dropbear/scpmisc.h') expected = [ u'Copyright (c) 1995 Tatu Ylonen <ylo@cs.hut.fi>, Espoo, Finland', ] check_detection(expected, test_file) def test_ics_dropbear_svr_authpam_c(self): test_file = self.get_test_loc('ics/dropbear/svr-authpam.c') expected = [ u'Copyright (c) 2004 Martin Carlsson', u'Portions (c) 2004 Matt Johnston', ] check_detection(expected, test_file) def test_ics_dropbear_svr_main_c(self): test_file = self.get_test_loc('ics/dropbear/svr-main.c') expected = [ u'Copyright (c) 2002-2006 Matt Johnston', ] check_detection(expected, test_file) def test_ics_dropbear_libtommath_mtest_mpi_c(self): test_file = self.get_test_loc('ics/dropbear-libtommath-mtest/mpi.c') expected = [ u'Copyright (c) 1998 Michael J. Fromberger', ] check_detection(expected, test_file) def test_ics_dropbear_libtommath_mtest_mpi_h(self): test_file = self.get_test_loc('ics/dropbear-libtommath-mtest/mpi.h') expected = [ u'Copyright (c) 1998 Michael J. 
Fromberger', ] check_detection(expected, test_file) def test_ics_easymock_src_org_easymock_abstractmatcher_java_trail_name(self): test_file = self.get_test_loc('ics/easymock-src-org-easymock/AbstractMatcher.java') expected = [ u'Copyright 2001-2009 OFFIS, Tammo Freese', ] check_detection(expected, test_file) def test_ics_easymock_src_org_easymock_capture_java_trail_name(self): test_file = self.get_test_loc('ics/easymock-src-org-easymock/Capture.java') expected = [ u'Copyright 2003-2009 OFFIS, Henri Tremblay', ] check_detection(expected, test_file) def test_ics_easymock_src_org_easymock_iargumentmatcher_java_trail_name(self): test_file = self.get_test_loc('ics/easymock-src-org-easymock/IArgumentMatcher.java') expected = [ u'Copyright 2001-2006 OFFIS, Tammo Freese', ] check_detection(expected, test_file) def test_ics_embunit_inc_assertimpl_h(self): test_file = self.get_test_loc('ics/embunit-inc/AssertImpl.h') expected = [ u'Copyright (c) 2003 Embedded Unit Project', ] check_detection(expected, test_file) def test_ics_embunit_src_stdimpl_c(self): test_file = self.get_test_loc('ics/embunit-src/stdImpl.c') expected = [ u'Copyright (c) 2003 Embedded Unit Project', ] check_detection(expected, test_file) def test_ics_emma_android_mk(self): test_file = self.get_test_loc('ics/emma/Android.mk') expected = [ u'Copyright 2008 The Android Open Source Project', ] check_detection(expected, test_file) def test_ics_emma_build_txt(self): test_file = self.get_test_loc('ics/emma/BUILD.txt') expected = [ u'Copyright (c) 2003-2004 Vlad Roubtsov.', ] check_detection(expected, test_file) def test_ics_emma_test_sh(self): test_file = self.get_test_loc('ics/emma/test.sh') expected = [ u'Copyright 2009 Google Inc.', ] check_detection(expected, test_file) def test_ics_emma_ant_ant14_com_vladium_emma_antmain_java(self): test_file = self.get_test_loc('ics/emma-ant-ant14-com-vladium-emma/ANTMain.java') expected = [ u'Copyright (c) 2003 Vladimir Roubtsov.', u'Vlad Roubtsov, (c) 2004', ] 
check_detection(expected, test_file) def test_ics_emma_ant_ant14_com_vladium_emma_emmajavatask_java(self): test_file = self.get_test_loc('ics/emma-ant-ant14-com-vladium-emma/emmajavaTask.java') expected = [ u'Copyright (c) 2003 Vladimir Roubtsov.', u'Vlad Roubtsov, (c) 2003', ] check_detection(expected, test_file) def test_ics_emma_core_data_manifest_mf_extra_implementation(self): test_file = self.get_test_loc('ics/emma-core-data/MANIFEST.MF') expected = [ u'(c) Vladimir Roubtsov', ] check_detection(expected, test_file) def test_ics_emma_core_java12_com_vladium_emma_iappconstants_java_extra_string(self): test_file = self.get_test_loc('ics/emma-core-java12-com-vladium-emma/IAppConstants.java') expected = [ u'Copyright (c) 2003 Vladimir Roubtsov.', u'Vlad Roubtsov, (c) 2003', u'(c) Vladimir Roubtsov', ] check_detection(expected, test_file) def test_ics_emma_core_java12_com_vladium_emma_processor_java(self): test_file = self.get_test_loc('ics/emma-core-java12-com-vladium-emma/Processor.java') expected = [ u'Copyright (c) 2004 Vladimir Roubtsov.', u'Vlad Roubtsov, (c) 2004', ] check_detection(expected, test_file) def test_ics_emma_core_java12_com_vladium_emma_data_imetadataconstants_java(self): test_file = self.get_test_loc('ics/emma-core-java12-com-vladium-emma-data/IMetadataConstants.java') expected = [ u'Copyright (c) 2003 Vladimir Roubtsov.', ] check_detection(expected, test_file) def test_ics_emma_core_java12_com_vladium_emma_report_lcov_reportgenerator_java(self): test_file = self.get_test_loc('ics/emma-core-java12-com-vladium-emma-report-lcov/ReportGenerator.java') expected = [ u'Copyright 2009 Google Inc.', u'Copyright (c) 2003 Vladimir Roubtsov.', u'Vlad Roubtsov, (c) 2003', u'Tim Baverstock, (c) 2009', ] check_detection(expected, test_file) def test_ics_emma_core_java12_com_vladium_jcd_cls_abstractclassdefvisitor_java(self): test_file = self.get_test_loc('ics/emma-core-java12-com-vladium-jcd-cls/AbstractClassDefVisitor.java') expected = [ u'Copyright (c) 2003 
Vladimir Roubtsov.', u'(c) 2001, Vlad Roubtsov', ] check_detection(expected, test_file) def test_ics_emma_core_java12_com_vladium_jcd_cls_constantcollection_java(self): test_file = self.get_test_loc('ics/emma-core-java12-com-vladium-jcd-cls/ConstantCollection.java') expected = [ u'Copyright (c) 2003 Vladimir Roubtsov.', u'(c) 2001, Vladimir Roubtsov', ] check_detection(expected, test_file) def test_ics_emma_core_java12_com_vladium_logging_iloglevels_java(self): test_file = self.get_test_loc('ics/emma-core-java12-com-vladium-logging/ILogLevels.java') expected = [ u'Copyright (c) 2003 Vladimir Roubtsov.', u'Vlad Roubtsov, (c) 2001', ] check_detection(expected, test_file) def test_ics_emma_core_java12_com_vladium_util_softvaluemap_java(self): test_file = self.get_test_loc('ics/emma-core-java12-com-vladium-util/SoftValueMap.java') expected = [ u'Copyright (c) 2003 Vladimir Roubtsov.', u'(c) 2002, Vlad Roubtsov', ] check_detection(expected, test_file) def test_ics_emma_core_java12_com_vladium_util_wcmatcher_java(self): test_file = self.get_test_loc('ics/emma-core-java12-com-vladium-util/WCMatcher.java') expected = [ u'Copyright (c) 2003 Vladimir Roubtsov.', u'Vlad Roubtsov, (c) 2002', ] check_detection(expected, test_file) def test_ics_esd_include_audiofile_h(self): test_file = self.get_test_loc('ics/esd-include/audiofile.h') expected = [ u'Copyright (c) 1998-2000, Michael Pruett <michael@68k.org>', ] check_detection(expected, test_file) def test_ics_expat_configure(self): test_file = self.get_test_loc('ics/expat/configure') expected = [ u'Copyright (c) 2003 Free Software Foundation, Inc.', u'Copyright (c) 2003 Free Software Foundation, Inc.', u'Copyright 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005 Free Software Foundation, Inc.', u'Gordon Matzigkeit <gord@gnu.ai.mit.edu>, 1996', u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001 Free Software Foundation, Inc.', u'Copyright (c) 2003 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def 
test_ics_expat_configure_in(self): test_file = self.get_test_loc('ics/expat/configure.in') expected = [ u'Copyright 2000 Clark Cooper', ] check_detection(expected, test_file) def test_ics_expat_notice(self): test_file = self.get_test_loc('ics/expat/NOTICE') expected = [ u'Copyright (c) 1998, 1999, 2000 Thai Open Source Software Center Ltd and Clark Cooper', u'Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006 Expat maintainers.' ] check_detection(expected, test_file) def test_ics_expat_amiga_expat_lib_c_trail_maint(self): test_file = self.get_test_loc('ics/expat-amiga/expat_lib.c') expected = [ u'Copyright (c) 2001-2007 Expat maintainers.', ] check_detection(expected, test_file) def test_ics_expat_conftools_libtool_m4(self): test_file = self.get_test_loc('ics/expat-conftools/libtool.m4') expected = [ u'Copyright 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005 Free Software Foundation, Inc.', u'Gordon Matzigkeit <gord@gnu.ai.mit.edu>, 1996', u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_expat_conftools_ltmain_sh(self): test_file = self.get_test_loc('ics/expat-conftools/ltmain.sh') expected = [ u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005 Free Software Foundation, Inc.', u'Gordon Matzigkeit <gord@gnu.ai.mit.edu>, 1996', u'Copyright (c) 2005 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_expat_doc_reference_html(self): test_file = self.get_test_loc('ics/expat-doc/reference.html') expected = [ u'Copyright 1999,2000 Clark Cooper <coopercc@netheaven.com>', ] check_detection(expected, test_file) def test_ics_expat_examples_outline_c(self): test_file = self.get_test_loc('ics/expat-examples/outline.c') expected = [ u'Copyright 1999, Clark Cooper', ] check_detection(expected, test_file) def test_ics_expat_lib_ascii_h(self): test_file = self.get_test_loc('ics/expat-lib/ascii.h') expected = [ u'Copyright (c) 1998, 1999 Thai 
Open Source Software Center Ltd', ] check_detection(expected, test_file) def test_ics_expat_lib_expat_h(self): test_file = self.get_test_loc('ics/expat-lib/expat.h') expected = [ u'Copyright (c) 1998, 1999, 2000 Thai Open Source Software Center Ltd', ] check_detection(expected, test_file) def test_ics_expat_lib_macconfig_h(self): test_file = self.get_test_loc('ics/expat-lib/macconfig.h') expected = [ u'Copyright 2000, Clark Cooper', ] check_detection(expected, test_file) def test_ics_expat_lib_makefile_mpw_extra_portion(self): test_file = self.get_test_loc('ics/expat-lib/Makefile.MPW') expected = [ u'Copyright (c) 2002 Daryle Walker', u'Portions Copyright (c) 2002 Thomas Wegner', ] check_detection(expected, test_file) def test_ics_expat_lib_xmlparse_c(self): test_file = self.get_test_loc('ics/expat-lib/xmlparse.c') expected = [ u'Copyright (c) 1998, 1999, 2000 Thai Open Source Software Center Ltd', ] check_detection(expected, test_file) def test_ics_expat_lib_xmltok_c(self): test_file = self.get_test_loc('ics/expat-lib/xmltok.c') expected = [ u'Copyright (c) 1998, 1999 Thai Open Source Software Center Ltd', ] check_detection(expected, test_file) def test_ics_expat_tests_chardata_c(self): test_file = self.get_test_loc('ics/expat-tests/chardata.c') expected = [ u'Copyright (c) 1998-2003 Thai Open Source Software Center Ltd', ] check_detection(expected, test_file) def test_ics_expat_win32_expat_iss(self): test_file = self.get_test_loc('ics/expat-win32/expat.iss') expected = [ u'Copyright (c) 1998-2006 Thai Open Source Software Center, Clark Cooper, and the Expat', # maintainers.' 
] check_detection(expected, test_file) @expectedFailure def test_ics_expat_win32_expat_iss_trail_name_lead_copy(self): test_file = self.get_test_loc('ics/expat-win32/expat.iss') expected = [ u'Copyright (c) 1998-2006 Thai Open Source Software Center, Clark Cooper, and the Expat maintainers', ] check_detection(expected, test_file) def test_ics_eyes_free_notice(self): test_file = self.get_test_loc('ics/eyes-free/NOTICE') expected = [ u'Copyright 2010 Google Inc.', ] check_detection(expected, test_file) def test_ics_fdlibm_configure(self): test_file = self.get_test_loc('ics/fdlibm/configure') expected = [ u'Copyright (c) 2003 Free Software Foundation, Inc.', u'Copyright (c) 2003 Free Software Foundation, Inc.', u'Copyright (c) 2003 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_fdlibm_e_acos_c(self): test_file = self.get_test_loc('ics/fdlibm/e_acos.c') expected = [ u'Copyright (c) 1993 by Sun Microsystems, Inc.', ] check_detection(expected, test_file) def test_ics_fdlibm_e_exp_c(self): test_file = self.get_test_loc('ics/fdlibm/e_exp.c') expected = [ u'Copyright (c) 2004 by Sun Microsystems, Inc.', ] check_detection(expected, test_file) def test_ics_fdlibm_k_tan_c(self): test_file = self.get_test_loc('ics/fdlibm/k_tan.c') expected = [ u'Copyright 2004 Sun Microsystems, Inc.', ] check_detection(expected, test_file) def test_ics_fdlibm_makefile_in(self): test_file = self.get_test_loc('ics/fdlibm/makefile.in') expected = [ u'Copyright (c) 1993 by Sun Microsystems, Inc.', ] check_detection(expected, test_file) def test_ics_fdlibm_notice(self): test_file = self.get_test_loc('ics/fdlibm/NOTICE') expected = [ u'Copyright (c) 1993 by Sun Microsystems, Inc.', ] check_detection(expected, test_file) def test_ics_flac_notice(self): test_file = self.get_test_loc('ics/flac/NOTICE') expected = [ u'Copyright (c) 2000,2001,2002,2003,2004,2005,2006,2007 Josh Coalson', ] check_detection(expected, test_file) def test_ics_flac_include_flac_all_h(self): 
test_file = self.get_test_loc('ics/flac-include-FLAC/all.h') expected = [ u'Copyright (c) 2000,2001,2002,2003,2004,2005,2006,2007 Josh Coalson', ] check_detection(expected, test_file) def test_ics_flac_include_flac_assert_h(self): test_file = self.get_test_loc('ics/flac-include-FLAC/assert.h') expected = [ u'Copyright (c) 2001,2002,2003,2004,2005,2006,2007 Josh Coalson', ] check_detection(expected, test_file) def test_ics_flac_include_flac_callback_h(self): test_file = self.get_test_loc('ics/flac-include-FLAC/callback.h') expected = [ u'Copyright (c) 2004,2005,2006,2007 Josh Coalson', ] check_detection(expected, test_file) def test_ics_flac_include_share_alloc_h(self): test_file = self.get_test_loc('ics/flac-include-share/alloc.h') expected = [ u'Copyright (c) 2011 The Android Open Source Project', ] check_detection(expected, test_file) def test_ics_flac_libflac_makefile_am(self): test_file = self.get_test_loc('ics/flac-libFLAC/Makefile.am') expected = [ u'Copyright (c) 2001,2002,2003,2004,2005,2006,2007 Josh Coalson', ] check_detection(expected, test_file) def test_ics_flac_libflac_makefile_in(self): test_file = self.get_test_loc('ics/flac-libFLAC/Makefile.in') expected = [ u'Copyright 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.', u'Copyright (c) 2001,2002,2003,2004,2005,2006,2007 Josh Coalson', ] check_detection(expected, test_file) def test_ics_flac_libflac_ogg_decoder_aspect_c(self): test_file = self.get_test_loc('ics/flac-libFLAC/ogg_decoder_aspect.c') expected = [ u'Copyright (c) 2002,2003,2004,2005,2006,2007 Josh Coalson', ] check_detection(expected, test_file) def test_ics_flac_libflac_window_c(self): test_file = self.get_test_loc('ics/flac-libFLAC/window.c') expected = [ u'Copyright (c) 2006,2007 Josh Coalson', ] check_detection(expected, test_file) def test_ics_flac_libflac_ia32_bitreader_asm_nasm(self): test_file = self.get_test_loc('ics/flac-libFLAC-ia32/bitreader_asm.nasm') expected = [ u'Copyright (c) 
2001,2002,2003,2004,2005,2006,2007 Josh Coalson', ] check_detection(expected, test_file) def test_ics_flac_libflac_ppc_makefile_am(self): test_file = self.get_test_loc('ics/flac-libFLAC-ppc/Makefile.am') expected = [ u'Copyright (c) 2004,2005,2006,2007 Josh Coalson', ] check_detection(expected, test_file) def test_ics_flac_libflac_ppc_makefile_in(self): test_file = self.get_test_loc('ics/flac-libFLAC-ppc/Makefile.in') expected = [ u'Copyright 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.', u'Copyright (c) 2004,2005,2006,2007 Josh Coalson', ] check_detection(expected, test_file) def test_ics_freetype_notice(self): test_file = self.get_test_loc('ics/freetype/NOTICE') expected = [ u'Copyright 1996-2002, 2006 by David Turner, Robert Wilhelm, and Werner Lemberg', u'copyright (c) The FreeType Project (www.freetype.org).', u'copyright (c) 1996-2000 by David Turner, Robert Wilhelm, and Werner Lemberg.', ] check_detection(expected, test_file) def test_ics_freetype_builds_ft2unix_h(self): test_file = self.get_test_loc('ics/freetype-builds/ft2unix.h') expected = [ u'Copyright 1996-2001, 2003, 2006 by David Turner, Robert Wilhelm, and Werner Lemberg.', ] check_detection(expected, test_file) def test_ics_freetype_include_ft2build_h(self): test_file = self.get_test_loc('ics/freetype-include/ft2build.h') expected = [ u'Copyright 1996-2001, 2006 by David Turner, Robert Wilhelm, and Werner Lemberg.', ] check_detection(expected, test_file) def test_ics_freetype_include_freetype_freetype_h(self): test_file = self.get_test_loc('ics/freetype-include-freetype/freetype.h') expected = [ u'Copyright 1996-2011 by David Turner, Robert Wilhelm, and Werner Lemberg.', ] check_detection(expected, test_file) def test_ics_freetype_include_freetype_ftadvanc_h(self): test_file = self.get_test_loc('ics/freetype-include-freetype/ftadvanc.h') expected = [ u'Copyright 2008 by David Turner, Robert Wilhelm, and Werner Lemberg.', ] check_detection(expected, 
test_file) def test_ics_freetype_include_freetype_ftbbox_h(self): test_file = self.get_test_loc('ics/freetype-include-freetype/ftbbox.h') expected = [ u'Copyright 1996-2001, 2003, 2007, 2011 by David Turner, Robert Wilhelm, and Werner Lemberg.', ] check_detection(expected, test_file) def test_ics_freetype_include_freetype_ftbdf_h(self): test_file = self.get_test_loc('ics/freetype-include-freetype/ftbdf.h') expected = [ u'Copyright 2002, 2003, 2004, 2006, 2009 by David Turner, Robert Wilhelm, and Werner Lemberg.', ] check_detection(expected, test_file) def test_ics_freetype_include_freetype_ftbitmap_h(self): test_file = self.get_test_loc('ics/freetype-include-freetype/ftbitmap.h') expected = [ u'Copyright 2004, 2005, 2006, 2008 by David Turner, Robert Wilhelm, and Werner Lemberg.', ] check_detection(expected, test_file) def test_ics_freetype_include_freetype_ftcache_h(self): test_file = self.get_test_loc('ics/freetype-include-freetype/ftcache.h') expected = [ u'Copyright 1996-2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2010 by David Turner, Robert Wilhelm, and Werner Lemberg.', ] check_detection(expected, test_file) def test_ics_freetype_include_freetype_ftcid_h(self): test_file = self.get_test_loc('ics/freetype-include-freetype/ftcid.h') expected = [ u'Copyright 2007, 2009 by Dereg Clegg, Michael Toftdal.', ] check_detection(expected, test_file) def test_ics_freetype_include_freetype_fterrdef_h(self): test_file = self.get_test_loc('ics/freetype-include-freetype/fterrdef.h') expected = [ u'Copyright 2002, 2004, 2006, 2007, 2010 by David Turner, Robert Wilhelm, and Werner Lemberg.', ] check_detection(expected, test_file) def test_ics_freetype_include_freetype_fterrors_h(self): test_file = self.get_test_loc('ics/freetype-include-freetype/fterrors.h') expected = [ u'Copyright 1996-2001, 2002, 2004, 2007 by David Turner, Robert Wilhelm, and Werner Lemberg.', ] check_detection(expected, test_file) def test_ics_freetype_include_freetype_ftgasp_h(self): test_file = 
self.get_test_loc('ics/freetype-include-freetype/ftgasp.h') expected = [ u'Copyright 2007, 2008, 2011 by David Turner, Robert Wilhelm, and Werner Lemberg.', ] check_detection(expected, test_file) def test_ics_freetype_include_freetype_ftglyph_h(self): test_file = self.get_test_loc('ics/freetype-include-freetype/ftglyph.h') expected = [ u'Copyright 1996-2003, 2006, 2008, 2009, 2011 by David Turner, Robert Wilhelm, and Werner Lemberg.', ] check_detection(expected, test_file) def test_ics_freetype_include_freetype_ftgxval_h(self): test_file = self.get_test_loc('ics/freetype-include-freetype/ftgxval.h') expected = [ u'Copyright 2004, 2005, 2006 by Masatake YAMATO', ] check_detection(expected, test_file) @expectedFailure def test_ics_freetype_include_freetype_ftgxval_h_trail_name(self): test_file = self.get_test_loc('ics/freetype-include-freetype/ftgxval.h') expected = [ u'Copyright 2004, 2005, 2006 by Masatake YAMATO, Redhat K.K, David Turner, Robert Wilhelm, and Werner Lemberg.', ] check_detection(expected, test_file) def test_ics_freetype_include_freetype_ftgzip_h(self): test_file = self.get_test_loc('ics/freetype-include-freetype/ftgzip.h') expected = [ u'Copyright 2002, 2003, 2004, 2006 by David Turner, Robert Wilhelm, and Werner Lemberg.', ] check_detection(expected, test_file) def test_ics_freetype_include_freetype_ftimage_h(self): test_file = self.get_test_loc('ics/freetype-include-freetype/ftimage.h') expected = [ u'Copyright 1996-2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 by David Turner, Robert Wilhelm, and Werner Lemberg.', ] check_detection(expected, test_file) def test_ics_freetype_include_freetype_ftincrem_h(self): test_file = self.get_test_loc('ics/freetype-include-freetype/ftincrem.h') expected = [ u'Copyright 2002, 2003, 2006, 2007, 2008, 2010 by David Turner, Robert Wilhelm, and Werner Lemberg.', ] check_detection(expected, test_file) def test_ics_freetype_include_freetype_ftlcdfil_h(self): test_file = 
self.get_test_loc('ics/freetype-include-freetype/ftlcdfil.h') expected = [ u'Copyright 2006, 2007, 2008, 2010 by David Turner, Robert Wilhelm, and Werner Lemberg.', ] check_detection(expected, test_file) def test_ics_freetype_include_freetype_ftlist_h(self): test_file = self.get_test_loc('ics/freetype-include-freetype/ftlist.h') expected = [ u'Copyright 1996-2001, 2003, 2007, 2010 by David Turner, Robert Wilhelm, and Werner Lemberg.', ] check_detection(expected, test_file) def test_ics_freetype_include_freetype_ftlzw_h(self): test_file = self.get_test_loc('ics/freetype-include-freetype/ftlzw.h') expected = [ u'Copyright 2004, 2006 by David Turner, Robert Wilhelm, and Werner Lemberg.', ] check_detection(expected, test_file) def test_ics_freetype_include_freetype_ftmac_h(self): test_file = self.get_test_loc('ics/freetype-include-freetype/ftmac.h') expected = [ u'Copyright 1996-2001, 2004, 2006, 2007 by Just van Rossum, David Turner, Robert Wilhelm, and Werner Lemberg.', ] check_detection(expected, test_file) def test_ics_freetype_include_freetype_ftmm_h(self): test_file = self.get_test_loc('ics/freetype-include-freetype/ftmm.h') expected = [ u'Copyright 1996-2001, 2003, 2004, 2006, 2009 by David Turner, Robert Wilhelm, and Werner Lemberg.', ] check_detection(expected, test_file) def test_ics_freetype_include_freetype_ftmodapi_h(self): test_file = self.get_test_loc('ics/freetype-include-freetype/ftmodapi.h') expected = [ u'Copyright 1996-2001, 2002, 2003, 2006, 2008, 2009, 2010 by David Turner, Robert Wilhelm, and Werner Lemberg.', ] check_detection(expected, test_file) def test_ics_freetype_include_freetype_ftmoderr_h(self): test_file = self.get_test_loc('ics/freetype-include-freetype/ftmoderr.h') expected = [ u'Copyright 2001, 2002, 2003, 2004, 2005, 2010 by David Turner, Robert Wilhelm, and Werner Lemberg.', ] check_detection(expected, test_file) def test_ics_freetype_include_freetype_ftotval_h(self): test_file = 
self.get_test_loc('ics/freetype-include-freetype/ftotval.h') expected = [ u'Copyright 2004, 2005, 2006, 2007 by David Turner, Robert Wilhelm, and Werner Lemberg.', ] check_detection(expected, test_file) def test_ics_freetype_include_freetype_ftoutln_h(self): test_file = self.get_test_loc('ics/freetype-include-freetype/ftoutln.h') expected = [ u'Copyright 1996-2003, 2005-2011 by David Turner, Robert Wilhelm, and Werner Lemberg.', ] check_detection(expected, test_file) def test_ics_freetype_include_freetype_ftpfr_h(self): test_file = self.get_test_loc('ics/freetype-include-freetype/ftpfr.h') expected = [ u'Copyright 2002, 2003, 2004, 2006, 2008, 2009 by David Turner, Robert Wilhelm, and Werner Lemberg.', ] check_detection(expected, test_file) def test_ics_freetype_include_freetype_ftrender_h(self): test_file = self.get_test_loc('ics/freetype-include-freetype/ftrender.h') expected = [ u'Copyright 1996-2001, 2005, 2006, 2010 by David Turner, Robert Wilhelm, and Werner Lemberg.', ] check_detection(expected, test_file) def test_ics_freetype_include_freetype_ftsnames_h(self): test_file = self.get_test_loc('ics/freetype-include-freetype/ftsnames.h') expected = [ u'Copyright 1996-2001, 2002, 2003, 2006, 2009, 2010 by David Turner, Robert Wilhelm, and Werner Lemberg.', ] check_detection(expected, test_file) def test_ics_freetype_include_freetype_ftstroke_h(self): test_file = self.get_test_loc('ics/freetype-include-freetype/ftstroke.h') expected = [ u'Copyright 2002-2006, 2008, 2009, 2011 by David Turner, Robert Wilhelm, and Werner Lemberg.', ] check_detection(expected, test_file) def test_ics_freetype_include_freetype_ftsynth_h(self): test_file = self.get_test_loc('ics/freetype-include-freetype/ftsynth.h') expected = [ u'Copyright 2000-2001, 2003, 2006, 2008 by David Turner, Robert Wilhelm, and Werner Lemberg.', ] check_detection(expected, test_file) def test_ics_freetype_include_freetype_ftsystem_h(self): test_file = 
self.get_test_loc('ics/freetype-include-freetype/ftsystem.h') expected = [ u'Copyright 1996-2001, 2002, 2005, 2010 by David Turner, Robert Wilhelm, and Werner Lemberg.', ] check_detection(expected, test_file) def test_ics_freetype_include_freetype_fttrigon_h(self): test_file = self.get_test_loc('ics/freetype-include-freetype/fttrigon.h') expected = [ u'Copyright 2001, 2003, 2005, 2007 by David Turner, Robert Wilhelm, and Werner Lemberg.', ] check_detection(expected, test_file) def test_ics_freetype_include_freetype_fttypes_h(self): test_file = self.get_test_loc('ics/freetype-include-freetype/fttypes.h') expected = [ u'Copyright 1996-2001, 2002, 2004, 2006, 2007, 2008 by David Turner, Robert Wilhelm, and Werner Lemberg.', ] check_detection(expected, test_file) def test_ics_freetype_include_freetype_ftwinfnt_h(self): test_file = self.get_test_loc('ics/freetype-include-freetype/ftwinfnt.h') expected = [ u'Copyright 2003, 2004, 2008 by David Turner, Robert Wilhelm, and Werner Lemberg.', ] check_detection(expected, test_file) def test_ics_freetype_include_freetype_ftxf86_h(self): test_file = self.get_test_loc('ics/freetype-include-freetype/ftxf86.h') expected = [ u'Copyright 2002, 2003, 2004, 2006, 2007 by David Turner, Robert Wilhelm, and Werner Lemberg.', ] check_detection(expected, test_file) def test_ics_freetype_include_freetype_t1tables_h(self): test_file = self.get_test_loc('ics/freetype-include-freetype/t1tables.h') expected = [ u'Copyright 1996-2001, 2002, 2003, 2004, 2006, 2008, 2009 by David Turner, Robert Wilhelm, and Werner Lemberg.', ] check_detection(expected, test_file) def test_ics_freetype_include_freetype_ttnameid_h(self): test_file = self.get_test_loc('ics/freetype-include-freetype/ttnameid.h') expected = [ u'Copyright 1996-2002, 2003, 2004, 2006, 2007, 2008 by David Turner, Robert Wilhelm, and Werner Lemberg.', ] check_detection(expected, test_file) def test_ics_freetype_include_freetype_tttables_h(self): test_file = 
# NOTE(review): this chunk is a run of generated copyright-detection test
# methods (pattern: load a fixture via self.get_test_loc, assert the detected
# copyright statements with check_detection). The chunk begins and ends
# mid-method; the leading and trailing fragments are preserved verbatim.
# Tests marked @expectedFailure document known detection gaps (usually a
# trailing name such as "K.K." that the detector currently drops).
        self.get_test_loc('ics/freetype-include-freetype/tttables.h')
        expected = [
            u'Copyright 1996-2005, 2008-2011 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    # --- freetype: include/freetype headers ---

    def test_ics_freetype_include_freetype_tttags_h(self):
        test_file = self.get_test_loc('ics/freetype-include-freetype/tttags.h')
        expected = [
            u'Copyright 1996-2001, 2004, 2005, 2007, 2008 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_include_freetype_ttunpat_h(self):
        test_file = self.get_test_loc('ics/freetype-include-freetype/ttunpat.h')
        expected = [
            u'Copyright 2003, 2006 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_include_freetype_config_ftconfig_h(self):
        test_file = self.get_test_loc('ics/freetype-include-freetype-config/ftconfig.h')
        expected = [
            u'Copyright 1996-2004, 2006-2008, 2010-2011 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_include_freetype_config_ftstdlib_h(self):
        test_file = self.get_test_loc('ics/freetype-include-freetype-config/ftstdlib.h')
        expected = [
            u'Copyright 2002, 2003, 2004, 2005, 2006, 2007, 2009 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_include_freetype_internal_autohint_h(self):
        test_file = self.get_test_loc('ics/freetype-include-freetype-internal/autohint.h')
        expected = [
            u'Copyright 1996-2001, 2002, 2007 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_include_freetype_internal_ftcalc_h(self):
        test_file = self.get_test_loc('ics/freetype-include-freetype-internal/ftcalc.h')
        expected = [
            u'Copyright 1996-2001, 2002, 2003, 2004, 2005, 2006, 2008, 2009 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_include_freetype_internal_ftdebug_h(self):
        test_file = self.get_test_loc('ics/freetype-include-freetype-internal/ftdebug.h')
        expected = [
            u'Copyright 1996-2001, 2002, 2004, 2006, 2007, 2008, 2009 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_include_freetype_internal_ftdriver_h(self):
        test_file = self.get_test_loc('ics/freetype-include-freetype-internal/ftdriver.h')
        expected = [
            u'Copyright 1996-2001, 2002, 2003, 2006, 2008 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_include_freetype_internal_ftgloadr_h(self):
        test_file = self.get_test_loc('ics/freetype-include-freetype-internal/ftgloadr.h')
        expected = [
            u'Copyright 2002, 2003, 2005, 2006 by David Turner, Robert Wilhelm, and Werner Lemberg',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_include_freetype_internal_ftmemory_h(self):
        test_file = self.get_test_loc('ics/freetype-include-freetype-internal/ftmemory.h')
        expected = [
            u'Copyright 1996-2001, 2002, 2004, 2005, 2006, 2007, 2010 by David Turner, Robert Wilhelm, and Werner Lemberg',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_include_freetype_internal_ftobjs_h(self):
        test_file = self.get_test_loc('ics/freetype-include-freetype-internal/ftobjs.h')
        expected = [
            u'Copyright 1996-2001, 2002, 2003, 2004, 2005, 2006, 2008, 2010 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_include_freetype_internal_ftpic_h(self):
        test_file = self.get_test_loc('ics/freetype-include-freetype-internal/ftpic.h')
        expected = [
            u'Copyright 2009 by Oran Agra and Mickey Gabel.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_include_freetype_internal_ftrfork_h(self):
        test_file = self.get_test_loc('ics/freetype-include-freetype-internal/ftrfork.h')
        expected = [
            u'Copyright 2004, 2006, 2007 by Masatake YAMATO and Redhat',
        ]
        check_detection(expected, test_file)

    # known gap: trailing company name "K.K." is not captured
    @expectedFailure
    def test_ics_freetype_include_freetype_internal_ftrfork_h_trail_name(self):
        test_file = self.get_test_loc('ics/freetype-include-freetype-internal/ftrfork.h')
        expected = [
            u'Copyright 2004, 2006, 2007 by Masatake YAMATO and Redhat K.K.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_include_freetype_internal_ftserv_h(self):
        test_file = self.get_test_loc('ics/freetype-include-freetype-internal/ftserv.h')
        expected = [
            u'Copyright 2003, 2004, 2005, 2006, 2007 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_include_freetype_internal_ftstream_h(self):
        test_file = self.get_test_loc('ics/freetype-include-freetype-internal/ftstream.h')
        expected = [
            u'Copyright 1996-2002, 2004-2006, 2011 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_include_freetype_internal_fttrace_h(self):
        test_file = self.get_test_loc('ics/freetype-include-freetype-internal/fttrace.h')
        expected = [
            u'Copyright 2002, 2004-2007, 2009, 2011 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_include_freetype_internal_ftvalid_h(self):
        test_file = self.get_test_loc('ics/freetype-include-freetype-internal/ftvalid.h')
        expected = [
            u'Copyright 2004 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_include_freetype_internal_internal_h(self):
        test_file = self.get_test_loc('ics/freetype-include-freetype-internal/internal.h')
        expected = [
            u'Copyright 1996-2001, 2002, 2003, 2004 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_include_freetype_internal_pcftypes_h(self):
        test_file = self.get_test_loc('ics/freetype-include-freetype-internal/pcftypes.h')
        expected = [
            u'Copyright (c) 2000, 2001, 2002 by Francesco Zappa Nardelli',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_include_freetype_internal_pshints_h(self):
        test_file = self.get_test_loc('ics/freetype-include-freetype-internal/pshints.h')
        expected = [
            u'Copyright 2001, 2002, 2003, 2005, 2006, 2007, 2009 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_include_freetype_internal_sfnt_h(self):
        test_file = self.get_test_loc('ics/freetype-include-freetype-internal/sfnt.h')
        expected = [
            u'Copyright 1996-2001, 2002, 2003, 2004, 2005, 2006 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_include_freetype_internal_tttypes_h(self):
        test_file = self.get_test_loc('ics/freetype-include-freetype-internal/tttypes.h')
        expected = [
            u'Copyright 1996-2001, 2002, 2004, 2005, 2006, 2007, 2008 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_include_freetype_internal_services_svbdf_h(self):
        test_file = self.get_test_loc('ics/freetype-include-freetype-internal-services/svbdf.h')
        expected = [
            u'Copyright 2003 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_include_freetype_internal_services_svcid_h(self):
        test_file = self.get_test_loc('ics/freetype-include-freetype-internal-services/svcid.h')
        expected = [
            u'Copyright 2007, 2009 by Derek Clegg, Michael Toftdal.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_include_freetype_internal_services_svgxval_h(self):
        test_file = self.get_test_loc('ics/freetype-include-freetype-internal-services/svgxval.h')
        expected = [
            u'Copyright 2004, 2005 by Masatake YAMATO, Red Hat K.K., David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_include_freetype_internal_services_svkern_h(self):
        test_file = self.get_test_loc('ics/freetype-include-freetype-internal-services/svkern.h')
        expected = [
            u'Copyright 2006 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_include_freetype_internal_services_svmm_h(self):
        test_file = self.get_test_loc('ics/freetype-include-freetype-internal-services/svmm.h')
        expected = [
            u'Copyright 2003, 2004 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_include_freetype_internal_services_svpostnm_h(self):
        test_file = self.get_test_loc('ics/freetype-include-freetype-internal-services/svpostnm.h')
        expected = [
            u'Copyright 2003, 2007 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_include_freetype_internal_services_svpsinfo_h(self):
        test_file = self.get_test_loc('ics/freetype-include-freetype-internal-services/svpsinfo.h')
        expected = [
            u'Copyright 2003, 2004, 2009 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_include_freetype_internal_services_svttcmap_h(self):
        test_file = self.get_test_loc('ics/freetype-include-freetype-internal-services/svttcmap.h')
        expected = [
            u'Copyright 2003 by Masatake YAMATO',
            u'Copyright 2003, 2008 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    # known gap: trailing company name "Redhat K.K." is not captured
    @expectedFailure
    def test_ics_freetype_include_freetype_internal_services_svttcmap_h_trail_name(self):
        test_file = self.get_test_loc('ics/freetype-include-freetype-internal-services/svttcmap.h')
        expected = [
            u'Copyright 2003 by Masatake YAMATO, Redhat K.K.',
            u'Copyright 2003, 2008 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_include_freetype_internal_services_svttglyf_h(self):
        test_file = self.get_test_loc('ics/freetype-include-freetype-internal-services/svttglyf.h')
        expected = [
            u'Copyright 2007 by David Turner.',
        ]
        check_detection(expected, test_file)

    # --- freetype: src/autofit ---

    def test_ics_freetype_src_autofit_afangles_c(self):
        test_file = self.get_test_loc('ics/freetype-src-autofit/afangles.c')
        expected = [
            u'Copyright 2003-2006, 2011 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_src_autofit_afcjk_c(self):
        test_file = self.get_test_loc('ics/freetype-src-autofit/afcjk.c')
        expected = [
            u'Copyright 2006-2011 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_src_autofit_afcjk_h(self):
        test_file = self.get_test_loc('ics/freetype-src-autofit/afcjk.h')
        expected = [
            u'Copyright 2006, 2007, 2011 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_src_autofit_afdummy_c(self):
        test_file = self.get_test_loc('ics/freetype-src-autofit/afdummy.c')
        expected = [
            u'Copyright 2003-2005, 2011 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_src_autofit_aferrors_h(self):
        test_file = self.get_test_loc('ics/freetype-src-autofit/aferrors.h')
        expected = [
            u'Copyright 2005 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_src_autofit_afglobal_c(self):
        test_file = self.get_test_loc('ics/freetype-src-autofit/afglobal.c')
        expected = [
            u'Copyright 2003-2011 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_src_autofit_afglobal_h(self):
        test_file = self.get_test_loc('ics/freetype-src-autofit/afglobal.h')
        expected = [
            u'Copyright 2003-2005, 2007, 2009, 2011 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_src_autofit_afhints_c(self):
        test_file = self.get_test_loc('ics/freetype-src-autofit/afhints.c')
        expected = [
            u'Copyright 2003-2007, 2009-2011 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_src_autofit_afhints_h(self):
        test_file = self.get_test_loc('ics/freetype-src-autofit/afhints.h')
        expected = [
            u'Copyright 2003-2008, 2010-2011 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_src_autofit_afindic_c(self):
        test_file = self.get_test_loc('ics/freetype-src-autofit/afindic.c')
        expected = [
            u'Copyright 2007, 2011 by Rahul Bhalerao <rahul.bhalerao@redhat.com>, <b.rahul.pm@gmail.com>.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_src_autofit_afindic_h(self):
        test_file = self.get_test_loc('ics/freetype-src-autofit/afindic.h')
        expected = [
            u'Copyright 2007 by Rahul Bhalerao <rahul.bhalerao@redhat.com>, <b.rahul.pm@gmail.com>.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_src_autofit_aflatin_h(self):
        test_file = self.get_test_loc('ics/freetype-src-autofit/aflatin.h')
        expected = [
            u'Copyright 2003-2007, 2009, 2011 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_src_autofit_afloader_c(self):
        test_file = self.get_test_loc('ics/freetype-src-autofit/afloader.c')
        expected = [
            u'Copyright 2003-2009, 2011 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_src_autofit_afmodule_c(self):
        test_file = self.get_test_loc('ics/freetype-src-autofit/afmodule.c')
        expected = [
            u'Copyright 2003-2006, 2009, 2011 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_src_autofit_afmodule_h(self):
        test_file = self.get_test_loc('ics/freetype-src-autofit/afmodule.h')
        expected = [
            u'Copyright 2003, 2004, 2005 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_src_autofit_afpic_c(self):
        test_file = self.get_test_loc('ics/freetype-src-autofit/afpic.c')
        expected = [
            u'Copyright 2009, 2010, 2011 by Oran Agra and Mickey Gabel.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_src_autofit_afpic_h(self):
        test_file = self.get_test_loc('ics/freetype-src-autofit/afpic.h')
        expected = [
            u'Copyright 2009, 2011 by Oran Agra and Mickey Gabel.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_src_autofit_afwarp_h(self):
        test_file = self.get_test_loc('ics/freetype-src-autofit/afwarp.h')
        expected = [
            u'Copyright 2006, 2007 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_src_autofit_autofit_c(self):
        test_file = self.get_test_loc('ics/freetype-src-autofit/autofit.c')
        expected = [
            u'Copyright 2003-2007, 2011 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    # --- freetype: src/base ---

    def test_ics_freetype_src_base_ftadvanc_c(self):
        test_file = self.get_test_loc('ics/freetype-src-base/ftadvanc.c')
        expected = [
            u'Copyright 2008, 2009 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_src_base_ftapi_c(self):
        test_file = self.get_test_loc('ics/freetype-src-base/ftapi.c')
        expected = [
            u'Copyright 2002 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_src_base_ftbase_c(self):
        test_file = self.get_test_loc('ics/freetype-src-base/ftbase.c')
        expected = [
            u'Copyright 1996-2001, 2002, 2003, 2004, 2006, 2007, 2008, 2009 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_src_base_ftbase_h(self):
        test_file = self.get_test_loc('ics/freetype-src-base/ftbase.h')
        expected = [
            u'Copyright 2008, 2010 by David Turner, Robert Wilhelm, Werner Lemberg',
        ]
        check_detection(expected, test_file)

    # known gap: trailing holder "and suzuki toshiya." is not captured
    @expectedFailure
    def test_ics_freetype_src_base_ftbase_h_trail_name(self):
        test_file = self.get_test_loc('ics/freetype-src-base/ftbase.h')
        expected = [
            u'Copyright 2008, 2010 by David Turner, Robert Wilhelm, Werner Lemberg and suzuki toshiya.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_src_base_ftbbox_c(self):
        test_file = self.get_test_loc('ics/freetype-src-base/ftbbox.c')
        expected = [
            u'Copyright 1996-2001, 2002, 2004, 2006, 2010 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_src_base_ftbitmap_c(self):
        test_file = self.get_test_loc('ics/freetype-src-base/ftbitmap.c')
        expected = [
            u'Copyright 2004, 2005, 2006, 2007, 2008, 2009 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_src_base_ftcalc_c(self):
        test_file = self.get_test_loc('ics/freetype-src-base/ftcalc.c')
        expected = [
            u'Copyright 1996-2001, 2002, 2003, 2004, 2005, 2006, 2008 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_src_base_ftdbgmem_c(self):
        test_file = self.get_test_loc('ics/freetype-src-base/ftdbgmem.c')
        expected = [
            u'Copyright 2001, 2002, 2003, 2004, 2005, 2006, 2009 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_src_base_ftdebug_c(self):
        test_file = self.get_test_loc('ics/freetype-src-base/ftdebug.c')
        expected = [
            u'Copyright 1996-2001, 2002, 2004, 2008 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_src_base_ftgloadr_c(self):
        test_file = self.get_test_loc('ics/freetype-src-base/ftgloadr.c')
        expected = [
            u'Copyright 2002, 2003, 2004, 2005, 2006, 2010 by David Turner, Robert Wilhelm, and Werner Lemberg',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_src_base_ftglyph_c(self):
        test_file = self.get_test_loc('ics/freetype-src-base/ftglyph.c')
        expected = [
            u'Copyright 1996-2001, 2002, 2003, 2004, 2005, 2007, 2008, 2010 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_src_base_ftinit_c(self):
        test_file = self.get_test_loc('ics/freetype-src-base/ftinit.c')
        expected = [
            u'Copyright 1996-2001, 2002, 2005, 2007, 2009 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_src_base_ftlcdfil_c(self):
        test_file = self.get_test_loc('ics/freetype-src-base/ftlcdfil.c')
        expected = [
            u'Copyright 2006, 2008, 2009, 2010 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_src_base_ftmm_c(self):
        test_file = self.get_test_loc('ics/freetype-src-base/ftmm.c')
        expected = [
            u'Copyright 1996-2001, 2003, 2004, 2009 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_src_base_ftobjs_c(self):
        test_file = self.get_test_loc('ics/freetype-src-base/ftobjs.c')
        expected = [
            u'Copyright 1996-2011 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_src_base_ftpatent_c(self):
        test_file = self.get_test_loc('ics/freetype-src-base/ftpatent.c')
        expected = [
            u'Copyright 2007, 2008, 2010 by David Turner.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_src_base_ftrfork_c(self):
        test_file = self.get_test_loc('ics/freetype-src-base/ftrfork.c')
        expected = [
            u'Copyright 2004, 2005, 2006, 2007, 2008, 2009, 2010 by Masatake YAMATO and Redhat',
        ]
        check_detection(expected, test_file)

    # known gap: trailing company name "K.K." is not captured
    @expectedFailure
    def test_ics_freetype_src_base_ftrfork_c_trail_name(self):
        test_file = self.get_test_loc('ics/freetype-src-base/ftrfork.c')
        expected = [
            u'Copyright 2004, 2005, 2006, 2007, 2008, 2009, 2010 by Masatake YAMATO and Redhat K.K.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_src_base_ftsnames_c(self):
        test_file = self.get_test_loc('ics/freetype-src-base/ftsnames.c')
        expected = [
            u'Copyright 1996-2001, 2002, 2009 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_src_base_ftstream_c(self):
        test_file = self.get_test_loc('ics/freetype-src-base/ftstream.c')
        expected = [
            u'Copyright 2000-2002, 2004-2006, 2008-2011 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_src_base_ftstroke_c(self):
        test_file = self.get_test_loc('ics/freetype-src-base/ftstroke.c')
        expected = [
            u'Copyright 2002-2006, 2008-2011 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_src_base_ftsynth_c(self):
        test_file = self.get_test_loc('ics/freetype-src-base/ftsynth.c')
        expected = [
            u'Copyright 2000-2001, 2002, 2003, 2004, 2005, 2006, 2010 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_src_base_ftsystem_c(self):
        test_file = self.get_test_loc('ics/freetype-src-base/ftsystem.c')
        expected = [
            u'Copyright 1996-2002, 2006, 2008-2011 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_src_base_fttrigon_c(self):
        test_file = self.get_test_loc('ics/freetype-src-base/fttrigon.c')
        expected = [
            u'Copyright 2001, 2002, 2003, 2004, 2005 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_src_base_ftutil_c(self):
        test_file = self.get_test_loc('ics/freetype-src-base/ftutil.c')
        expected = [
            u'Copyright 2002, 2004, 2005, 2006, 2007 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_src_base_ftxf86_c(self):
        test_file = self.get_test_loc('ics/freetype-src-base/ftxf86.c')
        expected = [
            u'Copyright 2002, 2003, 2004 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    # --- freetype: src/cff ---

    def test_ics_freetype_src_cff_cff_c(self):
        test_file = self.get_test_loc('ics/freetype-src-cff/cff.c')
        expected = [
            u'Copyright 1996-2001, 2002 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_src_cff_cffcmap_c(self):
        test_file = self.get_test_loc('ics/freetype-src-cff/cffcmap.c')
        expected = [
            u'Copyright 2002, 2003, 2004, 2005, 2006, 2007, 2010 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_src_cff_cffcmap_h(self):
        test_file = self.get_test_loc('ics/freetype-src-cff/cffcmap.h')
        expected = [
            u'Copyright 2002, 2003, 2006 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_src_cff_cfferrs_h(self):
        test_file = self.get_test_loc('ics/freetype-src-cff/cfferrs.h')
        expected = [
            u'Copyright 2001 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_src_cff_cffload_c(self):
        test_file = self.get_test_loc('ics/freetype-src-cff/cffload.c')
        expected = [
            u'Copyright 1996-2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_src_cff_cffload_h(self):
        test_file = self.get_test_loc('ics/freetype-src-cff/cffload.h')
        expected = [
            u'Copyright 1996-2001, 2002, 2003, 2007, 2008, 2010 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_src_cff_cffobjs_h(self):
        test_file = self.get_test_loc('ics/freetype-src-cff/cffobjs.h')
        expected = [
            u'Copyright 1996-2001, 2002, 2003, 2004, 2006, 2007, 2008 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_src_cff_cffparse_c(self):
        test_file = self.get_test_loc('ics/freetype-src-cff/cffparse.c')
        expected = [
            u'Copyright 1996-2004, 2007-2011 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_src_cff_cffparse_h(self):
        test_file = self.get_test_loc('ics/freetype-src-cff/cffparse.h')
        expected = [
            u'Copyright 1996-2001, 2002, 2003 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_src_cff_cffpic_c(self):
        test_file = self.get_test_loc('ics/freetype-src-cff/cffpic.c')
        expected = [
            u'Copyright 2009, 2010 by Oran Agra and Mickey Gabel.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_src_cff_cfftypes_h(self):
        test_file = self.get_test_loc('ics/freetype-src-cff/cfftypes.h')
        expected = [
            u'Copyright 1996-2003, 2006-2008, 2010-2011 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    # --- freetype: src/psaux ---

    def test_ics_freetype_src_psaux_afmparse_c(self):
        test_file = self.get_test_loc('ics/freetype-src-psaux/afmparse.c')
        expected = [
            u'Copyright 2006, 2007, 2008, 2009, 2010 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_src_psaux_psaux_c(self):
        test_file = self.get_test_loc('ics/freetype-src-psaux/psaux.c')
        expected = [
            u'Copyright 1996-2001, 2002, 2006 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_src_psaux_psauxmod_c(self):
        test_file = self.get_test_loc('ics/freetype-src-psaux/psauxmod.c')
        expected = [
            u'Copyright 2000-2001, 2002, 2003, 2006 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_src_psaux_psauxmod_h(self):
        test_file = self.get_test_loc('ics/freetype-src-psaux/psauxmod.h')
        expected = [
            u'Copyright 2000-2001 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_src_psaux_psconv_c(self):
        test_file = self.get_test_loc('ics/freetype-src-psaux/psconv.c')
        expected = [
            u'Copyright 2006, 2008, 2009 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_src_psaux_t1cmap_c(self):
        test_file = self.get_test_loc('ics/freetype-src-psaux/t1cmap.c')
        expected = [
            u'Copyright 2002, 2003, 2006, 2007 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_src_psaux_t1decode_c(self):
        test_file = self.get_test_loc('ics/freetype-src-psaux/t1decode.c')
        expected = [
            u'Copyright 2000-2011 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_src_psaux_t1decode_h(self):
        test_file = self.get_test_loc('ics/freetype-src-psaux/t1decode.h')
        expected = [
            u'Copyright 2000-2001, 2002, 2003 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    # --- freetype: src/pshinter ---

    def test_ics_freetype_src_pshinter_pshalgo_c(self):
        test_file = self.get_test_loc('ics/freetype-src-pshinter/pshalgo.c')
        expected = [
            u'Copyright 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_src_pshinter_pshalgo_h(self):
        test_file = self.get_test_loc('ics/freetype-src-pshinter/pshalgo.h')
        expected = [
            u'Copyright 2001, 2002, 2003, 2008 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_src_pshinter_pshglob_c(self):
        test_file = self.get_test_loc('ics/freetype-src-pshinter/pshglob.c')
        expected = [
            u'Copyright 2001, 2002, 2003, 2004, 2006, 2010 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_src_pshinter_pshglob_h(self):
        test_file = self.get_test_loc('ics/freetype-src-pshinter/pshglob.h')
        expected = [
            u'Copyright 2001, 2002, 2003 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_src_pshinter_pshinter_c(self):
        test_file = self.get_test_loc('ics/freetype-src-pshinter/pshinter.c')
        expected = [
            u'Copyright 2001, 2003 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_src_pshinter_pshmod_c(self):
        test_file = self.get_test_loc('ics/freetype-src-pshinter/pshmod.c')
        expected = [
            u'Copyright 2001, 2002, 2007 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_src_pshinter_pshrec_c(self):
        test_file = self.get_test_loc('ics/freetype-src-pshinter/pshrec.c')
        expected = [
            u'Copyright 2001, 2002, 2003, 2004, 2007, 2009 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_src_pshinter_pshrec_h(self):
        test_file = self.get_test_loc('ics/freetype-src-pshinter/pshrec.h')
        expected = [
            u'Copyright 2001, 2002, 2003, 2006, 2008 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    # --- freetype: src/psnames, src/raster ---

    def test_ics_freetype_src_psnames_psmodule_c(self):
        test_file = self.get_test_loc('ics/freetype-src-psnames/psmodule.c')
        expected = [
            u'Copyright 1996-2001, 2002, 2003, 2005, 2006, 2007, 2008 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_src_psnames_psmodule_h(self):
        test_file = self.get_test_loc('ics/freetype-src-psnames/psmodule.h')
        expected = [
            u'Copyright 1996-2001 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_src_psnames_pstables_h(self):
        test_file = self.get_test_loc('ics/freetype-src-psnames/pstables.h')
        expected = [
            u'Copyright 2005, 2008 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_src_raster_ftmisc_h(self):
        test_file = self.get_test_loc('ics/freetype-src-raster/ftmisc.h')
        expected = [
            u'Copyright 2005, 2009, 2010 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_src_raster_ftraster_c(self):
        test_file = self.get_test_loc('ics/freetype-src-raster/ftraster.c')
        expected = [
            u'Copyright 1996-2001, 2002, 2003, 2005, 2007, 2008, 2009, 2010, 2011 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_src_raster_ftrend1_c(self):
        test_file = self.get_test_loc('ics/freetype-src-raster/ftrend1.c')
        expected = [
            u'Copyright 1996-2001, 2002, 2003, 2005, 2006 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    # --- freetype: src/sfnt ---

    def test_ics_freetype_src_sfnt_sfdriver_c(self):
        test_file = self.get_test_loc('ics/freetype-src-sfnt/sfdriver.c')
        expected = [
            u'Copyright 1996-2007, 2009-2011 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_src_sfnt_sferrors_h(self):
        test_file = self.get_test_loc('ics/freetype-src-sfnt/sferrors.h')
        expected = [
            u'Copyright 2001, 2004 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_src_sfnt_sfobjs_c(self):
        test_file = self.get_test_loc('ics/freetype-src-sfnt/sfobjs.c')
        expected = [
            u'Copyright 1996-2008, 2010-2011 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_src_sfnt_ttbdf_c(self):
        test_file = self.get_test_loc('ics/freetype-src-sfnt/ttbdf.c')
        expected = [
            u'Copyright 2005, 2006, 2010 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_src_sfnt_ttcmap_c(self):
        test_file = self.get_test_loc('ics/freetype-src-sfnt/ttcmap.c')
        expected = [
            u'Copyright 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_src_sfnt_ttcmap_h(self):
        test_file = self.get_test_loc('ics/freetype-src-sfnt/ttcmap.h')
        expected = [
            u'Copyright 2002, 2003, 2004, 2005 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_src_sfnt_ttkern_c(self):
        test_file = self.get_test_loc('ics/freetype-src-sfnt/ttkern.c')
        expected = [
            u'Copyright 1996-2001, 2002, 2003, 2004, 2005, 2006, 2007, 2009, 2010 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_src_sfnt_ttkern_h(self):
        test_file = self.get_test_loc('ics/freetype-src-sfnt/ttkern.h')
        expected = [
            u'Copyright 1996-2001, 2002, 2005, 2007 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_src_sfnt_ttload_h(self):
        test_file = self.get_test_loc('ics/freetype-src-sfnt/ttload.h')
        expected = [
            u'Copyright 1996-2001, 2002, 2005, 2006 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_src_sfnt_ttmtx_c(self):
        test_file = self.get_test_loc('ics/freetype-src-sfnt/ttmtx.c')
        expected = [
            u'Copyright 2006-2009, 2011 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_src_sfnt_ttpost_c(self):
        test_file = self.get_test_loc('ics/freetype-src-sfnt/ttpost.c')
        expected = [
            u'Copyright 1996-2001, 2002, 2003, 2006, 2007, 2008, 2009, 2010 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_src_sfnt_ttsbit_h(self):
        test_file = self.get_test_loc('ics/freetype-src-sfnt/ttsbit.h')
        expected = [
            u'Copyright 1996-2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_src_sfnt_ttsbit0_c(self):
        test_file = self.get_test_loc('ics/freetype-src-sfnt/ttsbit0.c')
        expected = [
            u'Copyright 2005, 2006, 2007, 2008, 2009 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    # --- freetype: src/smooth, src/truetype ---

    def test_ics_freetype_src_smooth_ftgrays_c(self):
        test_file = self.get_test_loc('ics/freetype-src-smooth/ftgrays.c')
        expected = [
            u'Copyright 2000-2003, 2005-2011 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_src_smooth_ftsmooth_c(self):
        test_file = self.get_test_loc('ics/freetype-src-smooth/ftsmooth.c')
        expected = [
            u'Copyright 2000-2006, 2009-2011 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_src_truetype_truetype_c(self):
        test_file = self.get_test_loc('ics/freetype-src-truetype/truetype.c')
        expected = [
            u'Copyright 1996-2001, 2004, 2006 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_src_truetype_ttgload_c(self):
        test_file = self.get_test_loc('ics/freetype-src-truetype/ttgload.c')
        expected = [
            u'Copyright 1996-2011 David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_src_truetype_ttgload_h(self):
        test_file = self.get_test_loc('ics/freetype-src-truetype/ttgload.h')
        expected = [
            u'Copyright 1996-2006, 2008, 2011 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_src_truetype_ttinterp_h(self):
        test_file = self.get_test_loc('ics/freetype-src-truetype/ttinterp.h')
        expected = [
            u'Copyright 1996-2001, 2002, 2003, 2004, 2005, 2006, 2007, 2010 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_src_truetype_ttobjs_h(self):
        test_file = self.get_test_loc('ics/freetype-src-truetype/ttobjs.h')
        expected = [
            u'Copyright 1996-2009, 2011 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    def test_ics_freetype_src_truetype_ttpload_c(self):
        test_file = self.get_test_loc('ics/freetype-src-truetype/ttpload.c')
        expected = [
            u'Copyright 1996-2002, 2004-2011 by David Turner, Robert Wilhelm, and Werner Lemberg.',
        ]
        check_detection(expected, test_file)

    # --- fsck_msdos ---

    def test_ics_fsck_msdos_boot_c(self):
        test_file = self.get_test_loc('ics/fsck_msdos/boot.c')
        expected = [
            u'Copyright (c) 1995, 1997 Wolfgang Solfrank',
            u'Copyright (c) 1995 Martin Husemann',
        ]
        check_detection(expected, test_file)

    def test_ics_fsck_msdos_check_c(self):
        test_file = self.get_test_loc('ics/fsck_msdos/check.c')
        expected = [
            u'Copyright (c) 1995, 1996, 1997 Wolfgang Solfrank',
            u'Copyright (c) 1995 Martin Husemann',
        ]
        check_detection(expected, test_file)

    def test_ics_fsck_msdos_main_c(self):
        test_file = self.get_test_loc('ics/fsck_msdos/main.c')
        expected = [
            u'Copyright (c) 1995 Wolfgang Solfrank',
            u'Copyright (c) 1995 Martin Husemann',
        ]
        check_detection(expected, test_file)

    def test_ics_fsck_msdos_notice(self):
        test_file = self.get_test_loc('ics/fsck_msdos/NOTICE')
        expected = [
            u'Copyright (c) 1995, 1997 Wolfgang Solfrank',
            u'Copyright (c) 1995 Martin Husemann',
            u'Copyright (c) 1995, 1996, 1997 Wolfgang Solfrank',
            u'Copyright (c) 1995 Martin Husemann',
            u'Copyright (c) 1995 Wolfgang Solfrank',
            u'Copyright (c) 1995 Martin Husemann',
        ]
        check_detection(expected, test_file)

    # --- genext2fs ---

    def test_ics_genext2fs_aclocal_m4(self):
        test_file = self.get_test_loc('ics/genext2fs/aclocal.m4')
        expected = [
            u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.',
            u'Copyright (c) 2002, 2003, 2005 Free Software Foundation, Inc.',
            u'Copyright (c) 2001, 2003, 2005 Free Software Foundation, Inc.',
            u'Copyright (c) 1997, 2000, 2001, 2003, 2004, 2005 Free Software Foundation, Inc.',
            u'Copyright (c) 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.',
            u'Copyright (c) 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.',
            u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.',
            u'Copyright (c) 2001, 2003, 2005 Free Software Foundation, Inc.',
            u'Copyright (c) 2003, 2005 Free Software Foundation, Inc.',
            u'Copyright (c) 2001, 2002, 2003, 2005 Free Software Foundation, Inc.',
            u'Copyright (c) 1997, 1999, 2000, 2001, 2003, 2005 Free Software Foundation, Inc.',
            u'Copyright (c) 2003, 2004, 2005 Free Software Foundation, Inc.',
            u'Copyright (c) 2001, 2002, 2003, 2005 Free Software Foundation, Inc.',
            u'Copyright (c) 1996, 1997, 2000, 2001, 2003, 2005 Free Software Foundation, Inc.',
            u'Copyright (c) 2001, 2003, 2005 Free Software Foundation, Inc.',
            u'Copyright (c) 2004, 2005 Free Software Foundation, Inc.',
        ]
        check_detection(expected, test_file)

    def test_ics_genext2fs_configure(self):
        test_file = self.get_test_loc('ics/genext2fs/configure')
        expected = [
            u'Copyright (c) 1992, 1993, 1994, 1995, 1996, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
            u'Copyright (c) 1992, 1993, 1994, 1995, 1996, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
            u'Copyright (c) 2006 Free Software Foundation, Inc.',
        ]
        check_detection(expected, test_file)

    def test_ics_genext2fs_genext2fs_c(self):
        test_file = self.get_test_loc('ics/genext2fs/genext2fs.c')
        expected = [
            u'Copyright (c) 2000 Xavier Bestel <xavier.bestel@free.fr>',
            u'Copyright (c) 1999,2000 by Lineo, inc. and John Beppu',
            u'Copyright (c) 1999,2000,2001 by John Beppu <beppu@codepoet.org>',
            u'Copyright (c) 2002 Edward Betts <edward@debian.org>',
            u'Copyright (c) 2002 Ixia',
            u'Copyright (c) 2002 Ixia',
            u'Copyright (c) 2002 Ixia',
        ]
        check_detection(expected, test_file)

    # known gap: trailing word "communications" after "Ixia" is not captured
    @expectedFailure
    def test_ics_genext2fs_genext2fs_c_trail_name_trail_name_trail_name_trail_name(self):
        test_file = self.get_test_loc('ics/genext2fs/genext2fs.c')
        expected = [
            u'Copyright (c) 2000 Xavier Bestel <xavier.bestel@free.fr>',
            u'Copyright (c) 1999,2000 by Lineo, inc. and John Beppu',
            u'Beppu Copyright (c) 1999,2000,2001 by John Beppu <beppu@codepoet.org>',
            u'Copyright (c) 2002 Edward Betts <edward@debian.org>',
            u'Copyright (c) 2002 Ixia communications',
            u'Copyright (c) 2002 Ixia communications',
            u'Copyright (c) 2002 Ixia communications',
        ]
        check_detection(expected, test_file)

    def test_ics_genext2fs_m4_ac_func_scanf_can_malloc_m4(self):
        test_file = self.get_test_loc('ics/genext2fs-m4/ac_func_scanf_can_malloc.m4')
        expected = [
            u'(c) Finn Thain 2006',
        ]
        check_detection(expected, test_file)

    # --- giflib, google diff-match-patch, gtest, guava ---

    def test_ics_giflib_gif_lib_private_h(self):
        test_file = self.get_test_loc('ics/giflib/gif_lib_private.h')
        expected = [
            u'(c) Copyright 1997 Eric S. Raymond',
            u'(c) Copyright 1997 Eric S. Raymond',
        ]
        check_detection(expected, test_file)

    def test_ics_giflib_notice(self):
        test_file = self.get_test_loc('ics/giflib/NOTICE')
        expected = [
            u'Copyright (c) 1997 Eric S. Raymond',
        ]
        check_detection(expected, test_file)

    def test_ics_google_diff_match_patch_name_fraser_neil_plaintext_diff_match_patch_java(self):
        test_file = self.get_test_loc('ics/google-diff-match-patch-name-fraser-neil-plaintext/diff_match_patch.java')
        expected = [
            u'Copyright 2006 Google Inc.',
        ]
        check_detection(expected, test_file)

    def test_ics_gtest_test_gtest_filter_unittest_py(self):
        test_file = self.get_test_loc('ics/gtest-test/gtest_filter_unittest.py')
        expected = [
            u'Copyright 2005, Google Inc.',
        ]
        check_detection(expected, test_file)

    def test_ics_gtest_test_gtest_nc_test_py(self):
        test_file = self.get_test_loc('ics/gtest-test/gtest_nc_test.py')
        expected = [
            u'Copyright 2007, Google Inc.',
        ]
        check_detection(expected, test_file)

    def test_ics_guava_guava_ipr(self):
        test_file = self.get_test_loc('ics/guava/guava.ipr')
        # no copyright expected in this fixture
        expected = []
        check_detection(expected, test_file)

    # known gap: copyright embedded in markup is not detected
    @expectedFailure
    def test_ics_guava_guava_ipr_markup(self):
        test_file = self.get_test_loc('ics/guava/guava.ipr')
        expected = [
            u'Copyright (c) today.year Google Inc.',
        ]
        check_detection(expected, test_file)

    def
test_ics_guava_src_com_google_common_annotations_gwtcompatible_java(self): test_file = self.get_test_loc('ics/guava-src-com-google-common-annotations/GwtCompatible.java') expected = [ u'Copyright (c) 2009 Google Inc.', ] check_detection(expected, test_file) def test_ics_guava_src_com_google_common_annotations_visiblefortesting_java(self): test_file = self.get_test_loc('ics/guava-src-com-google-common-annotations/VisibleForTesting.java') expected = [ u'Copyright (c) 2006 Google Inc.', ] check_detection(expected, test_file) def test_ics_guava_src_com_google_common_base_charmatcher_java(self): test_file = self.get_test_loc('ics/guava-src-com-google-common-base/CharMatcher.java') expected = [ u'Copyright (c) 2008 Google Inc.', ] check_detection(expected, test_file) def test_ics_guava_src_com_google_common_base_charsets_java(self): test_file = self.get_test_loc('ics/guava-src-com-google-common-base/Charsets.java') expected = [ u'Copyright (c) 2007 Google Inc.', ] check_detection(expected, test_file) def test_ics_guava_src_com_google_common_io_nulloutputstream_java(self): test_file = self.get_test_loc('ics/guava-src-com-google-common-io/NullOutputStream.java') expected = [ u'Copyright (c) 2004 Google Inc.', ] check_detection(expected, test_file) def test_ics_harfbuzz_contrib_harfbuzz_unicode_icu_c(self): test_file = self.get_test_loc('ics/harfbuzz-contrib/harfbuzz-unicode-icu.c') expected = [ u'Copyright 2010, The Android Open Source Project', u'Copyright 2010, Google Inc.', ] check_detection(expected, test_file) def test_ics_harfbuzz_contrib_tables_bidimirroring_txt(self): test_file = self.get_test_loc('ics/harfbuzz-contrib-tables/BidiMirroring.txt') expected = [ u'Copyright (c) 1991-2008 Unicode, Inc.', ] check_detection(expected, test_file) def test_ics_harfbuzz_src_harfbuzz_arabic_c(self): test_file = self.get_test_loc('ics/harfbuzz-src/harfbuzz-arabic.c') expected = [ u'Copyright (c) 2008 Nokia Corporation', ] check_detection(expected, test_file) @expectedFailure 
def test_ics_harfbuzz_src_harfbuzz_arabic_c_trail_misc(self): test_file = self.get_test_loc('ics/harfbuzz-src/harfbuzz-arabic.c') expected = [ u'Copyright (C) 2008 Nokia Corporation and/or its subsidiary(-ies)', ] check_detection(expected, test_file) def test_ics_harfbuzz_src_harfbuzz_buffer_private_h(self): test_file = self.get_test_loc('ics/harfbuzz-src/harfbuzz-buffer-private.h') expected = [ u'Copyright (c) 1998-2004 David Turner and Werner Lemberg', u'Copyright (c) 2004,2007 Red Hat, Inc.', ] check_detection(expected, test_file) def test_ics_harfbuzz_src_harfbuzz_dump_c(self): test_file = self.get_test_loc('ics/harfbuzz-src/harfbuzz-dump.c') expected = [ u'Copyright (c) 2000, 2007 Red Hat, Inc.', ] check_detection(expected, test_file) def test_ics_harfbuzz_src_harfbuzz_external_h(self): test_file = self.get_test_loc('ics/harfbuzz-src/harfbuzz-external.h') expected = [ u'Copyright (c) 2008 Nokia Corporation', ] check_detection(expected, test_file) @expectedFailure def test_ics_harfbuzz_src_harfbuzz_external_h_trail_misc(self): test_file = self.get_test_loc('ics/harfbuzz-src/harfbuzz-external.h') expected = [ u'Copyright (C) 2008 Nokia Corporation and/or its subsidiary(-ies)', ] check_detection(expected, test_file) def test_ics_harfbuzz_src_harfbuzz_gdef_private_h(self): test_file = self.get_test_loc('ics/harfbuzz-src/harfbuzz-gdef-private.h') expected = [ u'Copyright (c) 1998-2004 David Turner and Werner Lemberg', u'Copyright (c) 2006 Behdad Esfahbod', ] check_detection(expected, test_file) def test_ics_harfbuzz_src_harfbuzz_global_h(self): test_file = self.get_test_loc('ics/harfbuzz-src/harfbuzz-global.h') expected = [ u'Copyright (c) 2008 Nokia Corporation', u'Copyright (c) 2007 Red Hat, Inc.', ] check_detection(expected, test_file) @expectedFailure def test_ics_harfbuzz_src_harfbuzz_global_h_trail_misc(self): test_file = self.get_test_loc('ics/harfbuzz-src/harfbuzz-global.h') expected = [ u'Copyright (C) 2008 Nokia Corporation and/or its subsidiary(-ies)', 
u'Copyright (c) 2007 Red Hat, Inc.', ] check_detection(expected, test_file) def test_ics_harfbuzz_src_harfbuzz_gpos_c(self): test_file = self.get_test_loc('ics/harfbuzz-src/harfbuzz-gpos.c') expected = [ u'Copyright (c) 1998-2004 David Turner and Werner Lemberg', u'Copyright (c) 2006 Behdad Esfahbod', u'Copyright (c) 2007 Red Hat, Inc.', ] check_detection(expected, test_file) def test_ics_harfbuzz_src_harfbuzz_greek_c(self): test_file = self.get_test_loc('ics/harfbuzz-src/harfbuzz-greek.c') expected = [ u'Copyright (c) 2010 Nokia Corporation', ] check_detection(expected, test_file) @expectedFailure def test_ics_harfbuzz_src_harfbuzz_greek_c_trail_misc(self): test_file = self.get_test_loc('ics/harfbuzz-src/harfbuzz-greek.c') expected = [ u'Copyright (C) 2010 Nokia Corporation and/or its subsidiary(-ies)', ] check_detection(expected, test_file) def test_ics_harfbuzz_src_harfbuzz_impl_c(self): test_file = self.get_test_loc('ics/harfbuzz-src/harfbuzz-impl.c') expected = [ u'Copyright (c) 1998-2004 David Turner and Werner Lemberg', u'Copyright (c) 2008 Nokia Corporation', u'Copyright (c) 2007 Red Hat, Inc.', ] check_detection(expected, test_file) @expectedFailure def test_ics_harfbuzz_src_harfbuzz_impl_c_trail_misc(self): test_file = self.get_test_loc('ics/harfbuzz-src/harfbuzz-impl.c') expected = [ u'Copyright (c) 1998-2004 David Turner and Werner Lemberg', u'Copyright (C) 2008 Nokia Corporation and/or its subsidiary(-ies)', u'Copyright (c) 2007 Red Hat, Inc.', ] check_detection(expected, test_file) def test_ics_harfbuzz_src_harfbuzz_shape_h(self): test_file = self.get_test_loc('ics/harfbuzz-src/harfbuzz-shape.h') expected = [ u'Copyright (c) 2006 Red Hat, Inc.', ] check_detection(expected, test_file) def test_ics_harfbuzz_src_harfbuzz_stream_c(self): test_file = self.get_test_loc('ics/harfbuzz-src/harfbuzz-stream.c') expected = [ u'Copyright (c) 2005 David Turner', u'Copyright (c) 2008 Nokia Corporation', u'Copyright (c) 2007 Red Hat, Inc.', ] 
check_detection(expected, test_file) @expectedFailure def test_ics_harfbuzz_src_harfbuzz_stream_c_trail_misc(self): test_file = self.get_test_loc('ics/harfbuzz-src/harfbuzz-stream.c') expected = [ u'Copyright (c) 2005 David Turner', u'Copyright (C) 2008 Nokia Corporation and/or its subsidiary(-ies)', u'Copyright (c) 2007 Red Hat, Inc.', ] check_detection(expected, test_file) def test_ics_harfbuzz_src_harfbuzz_stream_h(self): test_file = self.get_test_loc('ics/harfbuzz-src/harfbuzz-stream.h') expected = [ u'Copyright (c) 2005 David Turner', u'Copyright (c) 2008 Nokia Corporation', ] check_detection(expected, test_file) @expectedFailure def test_ics_harfbuzz_src_harfbuzz_stream_h_trail_misc(self): test_file = self.get_test_loc('ics/harfbuzz-src/harfbuzz-stream.h') expected = [ u'Copyright (c) 2005 David Turner', u'Copyright (C) 2008 Nokia Corporation and/or its subsidiary(-ies)', ] check_detection(expected, test_file) def test_ics_hyphenation_hnjalloc_c(self): test_file = self.get_test_loc('ics/hyphenation/hnjalloc.c') expected = [ u'Copyright (c) 1998 Raph Levien', u'(c) 2001 ALTLinux, Moscow', ] check_detection(expected, test_file) def test_ics_hyphenation_hnjalloc_h(self): test_file = self.get_test_loc('ics/hyphenation/hnjalloc.h') expected = [ u'Copyright (c) 1998 Raph Levien', ] check_detection(expected, test_file) def test_ics_hyphenation_hyphen_c(self): test_file = self.get_test_loc('ics/hyphenation/hyphen.c') expected = [ u'Copyright (c) 1998 Raph Levien', u'(c) 2001 ALTLinux, Moscow', u'(c) 2001 Peter Novodvorsky (nidd@cs.msu.su)', u'(c) 2006, 2007, 2008 Laszlo Nemeth', ] check_detection(expected, test_file) def test_ics_hyphenation_hyphen_h(self): test_file = self.get_test_loc('ics/hyphenation/hyphen.h') expected = [ u'(c) 1998 Raph Levien', u'(c) 2001 ALTLinux, Moscow', u'(c) 2006, 2007, 2008 Laszlo Nemeth', u'Copyright (c) 1998 Raph Levien', ] check_detection(expected, test_file) def test_ics_hyphenation_readme(self): test_file = 
self.get_test_loc('ics/hyphenation/README') expected = [ u'(c) 1998 Raph Levien', u'(c) 2001 ALTLinux, Moscow', u'(c) 2006, 2007, 2008 Laszlo Nemeth', ] check_detection(expected, test_file) def test_ics_iproute2_readme_lnstat(self): test_file = self.get_test_loc('ics/iproute2/README.lnstat') expected = [ u'(c) 2004 Harald Welte laforge@gnumonks.org', ] check_detection(expected, test_file) def test_ics_iproute2_include_linux_if_addrlabel_h(self): test_file = self.get_test_loc('ics/iproute2-include-linux/if_addrlabel.h') expected = [ u'Copyright (c) 2007 USAGI/WIDE Project', ] check_detection(expected, test_file) def test_ics_iproute2_include_linux_if_arp_h(self): test_file = self.get_test_loc('ics/iproute2-include-linux/if_arp.h') expected = [ u'(c) UCB 1986-1988', ] check_detection(expected, test_file) def test_ics_iproute2_include_linux_if_tun_h(self): test_file = self.get_test_loc('ics/iproute2-include-linux/if_tun.h') expected = [ u'Copyright (c) 1999-2000 Maxim Krasnyansky <max_mk@yahoo.com>', ] check_detection(expected, test_file) def test_ics_iproute2_include_linux_netfilter_ipv4_h(self): test_file = self.get_test_loc('ics/iproute2-include-linux/netfilter_ipv4.h') expected = [ u'(c) 1998 Rusty Russell', ] check_detection(expected, test_file) def test_ics_iproute2_include_linux_can_netlink_h(self): test_file = self.get_test_loc('ics/iproute2-include-linux-can/netlink.h') expected = [ u'Copyright (c) 2009 Wolfgang Grandegger <wg@grandegger.com>', ] check_detection(expected, test_file) def test_ics_iproute2_include_linux_tc_act_tc_skbedit_h(self): test_file = self.get_test_loc('ics/iproute2-include-linux-tc_act/tc_skbedit.h') expected = [ u'Copyright (c) 2008, Intel Corporation.', ] check_detection(expected, test_file) def test_ics_iproute2_include_netinet_icmp6_h(self): test_file = self.get_test_loc('ics/iproute2-include-netinet/icmp6.h') expected = [ u'Copyright (c) 1991-1997,2000,2006 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def 
test_ics_iproute2_ip_ip6tunnel_c(self): test_file = self.get_test_loc('ics/iproute2-ip/ip6tunnel.c') expected = [ u'Copyright (c) 2006 USAGI/WIDE Project', ] check_detection(expected, test_file) def test_ics_iproute2_ip_ipaddrlabel_c(self): test_file = self.get_test_loc('ics/iproute2-ip/ipaddrlabel.c') expected = [ u'Copyright (c) 2007 USAGI/WIDE Project', ] check_detection(expected, test_file) def test_ics_iproute2_ip_ipprefix_c(self): test_file = self.get_test_loc('ics/iproute2-ip/ipprefix.c') expected = [ u'Copyright (c) 2005 USAGI/WIDE Project', ] check_detection(expected, test_file) def test_ics_iproute2_ip_ipxfrm_c(self): test_file = self.get_test_loc('ics/iproute2-ip/ipxfrm.c') expected = [ u'Copyright (c) 2004 USAGI/WIDE Project', ] check_detection(expected, test_file) def test_ics_iproute2_misc_lnstat_c(self): test_file = self.get_test_loc('ics/iproute2-misc/lnstat.c') expected = [ u'Copyright (c) 2004 by Harald Welte <laforge@gnumonks.org>', u'Copyright 2001 by Robert Olsson <robert.olsson@its.uu.se> Uppsala University, Sweden', u'Copyright (c) 2004 by Harald Welte <laforge@gnumonks.org>', ] check_detection(expected, test_file) def test_ics_iproute2_misc_lnstat_util_c(self): test_file = self.get_test_loc('ics/iproute2-misc/lnstat_util.c') expected = [ u'Copyright (c) 2004 by Harald Welte <laforge@gnumonks.org>', u'Copyright 2001 by Robert Olsson <robert.olsson@its.uu.se> Uppsala University, Sweden', ] check_detection(expected, test_file) def test_ics_ipsec_tools_notice_extra_contributed(self): test_file = self.get_test_loc('ics/ipsec-tools/NOTICE') expected = [ u'Copyright (c) 1995, 1996, 1997, and 1998 WIDE Project.', u'Copyright (c) 2004 Emmanuel Dreyfus', u'Copyright (c) 2004-2006 Emmanuel Dreyfus', u'Copyright (c) 2000 WIDE Project.', u'Copyright (c) 2004-2005 Emmanuel Dreyfus', u'Copyright (c) 2000, 2001 WIDE Project.', u'Copyright (c) 2004 SuSE Linux AG, Nuernberg, Germany.', u'Copyright (c) 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002 and 2003 
WIDE Project.', u'Copyright 2000 Wasabi Systems, Inc.', u'Copyright (c) 2005 International Business Machines Corporation', u'Copyright (c) 2005 by Trusted Computer Solutions, Inc.', u'Copyright 2000 Aaron D. Gifford.', u'Copyright (c) 1995, 1996, 1997, 1998, and 1999 WIDE Project.', u'Copyright (c) 1995, 1996, 1997, and 1998 WIDE Project.', u'Copyright (c) 1991, 1993 The Regents of the University of California.', ] check_detection(expected, test_file) def test_ics_ipsec_tools_src_include_glibc_notice(self): test_file = self.get_test_loc('ics/ipsec-tools-src-include-glibc/NOTICE') expected = [ u'Copyright (c) 1991, 1993 The Regents of the University of California.', ] check_detection(expected, test_file) def test_ics_ipsec_tools_src_libipsec_ipsec_dump_policy_c(self): test_file = self.get_test_loc('ics/ipsec-tools-src-libipsec/ipsec_dump_policy.c') expected = [ u'Copyright (c) 1995, 1996, 1997, 1998, and 1999 WIDE Project.', ] check_detection(expected, test_file) def test_ics_ipsec_tools_src_libipsec_ipsec_set_policy_3(self): test_file = self.get_test_loc('ics/ipsec-tools-src-libipsec/ipsec_set_policy.3') expected = [ u'Copyright (c) 1995, 1996, 1997, 1998, and 1999 WIDE Project.', ] check_detection(expected, test_file) def test_ics_ipsec_tools_src_libipsec_key_debug_c(self): test_file = self.get_test_loc('ics/ipsec-tools-src-libipsec/key_debug.c') expected = [ u'Copyright (c) 1995, 1996, 1997, and 1998 WIDE Project.', ] check_detection(expected, test_file) def test_ics_ipsec_tools_src_libipsec_notice(self): test_file = self.get_test_loc('ics/ipsec-tools-src-libipsec/NOTICE') expected = [ u'Copyright (c) 1995, 1996, 1997, 1998, and 1999 WIDE Project.', u'Copyright (c) 1995, 1996, 1997, and 1998 WIDE Project.', ] check_detection(expected, test_file) def test_ics_ipsec_tools_src_libipsec_policy_parse_y(self): test_file = self.get_test_loc('ics/ipsec-tools-src-libipsec/policy_parse.y') expected = [ u'Copyright (c) 1995, 1996, 1997, 1998, and 1999 WIDE Project.', ] 
check_detection(expected, test_file) def test_ics_ipsec_tools_src_racoon_cfparse_y(self): test_file = self.get_test_loc('ics/ipsec-tools-src-racoon/cfparse.y') expected = [ u'Copyright (c) 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002 and 2003 WIDE Project.', ] check_detection(expected, test_file) def test_ics_ipsec_tools_src_racoon_dump_h(self): test_file = self.get_test_loc('ics/ipsec-tools-src-racoon/dump.h') expected = [ u'Copyright (c) 2000 WIDE Project.', ] check_detection(expected, test_file) def test_ics_ipsec_tools_src_racoon_evt_c(self): test_file = self.get_test_loc('ics/ipsec-tools-src-racoon/evt.c') expected = [ u'Copyright (c) 2004 Emmanuel Dreyfus', u'Copyright (c) 2008 Timo Teras', ] check_detection(expected, test_file) def test_ics_ipsec_tools_src_racoon_gcmalloc_h(self): test_file = self.get_test_loc('ics/ipsec-tools-src-racoon/gcmalloc.h') expected = [ u'Copyright (c) 2000, 2001 WIDE Project.', ] check_detection(expected, test_file) def test_ics_ipsec_tools_src_racoon_genlist_c_extra_contributed(self): test_file = self.get_test_loc('ics/ipsec-tools-src-racoon/genlist.c') expected = [ u'Copyright (c) 2004 SuSE Linux AG, Nuernberg, Germany.', ] check_detection(expected, test_file) def test_ics_ipsec_tools_src_racoon_grabmyaddr_c(self): test_file = self.get_test_loc('ics/ipsec-tools-src-racoon/grabmyaddr.c') expected = [ u'Copyright (c) 1995, 1996, 1997, and 1998 WIDE Project.', u'Copyright (c) 2008 Timo Teras <timo.teras@iki.fi>.', ] check_detection(expected, test_file) def test_ics_ipsec_tools_src_racoon_gssapi_c(self): test_file = self.get_test_loc('ics/ipsec-tools-src-racoon/gssapi.c') expected = [ u'Copyright 2000 Wasabi Systems, Inc.', ] check_detection(expected, test_file) def test_ics_ipsec_tools_src_racoon_handler_h(self): test_file = self.get_test_loc('ics/ipsec-tools-src-racoon/handler.h') expected = [ u'Copyright (c) 1995, 1996, 1997, and 1998 WIDE Project.', ] check_detection(expected, test_file) def 
test_ics_ipsec_tools_src_racoon_isakmp_cfg_c(self): test_file = self.get_test_loc('ics/ipsec-tools-src-racoon/isakmp_cfg.c') expected = [ u'Copyright (c) 2004-2006 Emmanuel Dreyfus', ] check_detection(expected, test_file) def test_ics_ipsec_tools_src_racoon_isakmp_cfg_h(self): test_file = self.get_test_loc('ics/ipsec-tools-src-racoon/isakmp_cfg.h') expected = [ u'Copyright (c) 2004 Emmanuel Dreyfus', ] check_detection(expected, test_file) def test_ics_ipsec_tools_src_racoon_isakmp_xauth_c(self): test_file = self.get_test_loc('ics/ipsec-tools-src-racoon/isakmp_xauth.c') expected = [ u'Copyright (c) 2004-2005 Emmanuel Dreyfus', ] check_detection(expected, test_file) def test_ics_ipsec_tools_src_racoon_notice_extra_contributed(self): test_file = self.get_test_loc('ics/ipsec-tools-src-racoon/NOTICE') expected = [ u'Copyright (c) 1995, 1996, 1997, and 1998 WIDE Project.', u'Copyright (c) 2004 Emmanuel Dreyfus', u'Copyright (c) 2004-2006 Emmanuel Dreyfus', u'Copyright (c) 2000 WIDE Project.', u'Copyright (c) 2004-2005 Emmanuel Dreyfus', u'Copyright (c) 2000, 2001 WIDE Project.', u'Copyright (c) 2004 SuSE Linux AG, Nuernberg, Germany.', u'Copyright (c) 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002 and 2003 WIDE Project.', u'Copyright 2000 Wasabi Systems, Inc.', u'Copyright (c) 2005 International Business Machines Corporation', u'Copyright (c) 2005 by Trusted Computer Solutions, Inc.', u'Copyright 2000 Aaron D. 
Gifford.', ] check_detection(expected, test_file) def test_ics_ipsec_tools_src_racoon_plainrsa_gen_8(self): test_file = self.get_test_loc('ics/ipsec-tools-src-racoon/plainrsa-gen.8') expected = [ u'Copyright (c) 2004 SuSE Linux AG, Nuernberg, Germany.', ] check_detection(expected, test_file) def test_ics_ipsec_tools_src_racoon_racoon_8(self): test_file = self.get_test_loc('ics/ipsec-tools-src-racoon/racoon.8') expected = [ u'Copyright (c) 1995, 1996, 1997, and 1998 WIDE Project.', ] check_detection(expected, test_file) def test_ics_ipsec_tools_src_racoon_racoonctl_8(self): test_file = self.get_test_loc('ics/ipsec-tools-src-racoon/racoonctl.8') expected = [ u'Copyright (c) 2004 Emmanuel Dreyfus', ] check_detection(expected, test_file) def test_ics_ipsec_tools_src_racoon_racoonctl_c(self): test_file = self.get_test_loc('ics/ipsec-tools-src-racoon/racoonctl.c') expected = [ u'Copyright (c) 1995, 1996, 1997, and 1998 WIDE Project.', u'Copyright (c) 2008 Timo Teras.', ] check_detection(expected, test_file) def test_ics_ipsec_tools_src_racoon_security_c(self): test_file = self.get_test_loc('ics/ipsec-tools-src-racoon/security.c') expected = [ u'Copyright (c) 2005 International Business Machines Corporation', u'Copyright (c) 2005 by Trusted Computer Solutions, Inc.', ] check_detection(expected, test_file) def test_ics_ipsec_tools_src_racoon_missing_crypto_sha2_sha2_c(self): test_file = self.get_test_loc('ics/ipsec-tools-src-racoon-missing-crypto-sha2/sha2.c') expected = [ u'Copyright 2000 Aaron D. Gifford.', ] check_detection(expected, test_file) def test_ics_ipsec_tools_src_racoon_missing_crypto_sha2_sha2_h(self): test_file = self.get_test_loc('ics/ipsec-tools-src-racoon-missing-crypto-sha2/sha2.h') expected = [ u'Copyright 2000 Aaron D. 
Gifford.', ] check_detection(expected, test_file) def test_ics_iptables_extensions_libip6t_reject_c(self): test_file = self.get_test_loc('ics/iptables-extensions/libip6t_REJECT.c') expected = [ u'(c) 2000 Jozsef Kadlecsik <kadlec@blackhole.kfki.hu>', ] check_detection(expected, test_file) def test_ics_iptables_extensions_libipt_clusterip_c(self): test_file = self.get_test_loc('ics/iptables-extensions/libipt_CLUSTERIP.c') expected = [ u'(c) 2003 by Harald Welte <laforge@gnumonks.org>', ] check_detection(expected, test_file) def test_ics_iptables_extensions_libipt_ecn_c(self): test_file = self.get_test_loc('ics/iptables-extensions/libipt_ECN.c') expected = [ u'(c) 2002 by Harald Welte <laforge@gnumonks.org>', ] check_detection(expected, test_file) def test_ics_iptables_extensions_libipt_ttl_c(self): test_file = self.get_test_loc('ics/iptables-extensions/libipt_TTL.c') expected = [ u'(c) 2000 by Harald Welte <laforge@gnumonks.org>', ] check_detection(expected, test_file) def test_ics_iptables_extensions_libxt_audit_c(self): test_file = self.get_test_loc('ics/iptables-extensions/libxt_AUDIT.c') expected = [ u'(c) 2010-2011, Thomas Graf <tgraf@redhat.com>', u'(c) 2010-2011, Red Hat, Inc.', ] check_detection(expected, test_file) def test_ics_iptables_extensions_libxt_checksum_c(self): test_file = self.get_test_loc('ics/iptables-extensions/libxt_CHECKSUM.c') expected = [ u'(c) 2002 by Harald Welte <laforge@gnumonks.org>', u'(c) 2010 by Red Hat, Inc', ] check_detection(expected, test_file) def test_ics_iptables_extensions_libxt_cluster_c(self): test_file = self.get_test_loc('ics/iptables-extensions/libxt_cluster.c') expected = [ u'(c) 2009 by Pablo Neira Ayuso <pablo@netfilter.org>', ] check_detection(expected, test_file) def test_ics_iptables_extensions_libxt_connmark_c(self): test_file = self.get_test_loc('ics/iptables-extensions/libxt_connmark.c') expected = [ u'(c) 2002,2004 MARA Systems AB', ] check_detection(expected, test_file) def 
test_ics_iptables_extensions_libxt_connsecmark_c(self): test_file = self.get_test_loc('ics/iptables-extensions/libxt_CONNSECMARK.c') expected = [ u'Copyright (c) 2006 Red Hat, Inc., James Morris <jmorris@redhat.com>', ] check_detection(expected, test_file) def test_ics_iptables_extensions_libxt_conntrack_c(self): test_file = self.get_test_loc('ics/iptables-extensions/libxt_conntrack.c') expected = [ u'(c) 2001 Marc Boucher (marc@mbsi.ca).', u'Copyright (c) CC Computer Consultants GmbH, 2007 - 2008 Jan Engelhardt <jengelh@computergmbh.de>', ] check_detection(expected, test_file) def test_ics_iptables_extensions_libxt_dccp_c(self): test_file = self.get_test_loc('ics/iptables-extensions/libxt_dccp.c') expected = [ u'(c) 2005 by Harald Welte <laforge@netfilter.org>', ] check_detection(expected, test_file) def test_ics_iptables_extensions_libxt_devgroup_c(self): test_file = self.get_test_loc('ics/iptables-extensions/libxt_devgroup.c') expected = [ u'Copyright (c) 2011 Patrick McHardy <kaber@trash.net>', ] check_detection(expected, test_file) def test_ics_iptables_extensions_libxt_hashlimit_c(self): test_file = self.get_test_loc('ics/iptables-extensions/libxt_hashlimit.c') expected = [ u'(c) 2003-2004 by Harald Welte <laforge@netfilter.org>', ] check_detection(expected, test_file) def test_ics_iptables_extensions_libxt_idletimer_c(self): test_file = self.get_test_loc('ics/iptables-extensions/libxt_IDLETIMER.c') expected = [ u'Copyright (c) 2010 Nokia Corporation.', ] check_detection(expected, test_file) def test_ics_iptables_extensions_libxt_led_c(self): test_file = self.get_test_loc('ics/iptables-extensions/libxt_LED.c') expected = [ u'(c) 2008 Adam Nielsen <a.nielsen@shikadi.net>', ] check_detection(expected, test_file) def test_ics_iptables_extensions_libxt_osf_c(self): test_file = self.get_test_loc('ics/iptables-extensions/libxt_osf.c') expected = [ u'Copyright (c) 2003+ Evgeniy Polyakov <zbr@ioremap.net>', ] check_detection(expected, test_file) def 
test_ics_iptables_extensions_libxt_owner_c(self): test_file = self.get_test_loc('ics/iptables-extensions/libxt_owner.c') expected = [ u'Copyright (c) CC Computer Consultants GmbH, 2007 - 2008 Jan Engelhardt <jengelh@computergmbh.de>' ] check_detection(expected, test_file) def test_ics_iptables_extensions_libxt_set_c(self): test_file = self.get_test_loc('ics/iptables-extensions/libxt_SET.c') expected = [ u'Copyright (c) 2000-2002 Joakim Axelsson <gozem@linux.nu> Patrick Schaaf <bof@bof.de> Martin Josefsson <gandalf@wlug.westbo.se>', u'Copyright (c) 2003-2010 Jozsef Kadlecsik <kadlec@blackhole.kfki.hu>', ] check_detection(expected, test_file) def test_ics_iptables_extensions_libxt_socket_c(self): test_file = self.get_test_loc('ics/iptables-extensions/libxt_socket.c') expected = [ u'Copyright (c) 2007 BalaBit IT Ltd.', ] check_detection(expected, test_file) def test_ics_iptables_extensions_libxt_string_c(self): test_file = self.get_test_loc('ics/iptables-extensions/libxt_string.c') expected = [ u'Copyright (c) 2000 Emmanuel Roger <winfield@freegates.be>', ] check_detection(expected, test_file) def test_ics_iptables_extensions_libxt_tcpoptstrip_c(self): test_file = self.get_test_loc('ics/iptables-extensions/libxt_TCPOPTSTRIP.c') expected = [ u'Copyright (c) 2007 Sven Schnelle <svens@bitebene.org>', u'Copyright (c) CC Computer Consultants GmbH, 2007 Jan Engelhardt <jengelh@computergmbh.de>', ] check_detection(expected, test_file) def test_ics_iptables_extensions_libxt_tee_c(self): test_file = self.get_test_loc('ics/iptables-extensions/libxt_TEE.c') expected = [ u'Copyright (c) Sebastian Claen , 2007 Jan Engelhardt', ] check_detection(expected, test_file) def test_ics_iptables_extensions_libxt_time_c(self): test_file = self.get_test_loc('ics/iptables-extensions/libxt_time.c') expected = [ u'Copyright (c) CC Computer Consultants GmbH, 2007', ] check_detection(expected, test_file) def test_ics_iptables_extensions_libxt_tproxy_c(self): test_file = 
self.get_test_loc('ics/iptables-extensions/libxt_TPROXY.c') expected = [ u'Copyright (c) 2002-2008 BalaBit IT Ltd.', ] check_detection(expected, test_file) def test_ics_iptables_extensions_libxt_u32_c(self): test_file = self.get_test_loc('ics/iptables-extensions/libxt_u32.c') expected = [ u'(c) 2002 by Don Cohen <don-netf@isis.cs3-inc.com>', u'Copyright (c) CC Computer Consultants GmbH, 2007', ] check_detection(expected, test_file) def test_ics_iptables_include_libipq_libipq_h(self): test_file = self.get_test_loc('ics/iptables-include-libipq/libipq.h') expected = [ u'Copyright (c) 2000-2001 Netfilter Core Team', ] check_detection(expected, test_file) def test_ics_iptables_include_linux_netfilter_ipv6_h(self): test_file = self.get_test_loc('ics/iptables-include-linux/netfilter_ipv6.h') expected = [ u'(c) 1998 Rusty Russell', u'(c) 1999 David Jeffery', ] check_detection(expected, test_file) def test_ics_iptables_include_linux_netfilter_xt_audit_h(self): test_file = self.get_test_loc('ics/iptables-include-linux-netfilter/xt_AUDIT.h') expected = [ u'(c) 2010-2011 Thomas Graf <tgraf@redhat.com>', u'(c) 2010-2011 Red Hat, Inc.', ] check_detection(expected, test_file) def test_ics_iptables_include_linux_netfilter_xt_checksum_h(self): test_file = self.get_test_loc('ics/iptables-include-linux-netfilter/xt_CHECKSUM.h') expected = [ u'(c) 2002 by Harald Welte <laforge@gnumonks.org>', u'(c) 2010 Red Hat Inc', ] check_detection(expected, test_file) def test_ics_iptables_include_linux_netfilter_xt_conntrack_h(self): test_file = self.get_test_loc('ics/iptables-include-linux-netfilter/xt_conntrack.h') expected = [ u'(c) 2001 Marc Boucher (marc@mbsi.ca).', ] check_detection(expected, test_file) def test_ics_iptables_include_linux_netfilter_xt_dscp_h(self): test_file = self.get_test_loc('ics/iptables-include-linux-netfilter/xt_DSCP.h') expected = [ u'(c) 2002 Harald Welte <laforge@gnumonks.org>', ] check_detection(expected, test_file) def 
test_ics_iptables_include_linux_netfilter_xt_idletimer_h(self): test_file = self.get_test_loc('ics/iptables-include-linux-netfilter/xt_IDLETIMER.h') expected = [ u'Copyright (c) 2004, 2010 Nokia Corporation', ] check_detection(expected, test_file) def test_ics_iptables_include_linux_netfilter_xt_nfqueue_h(self): test_file = self.get_test_loc('ics/iptables-include-linux-netfilter/xt_NFQUEUE.h') expected = [ u'(c) 2005 Harald Welte <laforge@netfilter.org>', ] check_detection(expected, test_file) def test_ics_iptables_include_linux_netfilter_xt_osf_h(self): test_file = self.get_test_loc('ics/iptables-include-linux-netfilter/xt_osf.h') expected = [ u'Copyright (c) 2003+ Evgeniy Polyakov <johnpol@2ka.mxt.ru>', ] check_detection(expected, test_file) def test_ics_iptables_include_linux_netfilter_ipv4_ipt_ttl_h(self): test_file = self.get_test_loc('ics/iptables-include-linux-netfilter_ipv4/ipt_ttl.h') expected = [ u'(c) 2000 by Harald Welte <laforge@gnumonks.org>', ] check_detection(expected, test_file) def test_ics_iptables_include_linux_netfilter_ipv4_ipt_ulog_h(self): test_file = self.get_test_loc('ics/iptables-include-linux-netfilter_ipv4/ipt_ULOG.h') expected = [ u'(c) 2000-2002 by Harald Welte <laforge@gnumonks.org>', ] check_detection(expected, test_file) def test_ics_iptables_iptables_ip6tables_standalone_c(self): test_file = self.get_test_loc('ics/iptables-iptables/ip6tables-standalone.c') expected = [ u'(c) 2000-2002', ] check_detection(expected, test_file) @expectedFailure def test_ics_iptables_iptables_ip6tables_standalone_c_trail_name(self): test_file = self.get_test_loc('ics/iptables-iptables/ip6tables-standalone.c') expected = [ u"(C) 2000-2002 by the netfilter coreteam <coreteam@netfilter.org>: Paul 'Rusty' Russell <rusty@rustcorp.com.au> Marc Boucher <marc+nf@mbsi.ca>", ] check_detection(expected, test_file) def test_ics_iptables_iptables_iptables_xslt(self): test_file = self.get_test_loc('ics/iptables-iptables/iptables.xslt') expected = [ u'Copyright 2006 
UfoMechanic Author azez@ufomechanic.net', ] check_detection(expected, test_file) @expectedFailure def test_ics_iptables_iptables_iptables_xslt_extra_author(self): test_file = self.get_test_loc('ics/iptables-iptables/iptables.xslt') expected = [ u'Copyright 2006 UfoMechanic', ] check_detection(expected, test_file) def test_ics_iptables_iptables_iptables_apply(self): test_file = self.get_test_loc('ics/iptables-iptables/iptables-apply') expected = [ u'Copyright (c) Martin F. Krafft <madduck@madduck.net>', ] check_detection(expected, test_file) def test_ics_iptables_iptables_iptables_apply_8(self): test_file = self.get_test_loc('ics/iptables-iptables/iptables-apply.8') expected = [ u'copyright by Martin F. Krafft.', ] check_detection(expected, test_file) def test_ics_iptables_iptables_iptables_restore_c(self): test_file = self.get_test_loc('ics/iptables-iptables/iptables-restore.c') expected = [ u'(c) 2000-2002 by Harald Welte <laforge@gnumonks.org>', ] check_detection(expected, test_file) def test_ics_iptables_iptables_iptables_save_c(self): test_file = self.get_test_loc('ics/iptables-iptables/iptables-save.c') expected = [ u'(c) 1999 by Paul Rusty Russell <rusty@rustcorp.com.au>', u'(c) 2000-2002 by Harald Welte <laforge@gnumonks.org>', ] check_detection(expected, test_file) def test_ics_iptables_iptables_iptables_xml_c(self): test_file = self.get_test_loc('ics/iptables-iptables/iptables-xml.c') expected = [ u'(c) 2006 Ufo Mechanic <azez@ufomechanic.net>', u'(c) 2000-2002 by Harald Welte <laforge@gnumonks.org>', ] check_detection(expected, test_file) def test_ics_iptables_iptables_xtables_c(self): test_file = self.get_test_loc('ics/iptables-iptables/xtables.c') expected = [ u'(c) 2000-2006', ] check_detection(expected, test_file) @expectedFailure def test_ics_iptables_iptables_xtables_c_trail_name(self): test_file = self.get_test_loc('ics/iptables-iptables/xtables.c') expected = [ u'(c) 2000-2006 by the netfilter coreteam <coreteam@netfilter.org>', ] 
check_detection(expected, test_file) def test_ics_iptables_iptables_xtoptions_c(self): test_file = self.get_test_loc('ics/iptables-iptables/xtoptions.c') expected = [ u'Copyright (c) Jan Engelhardt, 2011', ] check_detection(expected, test_file) def test_ics_iptables_libipq_ipq_create_handle_3(self): test_file = self.get_test_loc('ics/iptables-libipq/ipq_create_handle.3') expected = [ u'Copyright (c) 2000-2001 Netfilter Core Team', u'Copyright (c) 2000-2001 Netfilter Core Team.', ] check_detection(expected, test_file) def test_ics_iptables_libipq_ipq_errstr_3(self): test_file = self.get_test_loc('ics/iptables-libipq/ipq_errstr.3') expected = [ u'Copyright (c) 2000 Netfilter Core Team', u'Copyright (c) 2000-2001 Netfilter Core Team.', ] check_detection(expected, test_file) def test_ics_iptables_libiptc_libip4tc_c(self): test_file = self.get_test_loc('ics/iptables-libiptc/libip4tc.c') expected = [ u'(c) 1999 Paul Rusty Russell', ] check_detection(expected, test_file) def test_ics_iptables_libiptc_libiptc_c(self): test_file = self.get_test_loc('ics/iptables-libiptc/libiptc.c') expected = [ u'(c) 1999 Paul Rusty Russell', u'(c) 2000-2004 by the Netfilter Core Team <coreteam@netfilter.org>', ] check_detection(expected, test_file) def test_ics_iptables_m4_ax_check_linker_flags_m4(self): test_file = self.get_test_loc('ics/iptables-m4/ax_check_linker_flags.m4') expected = [ u'Copyright (c) 2009 Mike Frysinger <vapier@gentoo.org>', u'Copyright (c) 2009 Steven G. 
Johnson <stevenj@alum.mit.edu>', u'Copyright (c) 2009 Matteo Frigo', ] check_detection(expected, test_file) def test_ics_iptables_utils_nfnl_osf_c(self): test_file = self.get_test_loc('ics/iptables-utils/nfnl_osf.c') expected = [ u'Copyright (c) 2005 Evgeniy Polyakov <johnpol@2ka.mxt.ru>', ] check_detection(expected, test_file) def test_ics_iptables_utils_pf_os(self): test_file = self.get_test_loc('ics/iptables-utils/pf.os') expected = [ u'(c) Copyright 2000-2003 by Michal Zalewski <lcamtuf@coredump.cx>', u'(c) Copyright 2003 by Mike Frantzen <frantzen@w4g.org>', ] check_detection(expected, test_file) def test_ics_javasqlite_src_main_native_sqlite_jni_defs_h(self): test_file = self.get_test_loc('ics/javasqlite-src-main-native/sqlite_jni_defs.h') expected = [ u'Copyright 2007, The Android Open Source Project', ] check_detection(expected, test_file) def test_ics_javassist_license_html(self): test_file = self.get_test_loc('ics/javassist/License.html') expected = [ u'Copyright (c) 1999-2010 Shigeru Chiba.', ] check_detection(expected, test_file) def test_ics_javassist_notice(self): test_file = self.get_test_loc('ics/javassist/NOTICE') expected = [ u'Copyright (c) 1999-2010 Shigeru Chiba.', ] check_detection(expected, test_file) def test_ics_javassist_readme_html(self): test_file = self.get_test_loc('ics/javassist/Readme.html') expected = [ u'Copyright (c) 1999-2010 by Shigeru Chiba', u'Copyright (c) 1999-2010 Shigeru Chiba.', u'Copyright (c) 1999-2010 Shigeru Chiba.', ] check_detection(expected, test_file) def test_ics_javassist_sample_preproc_assistant_java(self): test_file = self.get_test_loc('ics/javassist-sample-preproc/Assistant.java') expected = [ u'Copyright (c) 1999-2005 Shigeru Chiba.', ] check_detection(expected, test_file) def test_ics_javassist_src_main_javassist_bytearrayclasspath_java(self): test_file = self.get_test_loc('ics/javassist-src-main-javassist/ByteArrayClassPath.java') expected = [ u'Copyright (c) 1999-2007 Shigeru Chiba.', ] 
check_detection(expected, test_file) def test_ics_javassist_src_main_javassist_ctclass_java(self): test_file = self.get_test_loc('ics/javassist-src-main-javassist/CtClass.java') expected = [ u'Copyright (c) 1999-2007 Shigeru Chiba.', u'(c) 1999-2010 Shigeru Chiba.', ] check_detection(expected, test_file) @expectedFailure def test_ics_javassist_src_main_javassist_ctclass_java_lead_copy(self): test_file = self.get_test_loc('ics/javassist-src-main-javassist/CtClass.java') expected = [ u'Copyright (c) 1999-2007 Shigeru Chiba.', u'Copyright (c) 1999-2010 Shigeru Chiba.', ] check_detection(expected, test_file) def test_ics_javassist_src_main_javassist_bytecode_bytestream_java(self): test_file = self.get_test_loc('ics/javassist-src-main-javassist-bytecode/ByteStream.java') expected = [ u'Copyright (c) 1999-2010 Shigeru Chiba.', ] check_detection(expected, test_file) def test_ics_javassist_src_main_javassist_bytecode_instructionprinter_java(self): test_file = self.get_test_loc('ics/javassist-src-main-javassist-bytecode/InstructionPrinter.java') expected = [ u'Copyright (c) 1999-2007 Shigeru Chiba', ] check_detection(expected, test_file) def test_ics_javassist_src_main_javassist_bytecode_annotation_annotation_java(self): test_file = self.get_test_loc('ics/javassist-src-main-javassist-bytecode-annotation/Annotation.java') expected = [ u'Copyright (c) 2004 Bill Burke.', ] check_detection(expected, test_file) def test_ics_javassist_src_main_javassist_bytecode_annotation_nosuchclasserror_java(self): test_file = self.get_test_loc('ics/javassist-src-main-javassist-bytecode-annotation/NoSuchClassError.java') expected = [ u'Copyright (c) 1999-2009 Shigeru Chiba.', ] check_detection(expected, test_file) def test_ics_javassist_tutorial_tutorial_html(self): test_file = self.get_test_loc('ics/javassist-tutorial/tutorial.html') expected = [ u'Copyright (c) 2000-2010 by Shigeru Chiba', ] check_detection(expected, test_file) def test_ics_jdiff_src_jdiff_diffmyers_java(self): test_file = 
self.get_test_loc('ics/jdiff-src-jdiff/DiffMyers.java') expected = [ u'Copyright (c) 2000 Business Management Systems, Inc.', ] check_detection(expected, test_file) def test_ics_jhead_main_c(self): test_file = self.get_test_loc('ics/jhead/main.c') expected = [ u'Copyright (c) 2008, The Android Open Source Project', ] check_detection(expected, test_file) def test_ics_jpeg_ansi2knr_c(self): test_file = self.get_test_loc('ics/jpeg/ansi2knr.c') expected = [ u'Copyright (c) 1988 Richard M. Stallman', u'Copyright (c) 1989 Aladdin Enterprises.', ] check_detection(expected, test_file) def test_ics_jpeg_cderror_h(self): test_file = self.get_test_loc('ics/jpeg/cderror.h') expected = [ u'Copyright (c) 1994-1997, Thomas G. Lane.', ] check_detection(expected, test_file) def test_ics_jpeg_cdjpeg_c(self): test_file = self.get_test_loc('ics/jpeg/cdjpeg.c') expected = [ u'Copyright (c) 1991-1997, Thomas G. Lane.', ] check_detection(expected, test_file) def test_ics_jpeg_cjpeg_c(self): test_file = self.get_test_loc('ics/jpeg/cjpeg.c') expected = [ u'Copyright (c) 1991-1998, Thomas G. Lane.', ] check_detection(expected, test_file) def test_ics_jpeg_ckconfig_c(self): test_file = self.get_test_loc('ics/jpeg/ckconfig.c') expected = [ u'Copyright (c) 1991-1994, Thomas G. Lane.', ] check_detection(expected, test_file) def test_ics_jpeg_coderules_doc(self): test_file = self.get_test_loc('ics/jpeg/coderules.doc') expected = [ u'Copyright (c) 1991-1996, Thomas G. 
Lane.', ] check_detection(expected, test_file) def test_ics_jpeg_config_guess(self): test_file = self.get_test_loc('ics/jpeg/config.guess') expected = [ u'Copyright (c) 1992, 93, 94, 95, 96, 1997 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_jpeg_config_sub(self): test_file = self.get_test_loc('ics/jpeg/config.sub') expected = [ u'Copyright (c) 1991, 92, 93, 94, 95, 96, 1997 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_jpeg_configure(self): test_file = self.get_test_loc('ics/jpeg/configure') expected = [ u'Copyright (c) 1992, 93, 94, 95, 96 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_jpeg_filelist_doc(self): test_file = self.get_test_loc('ics/jpeg/filelist.doc') expected = [ u'Copyright (c) 1994-1998, Thomas G. Lane.', ] check_detection(expected, test_file) def test_ics_jpeg_install_doc(self): test_file = self.get_test_loc('ics/jpeg/install.doc') expected = [ u'Copyright (c) 1991-1998, Thomas G. Lane.', ] check_detection(expected, test_file) def test_ics_jpeg_jcapimin_c(self): test_file = self.get_test_loc('ics/jpeg/jcapimin.c') expected = [ u'Copyright (c) 1994-1998, Thomas G. Lane.', ] check_detection(expected, test_file) def test_ics_jpeg_jcapistd_c(self): test_file = self.get_test_loc('ics/jpeg/jcapistd.c') expected = [ u'Copyright (c) 1994-1996, Thomas G. Lane.', ] check_detection(expected, test_file) def test_ics_jpeg_jccolor_c(self): test_file = self.get_test_loc('ics/jpeg/jccolor.c') expected = [ u'Copyright (c) 1991-1996, Thomas G. Lane.', ] check_detection(expected, test_file) def test_ics_jpeg_jcphuff_c(self): test_file = self.get_test_loc('ics/jpeg/jcphuff.c') expected = [ u'Copyright (c) 1995-1997, Thomas G. Lane.', ] check_detection(expected, test_file) def test_ics_jpeg_jctrans_c(self): test_file = self.get_test_loc('ics/jpeg/jctrans.c') expected = [ u'Copyright (c) 1995-1998, Thomas G. 
Lane.', ] check_detection(expected, test_file) def test_ics_jpeg_jmem_android_c(self): test_file = self.get_test_loc('ics/jpeg/jmem-android.c') expected = [ u'Copyright (c) 2007-2008 The Android Open Source Project', ] check_detection(expected, test_file) def test_ics_jpeg_jmemansi_c(self): test_file = self.get_test_loc('ics/jpeg/jmemansi.c') expected = [ u'Copyright (c) 1992-1996, Thomas G. Lane.', ] check_detection(expected, test_file) def test_ics_jpeg_jmemdos_c(self): test_file = self.get_test_loc('ics/jpeg/jmemdos.c') expected = [ u'Copyright (c) 1992-1997, Thomas G. Lane.', ] check_detection(expected, test_file) def test_ics_jpeg_jversion_h(self): test_file = self.get_test_loc('ics/jpeg/jversion.h') expected = [ u'Copyright (c) 1991-1998, Thomas G. Lane.', u'Copyright (c) 1998, Thomas G. Lane', ] check_detection(expected, test_file) def test_ics_jpeg_ltconfig(self): test_file = self.get_test_loc('ics/jpeg/ltconfig') expected = [ u'Copyright (c) 1996-1998 Free Software Foundation, Inc. Gordon Matzigkeit <gord@gnu.ai.mit.edu>, 1996', u'Copyright (c) 1996-1998 Free Software Foundation, Inc. Gordon Matzigkeit <gord@gnu.ai.mit.edu>, 1996', ] check_detection(expected, test_file) def test_ics_jpeg_ltmain_sh(self): test_file = self.get_test_loc('ics/jpeg/ltmain.sh') expected = [ u'Copyright (c) 1996-1998 Free Software Foundation, Inc. Gordon Matzigkeit <gord@gnu.ai.mit.edu>, 1996', ] check_detection(expected, test_file) def test_ics_jpeg_notice(self): test_file = self.get_test_loc('ics/jpeg/NOTICE') expected = [ u'copyright (c) 1991-1998, Thomas G. Lane.', ] check_detection(expected, test_file) def test_ics_jpeg_rdcolmap_c(self): test_file = self.get_test_loc('ics/jpeg/rdcolmap.c') expected = [ u'Copyright (c) 1994-1996, Thomas G. Lane.', u'Copyright (c) 1988 by Jef Poskanzer.', ] check_detection(expected, test_file) def test_ics_jpeg_rdppm_c(self): test_file = self.get_test_loc('ics/jpeg/rdppm.c') expected = [ u'Copyright (c) 1991-1997, Thomas G. 
Lane.', u'Copyright (c) 1988 by Jef Poskanzer.', ] check_detection(expected, test_file) def test_ics_jpeg_readme(self): test_file = self.get_test_loc('ics/jpeg/README') expected = [ u'copyright (c) 1991-1998, Thomas G. Lane.', u'copyright by the Free Software Foundation', ] check_detection(expected, test_file) def test_ics_jpeg_structure_doc(self): test_file = self.get_test_loc('ics/jpeg/structure.doc') expected = [ u'Copyright (c) 1991-1995, Thomas G. Lane.', ] check_detection(expected, test_file) def test_ics_jpeg_transupp_c(self): test_file = self.get_test_loc('ics/jpeg/transupp.c') expected = [ u'Copyright (c) 1997, Thomas G. Lane.', ] check_detection(expected, test_file) def test_ics_jpeg_wrgif_c(self): test_file = self.get_test_loc('ics/jpeg/wrgif.c') expected = [ u'Copyright (c) 1991-1997, Thomas G. Lane.', u'Copyright (c) 1989 by Jef Poskanzer.', ] check_detection(expected, test_file) def test_ics_jpeg_wrjpgcom_c(self): test_file = self.get_test_loc('ics/jpeg/wrjpgcom.c') expected = [ u'Copyright (c) 1994-1997, Thomas G. 
Lane.', ] check_detection(expected, test_file) def test_ics_jsr305_notice_trail_name(self): test_file = self.get_test_loc('ics/jsr305/NOTICE') expected = [ u'Copyright (c) 2007-2009, JSR305 expert group', ] check_detection(expected, test_file) def test_ics_jsr305_ri_src_main_java_javax_annotation_concurrent_guardedby_java(self): test_file = self.get_test_loc('ics/jsr305-ri-src-main-java-javax-annotation-concurrent/GuardedBy.java') expected = [ u'Copyright (c) 2005 Brian Goetz', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_asm_arm_atomic_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm/atomic.h') expected = [ u'Copyright (c) 1996 Russell King.', u'Copyright (c) 2002 Deep Blue Solutions Ltd.', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_asm_arm_bitops_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm/bitops.h') expected = [ u'Copyright 1995, Russell King. Various', u'Copyright 2001, Nicolas Pitre', ] check_detection(expected, test_file) @expectedFailure def test_ics_kernel_headers_original_asm_arm_bitops_h_extra_various(self): test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm/bitops.h') expected = [ u'Copyright 1995, Russell King.', u'Copyright 2001, Nicolas Pitre', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_asm_arm_cacheflush_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm/cacheflush.h') expected = [ u'Copyright (c) 1999-2002 Russell King', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_asm_arm_delay_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm/delay.h') expected = [ u'Copyright (c) 1995-2004 Russell King', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_asm_arm_domain_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm/domain.h') expected = [ u'Copyright (c) 1999 
Russell King.', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_asm_arm_fpstate_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm/fpstate.h') expected = [ u'Copyright (c) 1995 Russell King', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_asm_arm_glue_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm/glue.h') expected = [ u'Copyright (c) 1997-1999 Russell King', u'Copyright (c) 2000-2002 Deep Blue Solutions Ltd.', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_asm_arm_hardware_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm/hardware.h') expected = [ u'Copyright (c) 1996 Russell King', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_asm_arm_ide_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm/ide.h') expected = [ u'Copyright (c) 1994-1996 Linus Torvalds', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_asm_arm_io_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm/io.h') expected = [ u'Copyright (c) 1996-2000 Russell King', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_asm_arm_locks_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm/locks.h') expected = [ u'Copyright (c) 2000 Russell King', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_asm_arm_memory_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm/memory.h') expected = [ u'Copyright (c) 2000-2002 Russell King', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_asm_arm_mtd_xip_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm/mtd-xip.h') expected = [ u'Copyright (c) 2004 MontaVista Software, Inc.', ] check_detection(expected, test_file) def 
test_ics_kernel_headers_original_asm_arm_page_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm/page.h') expected = [ u'Copyright (c) 1995-2003 Russell King', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_asm_arm_param_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm/param.h') expected = [ u'Copyright (c) 1995-1999 Russell King', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_asm_arm_pgalloc_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm/pgalloc.h') expected = [ u'Copyright (c) 2000-2001 Russell King', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_asm_arm_pgtable_hwdef_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm/pgtable-hwdef.h') expected = [ u'Copyright (c) 1995-2002 Russell King', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_asm_arm_posix_types_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm/posix_types.h') expected = [ u'Copyright (c) 1996-1998 Russell King.', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_asm_arm_proc_fns_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm/proc-fns.h') expected = [ u'Copyright (c) 1997-1999 Russell King', u'Copyright (c) 2000 Deep Blue Solutions Ltd', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_asm_arm_procinfo_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm/procinfo.h') expected = [ u'Copyright (c) 1996-1999 Russell King', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_asm_arm_ptrace_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm/ptrace.h') expected = [ u'Copyright (c) 1996-2003 Russell King', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_asm_arm_sizes_h(self): test_file = 
self.get_test_loc('ics/kernel-headers-original-asm-arm/sizes.h') expected = [ u'Copyright (c) ARM Limited 1998.', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_asm_arm_smp_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm/smp.h') expected = [ u'Copyright (c) 2004-2005 ARM Ltd.', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_asm_arm_thread_info_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm/thread_info.h') expected = [ u'Copyright (c) 2002 Russell King.', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_asm_arm_timex_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm/timex.h') expected = [ u'Copyright (c) 1997,1998 Russell King', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_asm_arm_tlbflush_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm/tlbflush.h') expected = [ u'Copyright (c) 1999-2003 Russell King', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_asm_arm_unistd_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm/unistd.h') expected = [ u'Copyright (c) 2001-2005 Russell King', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_asm_arm_arch_board_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm-arch/board.h') expected = [ u'Copyright (c) 2004 Nokia Corporation', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_asm_arm_arch_board_perseus2_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm-arch/board-perseus2.h') expected = [ u'Copyright 2003 by Texas Instruments Incorporated OMAP730 / Perseus2', u'Copyright (c) 2001 RidgeRun, Inc.', ] check_detection(expected, test_file) @expectedFailure def test_ics_kernel_headers_original_asm_arm_arch_board_perseus2_h_extra_name(self): test_file = 
self.get_test_loc('ics/kernel-headers-original-asm-arm-arch/board-perseus2.h') expected = [ u'Copyright 2003 by Texas Instruments Incorporated', u'Copyright (c) 2001 RidgeRun, Inc. (http://www.ridgerun.com)', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_asm_arm_arch_dma_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm-arch/dma.h') expected = [ u'Copyright (c) 2003 Nokia Corporation', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_asm_arm_arch_fpga_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm-arch/fpga.h') expected = [ u'Copyright (c) 2001 RidgeRun, Inc.', u'Copyright (c) 2002 MontaVista Software, Inc.', u'Copyright (c) 2004 Nokia Corporation', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_asm_arm_arch_gpio_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm-arch/gpio.h') expected = [ u'Copyright (c) 2003-2005 Nokia Corporation', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_asm_arm_arch_gpio_switch_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm-arch/gpio-switch.h') expected = [ u'Copyright (c) 2006 Nokia Corporation', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_asm_arm_arch_hardware_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm-arch/hardware.h') expected = [ u'Copyright (c) 2001 RidgeRun, Inc. 
Author RidgeRun, Inc.', ] check_detection(expected, test_file) @expectedFailure def test_ics_kernel_headers_original_asm_arm_arch_hardware_h_extra_author(self): test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm-arch/hardware.h') expected = [ u'Copyright (c) 2001 RidgeRun, Inc.', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_asm_arm_arch_io_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm-arch/io.h') expected = [ u'Copyright (c) 1997-1999 Russell King', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_asm_arm_arch_irqs_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm-arch/irqs.h') expected = [ u'Copyright (c) Greg Lonnon 2001', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_asm_arm_arch_mcbsp_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm-arch/mcbsp.h') expected = [ u'Copyright (c) 2002 RidgeRun, Inc. Author Steve Johnson', ] check_detection(expected, test_file) @expectedFailure def test_ics_kernel_headers_original_asm_arm_arch_mcbsp_h_extra_author(self): test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm-arch/mcbsp.h') expected = [ u'Copyright (c) 2002 RidgeRun, Inc.', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_asm_arm_arch_memory_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm-arch/memory.h') expected = [ u'Copyright (c) 2000 RidgeRun, Inc.', u'Copyright (c) 1999 ARM Limited', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_asm_arm_arch_mtd_xip_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm-arch/mtd-xip.h') expected = [ u'(c) 2005 MontaVista Software, Inc.', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_asm_arm_arch_mux_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm-arch/mux.h') expected = [ 
u'Copyright (c) 2003 - 2005 Nokia Corporation', u'Copyright (c) 2004 Texas Instruments', u'Copyright (c) 2004 Texas Instruments', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_asm_arm_arch_timex_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm-arch/timex.h') expected = [ u'Copyright (c) 2000 RidgeRun, Inc.', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_asm_arm_arch_vmalloc_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-asm-arm-arch/vmalloc.h') expected = [ u'Copyright (c) 2000 Russell King.', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_asm_generic_tlb_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-asm-generic/tlb.h') expected = [ u'Copyright 2001 Red Hat, Inc.', u'Copyright Linus Torvalds', ] check_detection(expected, test_file) @expectedFailure def test_ics_kernel_headers_original_asm_generic_tlb_h_trail_other(self): test_file = self.get_test_loc('ics/kernel-headers-original-asm-generic/tlb.h') expected = [ u'Copyright 2001 Red Hat, Inc.', u'Copyright Linus Torvalds and others.', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_asm_generic_topology_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-asm-generic/topology.h') expected = [ u'Copyright (c) 2002, IBM Corp.', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_asm_x86_acpi_32_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-asm-x86/acpi_32.h') expected = [ u'Copyright (c) 2001 Paul Diefenbaugh <paul.s.diefenbaugh@intel.com>', u'Copyright (c) 2001 Patrick Mochel <mochel@osdl.org>', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_asm_x86_bitops_32_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-asm-x86/bitops_32.h') expected = [ u'Copyright 1992, Linus Torvalds.', ] check_detection(expected, test_file) def 
test_ics_kernel_headers_original_asm_x86_delay_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-asm-x86/delay.h') expected = [ u'Copyright (c) 1993 Linus Torvalds', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_asm_x86_fixmap_32_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-asm-x86/fixmap_32.h') expected = [ u'Copyright (c) 1998 Ingo Molnar', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_asm_x86_genapic_32_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-asm-x86/genapic_32.h') expected = [ u'Copyright 2003 Andi Kleen, SuSE Labs.', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_asm_x86_highmem_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-asm-x86/highmem.h') expected = [ u'Copyright (c) 1999 Gerhard Wichert, Siemens AG', u'Copyright (c) 1999 Ingo Molnar <mingo@redhat.com>', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_asm_x86_hw_irq_32_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-asm-x86/hw_irq_32.h') expected = [ u'(c) 1992, 1993 Linus Torvalds', u'(c) 1997 Ingo Molnar', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_asm_x86_i387_32_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-asm-x86/i387_32.h') expected = [ u'Copyright (c) 1994 Linus Torvalds', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_asm_x86_io_apic_32_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-asm-x86/io_apic_32.h') expected = [ u'Copyright (c) 1997, 1998, 1999, 2000 Ingo Molnar', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_asm_x86_ist_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-asm-x86/ist.h') expected = [ u'Copyright 2002 Andy Grover <andrew.grover@intel.com>', ] check_detection(expected, test_file) def 
test_ics_kernel_headers_original_asm_x86_semaphore_32_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-asm-x86/semaphore_32.h') expected = [ u'(c) Copyright 1996 Linus Torvalds', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_asm_x86_thread_info_32_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-asm-x86/thread_info_32.h') expected = [ u'Copyright (c) 2002 David Howells (dhowells@redhat.com)', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_asm_x86_voyager_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-asm-x86/voyager.h') expected = [ u'Copyright (c) 1999,2001', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_asm_x86_xen_hypercall_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-asm-x86-xen/hypercall.h') expected = [ u'Copyright (c) 2002-2004, K A Fraser', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_a1026_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/a1026.h') expected = [ u'Copyright (c) 2009 HTC Corporation.', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_aio_abi_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/aio_abi.h') expected = [ u'Copyright 2000,2001,2002 Red Hat.', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_android_alarm_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/android_alarm.h') expected = [ u'Copyright 2006, The Android Open Source Project', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_android_pmem_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/android_pmem.h') expected = [ u'Copyright (c) 2007 Google, Inc.', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_android_power_h(self): test_file = 
self.get_test_loc('ics/kernel-headers-original-linux/android_power.h') expected = [ u'Copyright 2005-2006, The Android Open Source Project', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_apm_bios_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/apm_bios.h') expected = [ u'Copyright 1994-2001 Stephen Rothwell (sfr@canb.auug.org.au)', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_ashmem_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/ashmem.h') expected = [ u'Copyright 2008 The Android Open Source Project', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_ata_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/ata.h') expected = [ u'Copyright 2003-2004 Red Hat, Inc.', u'Copyright 2003-2004 Jeff Garzik', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_attribute_container_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/attribute_container.h') expected = [ u'Copyright (c) 2005 - James Bottomley <James.Bottomley@steeleye.com>', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_auto_fs_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/auto_fs.h') expected = [ u'Copyright 1997 Transmeta Corporation', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_binder_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/binder.h') expected = [ u'Copyright (c) 2008 The Android Open Source Project', u'Copyright (c) 2005 Palmsource, Inc.', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_bio_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/bio.h') expected = [ u'Copyright (c) 2001 Jens Axboe <axboe@suse.de>', ] check_detection(expected, test_file) def 
test_ics_kernel_headers_original_linux_bmp085_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/bmp085.h') expected = [ u'Copyright (c) 2010 Motorola, Inc.', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_capella_cm3602_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/capella_cm3602.h') expected = [ u'Copyright (c) 2009 Google, Inc.', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_capi_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/capi.h') expected = [ u'Copyright 1997 by Carsten Paeth (calle@calle.in-berlin.de)', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_cdrom_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/cdrom.h') expected = [ u'Copyright (c) 1992 David Giller, rafetmad@oxy.edu 1994, 1995 Eberhard Moenkeberg, emoenke@gwdg.de 1996 David van Leeuwen', ] check_detection(expected, test_file) @expectedFailure def test_ics_kernel_headers_original_linux_cdrom_h_trail_email(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/cdrom.h') expected = [ u'Copyright (c) 1992 David Giller, rafetmad@oxy.edu 1994, 1995 Eberhard Moenkeberg, emoenke@gwdg.de 1996 David van Leeuwen, david@tm.tno.nl', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_clk_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/clk.h') expected = [ u'Copyright (c) 2004 ARM Limited.', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_coda_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/coda.h') expected = [ u'Copyright (c) 1987-1999 Carnegie Mellon University', u'Copyright (c) 1987-1999 Carnegie Mellon University', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_coda_fs_i_h(self): test_file = 
self.get_test_loc('ics/kernel-headers-original-linux/coda_fs_i.h') expected = [ u'Copyright (c) 1998 Carnegie Mellon University', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_completion_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/completion.h') expected = [ u'(c) Copyright 2001 Linus Torvalds', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_cpcap_audio_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/cpcap_audio.h') expected = [ u'Copyright (c) 2010 Google, Inc.', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_device_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/device.h') expected = [ u'Copyright (c) 2001-2003 Patrick Mochel <mochel@osdl.org>', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_dmaengine_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/dmaengine.h') expected = [ u'Copyright (c) 2004 - 2006 Intel Corporation.', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_dm_ioctl_h_trail_name(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/dm-ioctl.h') expected = [ u'Copyright (c) 2001 - 2003 Sistina Software (UK) Limited.', u'Copyright (c) 2004 - 2005 Red Hat, Inc.', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_efs_dir_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/efs_dir.h') expected = [ u'Copyright (c) 1999 Al Smith', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_efs_fs_i_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/efs_fs_i.h') expected = [ u'Copyright (c) 1999 Al Smith', u'(c) 1988 Silicon Graphics', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_ethtool_h(self): test_file = 
self.get_test_loc('ics/kernel-headers-original-linux/ethtool.h') expected = [ u'Copyright (c) 1998 David S. Miller (davem@redhat.com)', u'Copyright 2001 Jeff Garzik <jgarzik@pobox.com>', u'Portions Copyright 2001 Sun Microsystems', u'Portions Copyright 2002 Intel', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_ext2_fs_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/ext2_fs.h') expected = [ u'Copyright (c) 1992, 1993, 1994, 1995 Remy Card (card@masi.ibp.fr) Laboratoire MASI', u'Copyright (c) 1991, 1992 Linus Torvalds', ] check_detection(expected, test_file) @expectedFailure def test_ics_kernel_headers_original_linux_ext2_fs_h_trail_name(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/ext2_fs.h') expected = [ u'Copyright (C) 1992, 1993, 1994, 1995 Remy Card (card@masi.ibp.fr) Laboratoire MASI - Institut Blaise Pascal', u'Copyright (c) 1991, 1992 Linus Torvalds', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_ext3_fs_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/ext3_fs.h') expected = [ u'Copyright (c) 1992, 1993, 1994, 1995 Remy Card (card@masi.ibp.fr) Laboratoire MASI', u'Copyright (c) 1991, 1992 Linus Torvalds', u'(c) Daniel Phillips, 2001', ] check_detection(expected, test_file) @expectedFailure def test_ics_kernel_headers_original_linux_ext3_fs_h_trail_name(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/ext3_fs.h') expected = [ u'Copyright (C) 1992, 1993, 1994, 1995 Remy Card (card@masi.ibp.fr) Laboratoire MASI - Institut Blaise Pascal', u'Copyright (c) 1991, 1992 Linus Torvalds', u'(c) Daniel Phillips, 2001', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_ftape_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/ftape.h') expected = [ u'Copyright (c) 1994-1996 Bas Laarhoven', u'(c) 1996-1997 Claus-Justus Heine.', ] 
check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_genhd_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/genhd.h') expected = [ u'Copyright (c) 1992 Drew Eckhardt Generic', ] check_detection(expected, test_file) @expectedFailure def test_ics_kernel_headers_original_linux_genhd_h_extra_generic(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/genhd.h') expected = [ u'Copyright (c) 1992 Drew Eckhardt', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_hdsmart_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/hdsmart.h') expected = [ u'Copyright (c) 1999-2000 Michael Cornwell <cornwell@acm.org>', u'Copyright (c) 2000 Andre Hedrick <andre@linux-ide.org>', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_hid_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/hid.h') expected = [ u'Copyright (c) 1999 Andreas Gal', u'Copyright (c) 2000-2001 Vojtech Pavlik', u'Copyright (c) 2006-2007 Jiri Kosina', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_hidraw_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/hidraw.h') expected = [ u'Copyright (c) 2007 Jiri Kosina', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_hil_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/hil.h') expected = [ u'Copyright (c) 2001 Brian S. Julin', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_i2c_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/i2c.h') expected = [ u'Copyright (c) 1995-2000 Simon G. 
Vogl', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_if_ppp_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/if_ppp.h') expected = [ u'Copyright (c) 1989 Carnegie Mellon University.', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_inotify_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/inotify.h') expected = [ u'Copyright (c) 2005 John McCutchan', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_input_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/input.h') expected = [ u'Copyright (c) 1999-2002 Vojtech Pavlik', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_ion_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/ion.h') expected = [ u'Copyright (c) 2011 Google, Inc.', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_ipmi_msgdefs_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/ipmi_msgdefs.h') expected = [ u'Copyright 2002 MontaVista Software Inc.', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_jbd_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/jbd.h') expected = [ u'Copyright 1998-2000 Red Hat, Inc', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_kernelcapi_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/kernelcapi.h') expected = [ u'(c) Copyright 1997 by Carsten Paeth (calle@calle.in-berlin.de)', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_keychord_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/keychord.h') expected = [ u'Copyright (c) 2008 Google, Inc.', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_klist_h(self): test_file = 
self.get_test_loc('ics/kernel-headers-original-linux/klist.h') expected = [ u'Copyright (c) 2005 Patrick Mochel', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_kobject_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/kobject.h') expected = [ u'Copyright (c) 2002-2003 Patrick Mochel', u'Copyright (c) 2002-2003 Open Source Development Labs', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_kref_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/kref.h') expected = [ u'Copyright (c) 2004 Greg Kroah-Hartman <greg@kroah.com>', u'Copyright (c) 2004 IBM Corp.', u'Copyright (c) 2002-2003 Patrick Mochel <mochel@osdl.org>', u'Copyright (c) 2002-2003 Open Source Development Labs', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_ktime_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/ktime.h') expected = [ u'Copyright (c) 2005, Thomas Gleixner <tglx@linutronix.de>', u'Copyright (c) 2005, Red Hat, Inc., Ingo Molnar', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_kxtf9_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/kxtf9.h') expected = [ u'Copyright (c) 2008-2009, Kionix, Inc.', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_leds_an30259a_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/leds-an30259a.h') expected = [ u'Copyright (c) 2011 Samsung Electronics Co. 
Ltd.', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_lis331dlh_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/lis331dlh.h') expected = [ u'Copyright (c) 2008-2009, Motorola', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_lockdep_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/lockdep.h') expected = [ u'Copyright (c) 2006 Red Hat, Inc., Ingo Molnar <mingo@redhat.com>', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_loop_h_trail_name(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/loop.h') expected = [ u"Copyright 1993 by Theodore Ts'o.", ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_mc146818rtc_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/mc146818rtc.h') expected = [ u'Copyright Torsten Duwe <duwe@informatik.uni-erlangen.de> 1993', u'Copyright Motorola 1984', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_mempolicy_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/mempolicy.h') expected = [ u'Copyright 2003,2004 Andi Kleen SuSE Labs', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_moduleparam_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/moduleparam.h') expected = [ u'(c) Copyright 2001, 2002 Rusty Russell IBM Corporation', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_msm_kgsl_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/msm_kgsl.h') expected = [ u'(c) Copyright Advanced Micro Devices, Inc. 
2002, 2007', u'Copyright (c) 2008-2009 QUALCOMM USA, INC.', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_msm_mdp_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/msm_mdp.h') expected = [ u'Copyright (c) 2007 Google Incorporated', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_msm_q6vdec_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/msm_q6vdec.h') expected = [ u'Copyright (c) 2008-2009, Code Aurora Forum.', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_msm_vidc_dec_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/msm_vidc_dec.h') expected = [ u'Copyright (c) 2010, Code Aurora Forum.', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_msm_vidc_enc_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/msm_vidc_enc.h') expected = [ u'Copyright (c) 2009, Code Aurora Forum.', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_mt9t013_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/mt9t013.h') expected = [ u'Copyright (c) 2007, 2008 HTC, Inc', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_mutex_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/mutex.h') expected = [ u'Copyright (c) 2004, 2005, 2006 Red Hat, Inc., Ingo Molnar <mingo@redhat.com>', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_ncp_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/ncp.h') expected = [ u'Copyright (c) 1995 by Volker Lendecke', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_ncp_mount_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/ncp_mount.h') expected = [ u'Copyright (c) 1995, 1996 by Volker Lendecke', ] 
check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_netfilter_arp_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/netfilter_arp.h') expected = [ u'(c) 2002 Rusty Russell', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_nfs4_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/nfs4.h') expected = [ u'Copyright (c) 2002 The Regents of the University of Michigan.', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_nfsacl_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/nfsacl.h') expected = [ u'(c) 2003 Andreas Gruenbacher <agruen@suse.de>', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_nvhdcp_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/nvhdcp.h') expected = [ u'Copyright (c) 2010-2011, NVIDIA Corporation.', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_pagemap_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/pagemap.h') expected = [ u'Copyright 1995 Linus Torvalds', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_patchkey_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/patchkey.h') expected = [ u'Copyright (c) 2005 Stuart Brady', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_pci_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/pci.h') expected = [ u'Copyright 1994, Drew Eckhardt', u'Copyright 1997 1999 Martin Mares <mj@ucw.cz>', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_perf_event_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/perf_event.h') expected = [ u'Copyright (c) 2008-2009, Thomas Gleixner <tglx@linutronix.de>', u'Copyright (c) 2008-2009, Red Hat, Inc., Ingo Molnar', u'Copyright (c) 
2008-2009, Red Hat, Inc., Peter Zijlstra', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_plist_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/plist.h') expected = [ u'(c) 2002-2003 Intel Corp Inaky Perez-Gonzalez <inaky.perez-gonzalez@intel.com>.', u'(c) MontaVista Software, Inc.', u'(c) 2005 Thomas Gleixner <tglx@linutronix.de>', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_pm_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/pm.h') expected = [ u'Copyright (c) 2000 Andrew Henroid', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_pn544_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/pn544.h') expected = [ u'Copyright (c) 2010 Trusted Logic S.A.', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_posix_acl_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/posix_acl.h') expected = [ u'(c) 2002 Andreas Gruenbacher, <a.gruenbacher@computer.org>', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_ppdev_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/ppdev.h') expected = [ u'Copyright (c) 1998-9 Tim Waugh <tim@cyberelk.demon.co.uk>', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_ppp_defs_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/ppp_defs.h') expected = [ u'Copyright (c) 1994 The Australian National University.', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_qic117_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/qic117.h') expected = [ u'Copyright (c) 1993-1996 Bas Laarhoven', u'(c) 1997 Claus-Justus Heine.', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_quota_h(self): test_file = 
self.get_test_loc('ics/kernel-headers-original-linux/quota.h') expected = [ u'Copyright (c) 1982, 1986 Regents of the University of California.', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_rcupdate_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/rcupdate.h') expected = [ u'Copyright (c) IBM Corporation, 2001', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_relay_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/relay.h') expected = [ u'Copyright (c) 2002, 2003 - Tom Zanussi (zanussi@us.ibm.com), IBM Corp', u'Copyright (c) 1999, 2000, 2001, 2002 - Karim Yaghmour (karim@opersys.com)', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_rpmsg_omx_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/rpmsg_omx.h') expected = [ u'Copyright (c) 2011 Texas Instruments.', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_rtc_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/rtc.h') expected = [ u'Copyright (c) 1999 Hewlett-Packard Co.', u'Copyright (c) 1999 Stephane Eranian <eranian@hpl.hp.com>', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_serial_core_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/serial_core.h') expected = [ u'Copyright (c) 2000 Deep Blue Solutions Ltd.', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_serial_reg_h_trail_name(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/serial_reg.h') expected = [ u"Copyright (c) 1992, 1994 by Theodore Ts'o.", ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_sfh7743_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/sfh7743.h') expected = [ u'Copyright (c) 2009 Motorola, Inc.', ] check_detection(expected, test_file) 
def test_ics_kernel_headers_original_linux_smb_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/smb.h') expected = [ u'Copyright (c) 1995, 1996 by Paal-Kr. Engstad and Volker Lendecke', u'Copyright (c) 1997 by Volker Lendecke', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_soundcard_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/soundcard.h') expected = [ u'Copyright by Hannu Savolainen 1993-1997', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_spinlock_api_smp_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/spinlock_api_smp.h') expected = [ u'portions Copyright 2005, Red Hat, Inc., Ingo Molnar', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_sysfs_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/sysfs.h') expected = [ u'Copyright (c) 2001,2002 Patrick Mochel', u'Copyright (c) 2004 Silicon Graphics, Inc.', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_taskstats_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/taskstats.h') expected = [ u'Copyright (c) Shailabh Nagar, IBM Corp. 2006', u'(c) Balbir Singh, IBM Corp. 2006', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_telephony_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/telephony.h') expected = [ u'(c) Copyright 1999-2001 Quicknet Technologies, Inc.', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_timex_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/timex.h') expected = [ u'Copyright (c) David L. 
Mills 1993', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_ufs_fs_i_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/ufs_fs_i.h') expected = [ u'Copyright (c) 1996 Adrian Rodriguez (adrian@franklins-tower.rutgers.edu)', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_usbdevice_fs_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/usbdevice_fs.h') expected = [ u'Copyright (c) 2000 Thomas Sailer (sailer@ife.ee.ethz.ch)', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_videodev2_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/videodev2.h') expected = [ u'Copyright (c) 1999-2007 the contributors', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_vt_buffer_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/vt_buffer.h') expected = [ u'(c) 1998 Martin Mares <mj@ucw.cz>', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_wanrouter_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/wanrouter.h') expected = [ u'Copyright (c) 1995-2000 Sangoma Technologies Inc.', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_wireless_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/wireless.h') expected = [ u'Copyright (c) 1997-2006 Jean Tourrilhes', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_xattr_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux/xattr.h') expected = [ u'Copyright (c) 2001 by Andreas Gruenbacher <a.gruenbacher@computer.org>', u'Copyright (c) 2001-2002 Silicon Graphics, Inc.', u'Copyright (c) 2004 Red Hat, Inc., James Morris <jmorris@redhat.com>', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_zconf_h(self): test_file = 
self.get_test_loc('ics/kernel-headers-original-linux/zconf.h') expected = [ u'Copyright (c) 1995-1998 Jean-loup Gailly.', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_lockd_nlm_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux-lockd/nlm.h') expected = [ u'Copyright (c) 1996, Olaf Kirch <okir@monad.swb.de>', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_lockd_xdr_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux-lockd/xdr.h') expected = [ u'Copyright (c) 1996 Olaf Kirch <okir@monad.swb.de>', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_mtd_bbm_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux-mtd/bbm.h') expected = [ u'Copyright (c) 2005 Samsung Electronics Kyungmin Park <kyungmin.park@samsung.com>', u'Copyright (c) 2000-2005 Thomas Gleixner <tglx@linuxtronix.de>', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_mtd_blktrans_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux-mtd/blktrans.h') expected = [ u'(c) 2003 David Woodhouse <dwmw2@infradead.org>', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_mtd_flashchip_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux-mtd/flashchip.h') expected = [ u'(c) 2000 Red Hat.', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_mtd_mtd_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux-mtd/mtd.h') expected = [ u'Copyright (c) 1999-2003 David Woodhouse <dwmw2@infradead.org>', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_mtd_nand_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux-mtd/nand.h') expected = [ u'Copyright (c) 2000 David Woodhouse <dwmw2@mvhi.com> Steven J. 
Hill <sjhill@realitydiluted.com> Thomas Gleixner <tglx@linutronix.de>', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_mtd_nand_ecc_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux-mtd/nand_ecc.h') expected = [ u'Copyright (c) 2000 Steven J. Hill (sjhill@realitydiluted.com)', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_mtd_nftl_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux-mtd/nftl.h') expected = [ u'(c) 1999-2003 David Woodhouse <dwmw2@infradead.org>', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_mtd_onenand_regs_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux-mtd/onenand_regs.h') expected = [ u'Copyright (c) 2005 Samsung Electronics', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_mtd_partitions_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux-mtd/partitions.h') expected = [ u'(c) 2000 Nicolas Pitre <nico@cam.org>', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_netfilter_xt_connmark_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux-netfilter/xt_CONNMARK.h') expected = [ u'Copyright (c) 2002,2004 MARA Systems AB', ] check_detection(expected, test_file) @expectedFailure def test_ics_kernel_headers_original_linux_netfilter_xt_connmark_h_trail_url(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux-netfilter/xt_CONNMARK.h') expected = [ u'Copyright (c) 2002,2004 MARA Systems AB <http://www.marasystems.com>', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_netfilter_ipv4_ip_queue_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux-netfilter_ipv4/ip_queue.h') expected = [ u'(c) 2000 James Morris', ] check_detection(expected, test_file) def 
test_ics_kernel_headers_original_linux_netfilter_ipv4_ipt_dscp_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux-netfilter_ipv4/ipt_DSCP.h') expected = [ u'(c) 2002 Harald Welte <laforge@gnumonks.org>', u'(c) 2000 by Matthew G. Marsh <mgm@paktronix.com>', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_netfilter_ipv4_ipt_ttl_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux-netfilter_ipv4/ipt_TTL.h') expected = [ u'(c) 2000 by Harald Welte <laforge@netfilter.org>', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_nfsd_auth_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux-nfsd/auth.h') expected = [ u'Copyright (c) 1995, 1996 Olaf Kirch <okir@monad.swb.de>', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_nfsd_const_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux-nfsd/const.h') expected = [ u'Copyright (c) 1995-1997 Olaf Kirch <okir@monad.swb.de>', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_nfsd_debug_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux-nfsd/debug.h') expected = [ u'Copyright (c) 1995 Olaf Kirch <okir@monad.swb.de>', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_nfsd_interface_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux-nfsd/interface.h') expected = [ u'Copyright (c) 2000 Neil Brown <neilb@cse.unsw.edu.au>', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_nfsd_nfsfh_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux-nfsd/nfsfh.h') expected = [ u'Copyright (c) 1995, 1996, 1997 Olaf Kirch <okir@monad.swb.de>', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_raid_md_h(self): test_file = 
self.get_test_loc('ics/kernel-headers-original-linux-raid/md.h') expected = [ u'Copyright (c) 1996-98 Ingo Molnar, Gadi Oxman', u'Copyright (c) 1994-96 Marc ZYNGIER <zyngier@ufr-info-p7.ibp.fr>', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_raid_md_k_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux-raid/md_k.h') expected = [ u'Copyright (c) 1996-98 Ingo Molnar, Gadi Oxman', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_sunrpc_auth_gss_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux-sunrpc/auth_gss.h') expected = [ u'Copyright (c) 2000 The Regents of the University of Michigan', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_sunrpc_clnt_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux-sunrpc/clnt.h') expected = [ u'Copyright (c) 1995, 1996, Olaf Kirch <okir@monad.swb.de>', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_sunrpc_gss_asn1_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux-sunrpc/gss_asn1.h') expected = [ u'Copyright (c) 2000 The Regents of the University of Michigan.', u'Copyright 1995 by the Massachusetts Institute of Technology.', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_sunrpc_gss_err_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux-sunrpc/gss_err.h') expected = [ u'Copyright (c) 2002 The Regents of the University of Michigan.', u'Copyright 1993 by OpenVision Technologies, Inc.', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_linux_sunrpc_timer_h(self): test_file = self.get_test_loc('ics/kernel-headers-original-linux-sunrpc/timer.h') expected = [ u'Copyright (c) 2002 Trond Myklebust <trond.myklebust@fys.uio.no>', ] check_detection(expected, test_file) def test_ics_kernel_headers_original_sound_asound_h(self): test_file 
= self.get_test_loc('ics/kernel-headers-original-sound/asound.h') expected = [ u'Copyright (c) 1994-2003 by Jaroslav Kysela <perex@perex.cz>, Abramo Bagnara <abramo@alsa-project.org>', ] check_detection(expected, test_file) def test_ics_libffi_aclocal_m4(self): test_file = self.get_test_loc('ics/libffi/aclocal.m4') expected = [ u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.', u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.', u'Copyright (c) 2002, 2003, 2005, 2006 Free Software Foundation, Inc.', u'Copyright (c) 2001, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.', u'Copyright (c) 2001, 2003, 2005 Free Software Foundation, Inc.', u'Copyright (c) 1997, 2000, 2001, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.', u'Copyright (c) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.', u'Copyright (c) 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.', u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.', u'Copyright (c) 2001, 2003, 2005 Free Software Foundation, Inc.', u'Copyright (c) 2003, 2005 Free Software Foundation, Inc.', u'Copyright (c) 1996, 1998, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.', u'Copyright (c) 2001, 2002, 2003, 2005 Free Software Foundation, Inc.', u'Copyright (c) 1999, 2000, 2001, 2003, 2004, 2005 Free Software Foundation, Inc.', u'Copyright (c) 1997, 1999, 2000, 2001, 2003, 2004, 2005 Free Software Foundation, Inc.', u'Copyright (c) 2003, 2004, 2005, 2006 Free Software Foundation, Inc.', u'Copyright (c) 2001, 2002, 2003, 2005 Free Software Foundation, Inc.', u'Copyright (c) 1996, 1997, 2000, 2001, 2003, 2005 Free Software Foundation, Inc.', u'Copyright (c) 2001, 2003, 2005 Free Software Foundation, Inc.', u'Copyright (c) 2006 Free Software Foundation, Inc.', u'Copyright 
(c) 2004, 2005 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_libffi_android_mk(self): test_file = self.get_test_loc('ics/libffi/Android.mk') expected = [ u'Copyright 2007 The Android Open Source Project', ] check_detection(expected, test_file) def test_ics_libffi_configure(self): test_file = self.get_test_loc('ics/libffi/configure') expected = [ u'Copyright (c) 1992, 1993, 1994, 1995, 1996, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.', u'Copyright (c) 1992, 1993, 1994, 1995, 1996, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.', u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.', u'Copyright (c) 2006 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_libffi_depcomp(self): test_file = self.get_test_loc('ics/libffi/depcomp') expected = [ u'Copyright (c) 1999, 2000, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_libffi_license(self): test_file = self.get_test_loc('ics/libffi/LICENSE') expected = [ u'Copyright (c) 1996-2008 Red Hat, Inc', ] check_detection(expected, test_file) def test_ics_libffi_ltcf_c_sh(self): test_file = self.get_test_loc('ics/libffi/ltcf-c.sh') expected = [ u'Copyright (c) 1996-2000, 2001 Free Software Foundation, Inc.', u'Gordon Matzigkeit <gord@gnu.ai.mit.edu>, 1996', ] check_detection(expected, test_file) def test_ics_libffi_ltcf_cxx_sh(self): test_file = self.get_test_loc('ics/libffi/ltcf-cxx.sh') expected = [ u'Copyright (c) 1996-1999, 2000, 2001, 2003 Free Software Foundation, Inc.', u'Gordon Matzigkeit <gord@gnu.ai.mit.edu>, 1996', ] check_detection(expected, test_file) def test_ics_libffi_ltconfig(self): test_file = self.get_test_loc('ics/libffi/ltconfig') expected = [ u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001 Free Software Foundation, Inc.', u'Gordon 
Matzigkeit <gord@gnu.ai.mit.edu>, 1996', u'Copyright (c) 1996-2000 Free Software Foundation, Inc.', u'Gordon Matzigkeit <gord@gnu.ai.mit.edu>, 1996', u'Copyright (c) 1999-2000 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_libffi_ltmain_sh(self): test_file = self.get_test_loc('ics/libffi/ltmain.sh') expected = [ u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.', u'Gordon Matzigkeit <gord@gnu.ai.mit.edu>, 1996', u'Copyright (c) 2007 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_libffi_missing(self): test_file = self.get_test_loc('ics/libffi/missing') expected = [ u'Copyright (c) 1996, 1997, 1999, 2000, 2002, 2003, 2004 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_libffi_darwin_x86_ffi_h(self): test_file = self.get_test_loc('ics/libffi-darwin-x86/ffi.h') expected = [ u'Copyright (c) 1996-2003, 2007, 2008 Red Hat, Inc.', ] check_detection(expected, test_file) def test_ics_libffi_darwin_x86_ffitarget_h(self): test_file = self.get_test_loc('ics/libffi-darwin-x86/ffitarget.h') expected = [ u'Copyright (c) 1996-2003 Red Hat, Inc.', u'Copyright (c) 2008 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_libffi_doc_libffi_texi(self): test_file = self.get_test_loc('ics/libffi-doc/libffi.texi') expected = [ u'Copyright 2008 Red Hat, Inc.', ] check_detection(expected, test_file) def test_ics_libffi_include_ffi_h_in(self): test_file = self.get_test_loc('ics/libffi-include/ffi.h.in') expected = [ u'Copyright (c) 1996-2003, 2007, 2008 Red Hat, Inc.', ] check_detection(expected, test_file) def test_ics_libffi_include_ffi_common_h(self): test_file = self.get_test_loc('ics/libffi-include/ffi_common.h') expected = [ u'Copyright (c) 1996 Red Hat, Inc.', u'Copyright (c) 2007 Free Software Foundation, Inc', ] check_detection(expected, test_file) def 
test_ics_libffi_src_closures_c(self): test_file = self.get_test_loc('ics/libffi-src/closures.c') expected = [ u'Copyright (c) 2007 Red Hat, Inc.', u'Copyright (c) 2007 Free Software Foundation, Inc', ] check_detection(expected, test_file) def test_ics_libffi_src_debug_c(self): test_file = self.get_test_loc('ics/libffi-src/debug.c') expected = [ u'Copyright (c) 1996 Red Hat, Inc.', ] check_detection(expected, test_file) def test_ics_libffi_src_java_raw_api_c(self): test_file = self.get_test_loc('ics/libffi-src/java_raw_api.c') expected = [ u'Copyright (c) 1999, 2007, 2008 Red Hat, Inc.', ] check_detection(expected, test_file) def test_ics_libffi_src_prep_cif_c(self): test_file = self.get_test_loc('ics/libffi-src/prep_cif.c') expected = [ u'Copyright (c) 1996, 1998, 2007 Red Hat, Inc.', ] check_detection(expected, test_file) def test_ics_libffi_src_raw_api_c(self): test_file = self.get_test_loc('ics/libffi-src/raw_api.c') expected = [ u'Copyright (c) 1999, 2008 Red Hat, Inc.', ] check_detection(expected, test_file) def test_ics_libffi_src_types_c(self): test_file = self.get_test_loc('ics/libffi-src/types.c') expected = [ u'Copyright (c) 1996, 1998 Red Hat, Inc.', ] check_detection(expected, test_file) def test_ics_libffi_src_alpha_ffi_c(self): test_file = self.get_test_loc('ics/libffi-src-alpha/ffi.c') expected = [ u'Copyright (c) 1998, 2001, 2007, 2008 Red Hat, Inc.', ] check_detection(expected, test_file) def test_ics_libffi_src_alpha_ffitarget_h(self): test_file = self.get_test_loc('ics/libffi-src-alpha/ffitarget.h') expected = [ u'Copyright (c) 1996-2003 Red Hat, Inc.', ] check_detection(expected, test_file) def test_ics_libffi_src_arm_ffi_c(self): test_file = self.get_test_loc('ics/libffi-src-arm/ffi.c') expected = [ u'Copyright (c) 1998, 2008 Red Hat, Inc.', ] check_detection(expected, test_file) def test_ics_libffi_src_cris_ffi_c(self): test_file = self.get_test_loc('ics/libffi-src-cris/ffi.c') expected = [ u'Copyright (c) 1998 Cygnus Solutions', u'Copyright 
(c) 2004 Simon Posnjak', u'Copyright (c) 2005 Axis Communications AB', u'Copyright (c) 2007 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_libffi_src_frv_ffi_c(self): test_file = self.get_test_loc('ics/libffi-src-frv/ffi.c') expected = [ u'Copyright (c) 2004 Anthony Green', u'Copyright (c) 2007 Free Software Foundation, Inc.', u'Copyright (c) 2008 Red Hat, Inc.', ] check_detection(expected, test_file) def test_ics_libffi_src_frv_ffitarget_h(self): test_file = self.get_test_loc('ics/libffi-src-frv/ffitarget.h') expected = [ u'Copyright (c) 1996-2004 Red Hat, Inc.', ] check_detection(expected, test_file) def test_ics_libffi_src_ia64_ffi_c(self): test_file = self.get_test_loc('ics/libffi-src-ia64/ffi.c') expected = [ u'Copyright (c) 1998, 2007, 2008 Red Hat, Inc.', u'Copyright (c) 2000 Hewlett Packard Company', ] check_detection(expected, test_file) def test_ics_libffi_src_ia64_ia64_flags_h(self): test_file = self.get_test_loc('ics/libffi-src-ia64/ia64_flags.h') expected = [ u'Copyright (c) 2000 Hewlett Packard Company', ] check_detection(expected, test_file) def test_ics_libffi_src_m32r_ffi_c(self): test_file = self.get_test_loc('ics/libffi-src-m32r/ffi.c') expected = [ u'Copyright (c) 2004 Renesas Technology', u'Copyright (c) 2008 Red Hat, Inc.', ] check_detection(expected, test_file) def test_ics_libffi_src_m32r_ffitarget_h(self): test_file = self.get_test_loc('ics/libffi-src-m32r/ffitarget.h') expected = [ u'Copyright (c) 2004 Renesas Technology.', ] check_detection(expected, test_file) def test_ics_libffi_src_mips_ffi_c(self): test_file = self.get_test_loc('ics/libffi-src-mips/ffi.c') expected = [ u'Copyright (c) 1996, 2007, 2008 Red Hat, Inc.', u'Copyright (c) 2008 David Daney', ] check_detection(expected, test_file) def test_ics_libffi_src_pa_ffi_c(self): test_file = self.get_test_loc('ics/libffi-src-pa/ffi.c') expected = [ u'(c) 2003-2004 Randolph Chung <tausq@debian.org>', u'(c) 2008 Red Hat, Inc.', u'(c) 2006 Free 
Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_libffi_src_powerpc_asm_h(self): test_file = self.get_test_loc('ics/libffi-src-powerpc/asm.h') expected = [ u'Copyright (c) 1998 Geoffrey Keating', ] check_detection(expected, test_file) def test_ics_libffi_src_powerpc_ffi_c(self): test_file = self.get_test_loc('ics/libffi-src-powerpc/ffi.c') expected = [ u'Copyright (c) 1998 Geoffrey Keating', u'Copyright (c) 2007 Free Software Foundation, Inc', u'Copyright (c) 2008 Red Hat, Inc', ] check_detection(expected, test_file) def test_ics_libffi_src_powerpc_ffi_darwin_c(self): test_file = self.get_test_loc('ics/libffi-src-powerpc/ffi_darwin.c') expected = [ u'Copyright (c) 1998 Geoffrey Keating', u'Copyright (c) 2001 John Hornkvist', u'Copyright (c) 2002, 2006, 2007 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_libffi_src_powerpc_ffitarget_h(self): test_file = self.get_test_loc('ics/libffi-src-powerpc/ffitarget.h') expected = [ u'Copyright (c) 1996-2003 Red Hat, Inc.', u'Copyright (c) 2007 Free Software Foundation, Inc', ] check_detection(expected, test_file) def test_ics_libffi_src_s390_ffi_c(self): test_file = self.get_test_loc('ics/libffi-src-s390/ffi.c') expected = [ u'Copyright (c) 2000, 2007 Software AG', u'Copyright (c) 2008 Red Hat, Inc', ] check_detection(expected, test_file) def test_ics_libffi_src_sh_ffi_c(self): test_file = self.get_test_loc('ics/libffi-src-sh/ffi.c') expected = [ u'Copyright (c) 2002, 2003, 2004, 2005, 2006, 2007, 2008 Kaz Kojima', u'Copyright (c) 2008 Red Hat, Inc.', ] check_detection(expected, test_file) def test_ics_libffi_src_sh64_ffi_c(self): test_file = self.get_test_loc('ics/libffi-src-sh64/ffi.c') expected = [ u'Copyright (c) 2003, 2004 Kaz Kojima', u'Copyright (c) 2008 Anthony Green', ] check_detection(expected, test_file) def test_ics_libffi_src_sparc_ffi_c(self): test_file = self.get_test_loc('ics/libffi-src-sparc/ffi.c') expected = [ u'Copyright (c) 1996, 2003, 
2004, 2007, 2008 Red Hat, Inc.', ] check_detection(expected, test_file) def test_ics_libffi_src_x86_ffi_c(self): test_file = self.get_test_loc('ics/libffi-src-x86/ffi.c') expected = [ u'Copyright (c) 1996, 1998, 1999, 2001, 2007, 2008 Red Hat, Inc.', u'Copyright (c) 2002 Ranjit Mathew', u'Copyright (c) 2002 Bo Thorsen', u'Copyright (c) 2002 Roger Sayle', u'Copyright (c) 2008 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_libffi_src_x86_ffi64_c(self): test_file = self.get_test_loc('ics/libffi-src-x86/ffi64.c') expected = [ u'Copyright (c) 2002, 2007 Bo Thorsen <bo@suse.de>', u'Copyright (c) 2008 Red Hat, Inc.', ] check_detection(expected, test_file) def test_ics_libffi_testsuite_run_all_tests(self): test_file = self.get_test_loc('ics/libffi-testsuite/run-all-tests') expected = [ u'Copyright 2009 The Android Open Source Project', ] check_detection(expected, test_file) def test_ics_libffi_testsuite_lib_libffi_dg_exp(self): test_file = self.get_test_loc('ics/libffi-testsuite-lib/libffi-dg.exp') expected = [ u'Copyright (c) 2003, 2005, 2008 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_libffi_testsuite_lib_target_libpath_exp(self): test_file = self.get_test_loc('ics/libffi-testsuite-lib/target-libpath.exp') expected = [ u'Copyright (c) 2004, 2005, 2007 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_libffi_testsuite_lib_wrapper_exp(self): test_file = self.get_test_loc('ics/libffi-testsuite-lib/wrapper.exp') expected = [ u'Copyright (c) 2004, 2007 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_libgsm_changelog(self): test_file = self.get_test_loc('ics/libgsm/ChangeLog') expected = [ u'Copyright 1992 by Jutta Degener and Carsten Bormann, Technische Universitaet Berlin.', ] check_detection(expected, test_file) def test_ics_libgsm_notice(self): test_file = self.get_test_loc('ics/libgsm/NOTICE') expected = [ u'Copyright 1992, 
1993, 1994 by Jutta Degener and Carsten Bormann, Technische Universitaet Berlin', ] check_detection(expected, test_file) def test_ics_libgsm_readme(self): test_file = self.get_test_loc('ics/libgsm/README') expected = [ u'Copyright 1992 by Jutta Degener and Carsten Bormann, Technische Universitaet Berlin.', ] check_detection(expected, test_file) def test_ics_libgsm_inc_config_h(self): test_file = self.get_test_loc('ics/libgsm-inc/config.h') expected = [ u'Copyright 1992 by Jutta Degener and Carsten Bormann, Technische Universitaet Berlin.', ] check_detection(expected, test_file) def test_ics_libgsm_man_gsm_3(self): test_file = self.get_test_loc('ics/libgsm-man/gsm.3') expected = [ u'Copyright 1992 by Jutta Degener and Carsten Bormann, Technische Universitaet Berlin.', ] check_detection(expected, test_file) def test_ics_libgsm_man_gsm_option_3(self): test_file = self.get_test_loc('ics/libgsm-man/gsm_option.3') expected = [ u'Copyright 1992-1995 by Jutta Degener and Carsten Bormann, Technische Universitaet Berlin.', ] check_detection(expected, test_file) def test_ics_liblzf_license(self): test_file = self.get_test_loc('ics/liblzf/LICENSE') expected = [ u'Copyright (c) 2000-2009 Marc Alexander Lehmann <schmorp@schmorp.de>', ] check_detection(expected, test_file) def test_ics_liblzf_lzf_c(self): test_file = self.get_test_loc('ics/liblzf/lzf.c') expected = [ u'Copyright (c) 2006 Stefan Traby <stefan@hello-penguin.com>', ] check_detection(expected, test_file) def test_ics_liblzf_lzf_h(self): test_file = self.get_test_loc('ics/liblzf/lzf.h') expected = [ u'Copyright (c) 2000-2008 Marc Alexander Lehmann <schmorp@schmorp.de>', ] check_detection(expected, test_file) def test_ics_liblzf_lzf_c_c(self): test_file = self.get_test_loc('ics/liblzf/lzf_c.c') expected = [ u'Copyright (c) 2000-2010 Marc Alexander Lehmann <schmorp@schmorp.de>', ] check_detection(expected, test_file) def test_ics_liblzf_lzfp_h(self): test_file = self.get_test_loc('ics/liblzf/lzfP.h') expected = [ 
u'Copyright (c) 2000-2007 Marc Alexander Lehmann <schmorp@schmorp.de>', ] check_detection(expected, test_file) def test_ics_liblzf_cs_clzf_cs(self): test_file = self.get_test_loc('ics/liblzf-cs/CLZF.cs') expected = [ u'Copyright (c) 2005 Oren J. Maurice <oymaurice@hazorea.org.il>', ] check_detection(expected, test_file) def test_ics_libnfc_nxp_inc_nfc_custom_config_h(self): test_file = self.get_test_loc('ics/libnfc-nxp-inc/nfc_custom_config.h') expected = [ u'Copyright (c) 2010 NXP Semiconductors', ] check_detection(expected, test_file) def test_ics_libnl_headers_netlink_generic_h(self): test_file = self.get_test_loc('ics/libnl-headers/netlink-generic.h') expected = [ u'Copyright (c) 2003-2006 Thomas Graf <tgraf@suug.ch>', ] check_detection(expected, test_file) def test_ics_libnl_headers_netlink_local_h(self): test_file = self.get_test_loc('ics/libnl-headers/netlink-local.h') expected = [ u'Copyright (c) 2003-2008 Thomas Graf <tgraf@suug.ch>', ] check_detection(expected, test_file) def test_ics_libnl_headers_netlink_errno_h(self): test_file = self.get_test_loc('ics/libnl-headers-netlink/errno.h') expected = [ u'Copyright (c) 2008 Thomas Graf <tgraf@suug.ch>', ] check_detection(expected, test_file) def test_ics_libnl_headers_netlink_object_api_h(self): test_file = self.get_test_loc('ics/libnl-headers-netlink/object-api.h') expected = [ u'Copyright (c) 2003-2007 Thomas Graf <tgraf@suug.ch>', ] check_detection(expected, test_file) def test_ics_libnl_headers_netlink_cli_utils_h(self): test_file = self.get_test_loc('ics/libnl-headers-netlink-cli/utils.h') expected = [ u'Copyright (c) 2003-2009 Thomas Graf <tgraf@suug.ch>', ] check_detection(expected, test_file) def test_ics_libnl_headers_netlink_netfilter_ct_h(self): test_file = self.get_test_loc('ics/libnl-headers-netlink-netfilter/ct.h') expected = [ u'Copyright (c) 2003-2008 Thomas Graf <tgraf@suug.ch>', u'Copyright (c) 2007 Philip Craig <philipc@snapgear.com>', u'Copyright (c) 2007 Secure Computing Corporation', ] 
check_detection(expected, test_file) def test_ics_libnl_headers_netlink_route_addr_h(self): test_file = self.get_test_loc('ics/libnl-headers-netlink-route/addr.h') expected = [ u'Copyright (c) 2003-2008 Thomas Graf <tgraf@suug.ch>', u'Copyright (c) 2003-2006 Baruch Even <baruch@ev-en.org>, Mediatrix Telecom, inc.', ] check_detection(expected, test_file) @expectedFailure def test_ics_libnl_headers_netlink_route_addr_h_trail_email(self): test_file = self.get_test_loc('ics/libnl-headers-netlink-route/addr.h') expected = [ u'Copyright (c) 2003-2008 Thomas Graf <tgraf@suug.ch>', u'Copyright (c) 2003-2006 Baruch Even <baruch@ev-en.org>, Mediatrix Telecom, inc. <ericb@mediatrix.com>', ] check_detection(expected, test_file) def test_ics_libpcap_aclocal_m4_trail_name(self): test_file = self.get_test_loc('ics/libpcap/aclocal.m4') expected = [ u'Copyright (c) 1995, 1996, 1997, 1998 The Regents of the University of California.', ] check_detection(expected, test_file) def test_ics_libpcap_atmuni31_h(self): test_file = self.get_test_loc('ics/libpcap/atmuni31.h') expected = [ u'Copyright (c) 1997 Yen Yen Lim and North Dakota State University', ] check_detection(expected, test_file) def test_ics_libpcap_bpf_dump_c(self): test_file = self.get_test_loc('ics/libpcap/bpf_dump.c') expected = [ u'Copyright (c) 1992, 1993, 1994, 1995, 1996 The Regents of the University of California.', ] check_detection(expected, test_file) def test_ics_libpcap_bpf_image_c(self): test_file = self.get_test_loc('ics/libpcap/bpf_image.c') expected = [ u'Copyright (c) 1990, 1991, 1992, 1994, 1995, 1996 The Regents of the University of California.', ] check_detection(expected, test_file) def test_ics_libpcap_config_guess(self): test_file = self.get_test_loc('ics/libpcap/config.guess') expected = [ u'Copyright (c) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.', u'Copyright (c) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001 Free Software 
Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_libpcap_configure_in_trail_name(self): test_file = self.get_test_loc('ics/libpcap/configure.in') expected = [ u'Copyright (c) 1994, 1995, 1996, 1997 The Regents of the University of California.', ] check_detection(expected, test_file) def test_ics_libpcap_etherent_c(self): test_file = self.get_test_loc('ics/libpcap/etherent.c') expected = [ u'Copyright (c) 1990, 1993, 1994, 1995, 1996 The Regents of the University of California.', ] check_detection(expected, test_file) def test_ics_libpcap_ethertype_h(self): test_file = self.get_test_loc('ics/libpcap/ethertype.h') expected = [ u'Copyright (c) 1993, 1994, 1996 The Regents of the University of California.', ] check_detection(expected, test_file) def test_ics_libpcap_fad_getad_c(self): test_file = self.get_test_loc('ics/libpcap/fad-getad.c') expected = [ u'Copyright (c) 1994, 1995, 1996, 1997, 1998 The Regents of the University of California.', ] check_detection(expected, test_file) def test_ics_libpcap_fad_win32_c(self): test_file = self.get_test_loc('ics/libpcap/fad-win32.c') expected = [ u'Copyright (c) 2002 - 2005 NetGroup, Politecnico di Torino (Italy)', u'Copyright (c) 2005 - 2006 CACE Technologies', ] check_detection(expected, test_file) @expectedFailure def test_ics_libpcap_fad_win32_c_trail_name_trail_place(self): test_file = self.get_test_loc('ics/libpcap/fad-win32.c') expected = [ u'Copyright (c) 2002 - 2005 NetGroup, Politecnico di Torino (Italy)', u'Copyright (c) 2005 - 2006 CACE Technologies, Davis (California)', ] check_detection(expected, test_file) def test_ics_libpcap_gencode_c(self): test_file = self.get_test_loc('ics/libpcap/gencode.c') expected = [ u'Copyright (c) 1990, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998 The Regents of the University of California.', ] check_detection(expected, test_file) def test_ics_libpcap_gencode_h(self): test_file = self.get_test_loc('ics/libpcap/gencode.h') expected = [ u'Copyright (c) 1990, 
1991, 1992, 1993, 1994, 1995, 1996 The Regents of the University of California.', u'Copyright (c) 1997 Yen Yen Lim and North Dakota State University', ] check_detection(expected, test_file) def test_ics_libpcap_grammar_c(self): test_file = self.get_test_loc('ics/libpcap/grammar.c') expected = [ u'Copyright (c) 1984, 1989, 1990, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.', u'Copyright (c) 1988, 1989, 1990, 1991, 1992, 1993, 1994, 1995, 1996 The Regents of the University of California.', ] check_detection(expected, test_file) def test_ics_libpcap_llc_h(self): test_file = self.get_test_loc('ics/libpcap/llc.h') expected = [ u'Copyright (c) 1993, 1994, 1997 The Regents of the University of California.', ] check_detection(expected, test_file) def test_ics_libpcap_makefile_in(self): test_file = self.get_test_loc('ics/libpcap/Makefile.in') expected = [ u'Copyright (c) 1993, 1994, 1995, 1996 The Regents of the University of California.', ] check_detection(expected, test_file) def test_ics_libpcap_mkdep(self): test_file = self.get_test_loc('ics/libpcap/mkdep') expected = [ u'Copyright (c) 1994, 1996 The Regents of the University of California.', ] check_detection(expected, test_file) def test_ics_libpcap_nlpid_h(self): test_file = self.get_test_loc('ics/libpcap/nlpid.h') expected = [ u'Copyright (c) 1996 Juniper Networks, Inc.', ] check_detection(expected, test_file) def test_ics_libpcap_optimize_c(self): test_file = self.get_test_loc('ics/libpcap/optimize.c') expected = [ u'Copyright (c) 1988, 1989, 1990, 1991, 1993, 1994, 1995, 1996 The Regents of the University of California.', ] check_detection(expected, test_file) def test_ics_libpcap_pcap_3_trail_name(self): test_file = self.get_test_loc('ics/libpcap/pcap.3') expected = [ u'Copyright (c) 1994, 1996, 1997 The Regents of the University of California.', ] check_detection(expected, test_file) def test_ics_libpcap_pcap_c(self): test_file = self.get_test_loc('ics/libpcap/pcap.c') expected = [ 
u'Copyright (c) 1993, 1994, 1995, 1996, 1997, 1998 The Regents of the University of California.', ] check_detection(expected, test_file) def test_ics_libpcap_pcap_h(self): test_file = self.get_test_loc('ics/libpcap/pcap.h') expected = [ u'Copyright (c) 1993, 1994, 1995, 1996, 1997 The Regents of the University of California.', ] check_detection(expected, test_file) def test_ics_libpcap_pcap_bpf_c(self): test_file = self.get_test_loc('ics/libpcap/pcap-bpf.c') expected = [ u'Copyright (c) 1993, 1994, 1995, 1996, 1998 The Regents of the University of California.', ] check_detection(expected, test_file) def test_ics_libpcap_pcap_bpf_h(self): test_file = self.get_test_loc('ics/libpcap/pcap-bpf.h') expected = [ u'Copyright (c) 1990, 1991, 1992, 1993, 1994, 1995, 1996, 1997 The Regents of the University of California.', ] check_detection(expected, test_file) def test_ics_libpcap_pcap_dlpi_c(self): test_file = self.get_test_loc('ics/libpcap/pcap-dlpi.c') expected = [ u'Copyright (c) 1993, 1994, 1995, 1996, 1997 The Regents of the University of California.', ] check_detection(expected, test_file) def test_ics_libpcap_pcap_int_h(self): test_file = self.get_test_loc('ics/libpcap/pcap-int.h') expected = [ u'Copyright (c) 1994, 1995, 1996 The Regents of the University of California.', ] check_detection(expected, test_file) def test_ics_libpcap_pcap_linux_c(self): test_file = self.get_test_loc('ics/libpcap/pcap-linux.c') expected = [ u'Copyright (c) 2000 Torsten Landschoff <torsten@debian.org> Sebastian Krahmer <krahmer@cs.uni-potsdam.de>', ] check_detection(expected, test_file) def test_ics_libpcap_pcap_namedb_h(self): test_file = self.get_test_loc('ics/libpcap/pcap-namedb.h') expected = [ u'Copyright (c) 1994, 1996 The Regents of the University of California.', ] check_detection(expected, test_file) def test_ics_libpcap_pcap_nit_c(self): test_file = self.get_test_loc('ics/libpcap/pcap-nit.c') expected = [ u'Copyright (c) 1990, 1991, 1992, 1993, 1994, 1995, 1996 The Regents of 
the University of California.', ] check_detection(expected, test_file) def test_ics_libpcap_pcap_nit_h(self): test_file = self.get_test_loc('ics/libpcap/pcap-nit.h') expected = [ u'Copyright (c) 1990, 1994 The Regents of the University of California.', ] check_detection(expected, test_file) def test_ics_libpcap_pcap_null_c(self): test_file = self.get_test_loc('ics/libpcap/pcap-null.c') expected = [ u'Copyright (c) 1994, 1995, 1996 The Regents of the University of California.', ] check_detection(expected, test_file) def test_ics_libpcap_pcap_stdinc_h_trail_name(self): test_file = self.get_test_loc('ics/libpcap/pcap-stdinc.h') expected = [ u'Copyright (c) 2002 - 2003 NetGroup, Politecnico di Torino (Italy)', ] check_detection(expected, test_file) def test_ics_libpcap_pcap_win32_c(self): test_file = self.get_test_loc('ics/libpcap/pcap-win32.c') expected = [ u'Copyright (c) 1999 - 2005 NetGroup, Politecnico di Torino (Italy)', u'Copyright (c) 2005 - 2007 CACE Technologies', ] check_detection(expected, test_file) @expectedFailure def test_ics_libpcap_pcap_win32_c_trail_name_trail_place(self): test_file = self.get_test_loc('ics/libpcap/pcap-win32.c') expected = [ u'Copyright (c) 1999 - 2005 NetGroup, Politecnico di Torino (Italy)', u'Copyright (c) 2005 - 2007 CACE Technologies, Davis (California)', ] check_detection(expected, test_file) def test_ics_libpcap_ppp_h(self): test_file = self.get_test_loc('ics/libpcap/ppp.h') expected = [ u'Copyright 1989 by Carnegie Mellon.', ] check_detection(expected, test_file) def test_ics_libpcap_scanner_c(self): test_file = self.get_test_loc('ics/libpcap/scanner.c') expected = [ u'Copyright (c) 1988, 1989, 1990, 1991, 1992, 1993, 1994, 1995, 1996, 1997 The Regents of the University of California.', ] check_detection(expected, test_file) def test_ics_libpcap_tokdefs_h(self): test_file = self.get_test_loc('ics/libpcap/tokdefs.h') expected = [ u'Copyright (c) 1984, 1989, 1990, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, 
Inc.', ] check_detection(expected, test_file) def test_ics_libpcap_doc_pcap_html(self): test_file = self.get_test_loc('ics/libpcap-doc/pcap.html') expected = [ u'Copyright (c) The Internet Society (2004).', u'Copyright (c) The Internet Society (2004).', ] check_detection(expected, test_file) def test_ics_libpcap_doc_pcap_txt(self): test_file = self.get_test_loc('ics/libpcap-doc/pcap.txt') expected = [ u'Copyright (c) The Internet Society (2004).', u'Full Copyright Statement', u'Copyright (c) The Internet Society (2004).', ] check_detection(expected, test_file) def test_ics_libpcap_lbl_os_sunos4_h(self): test_file = self.get_test_loc('ics/libpcap-lbl/os-sunos4.h') expected = [ u'Copyright (c) 1989, 1990, 1993, 1994, 1995, 1996 The Regents of the University of California.', ] check_detection(expected, test_file) def test_ics_libpcap_lbl_os_ultrix4_h(self): test_file = self.get_test_loc('ics/libpcap-lbl/os-ultrix4.h') expected = [ u'Copyright (c) 1990, 1993, 1994, 1995, 1996 The Regents of the University of California.', ] check_detection(expected, test_file) def test_ics_libpcap_missing_snprintf_c(self): test_file = self.get_test_loc('ics/libpcap-missing/snprintf.c') expected = [ u'Copyright (c) 1995-1999 Kungliga Tekniska Hogskolan Royal Institute of Technology', ] check_detection(expected, test_file) @expectedFailure def test_ics_libpcap_missing_snprintf_c_trail_place(self): test_file = self.get_test_loc('ics/libpcap-missing/snprintf.c') expected = [ u'Copyright (c) 1995-1999 Kungliga Tekniska Hogskolan (Royal Institute of Technology, Stockholm, Sweden).', ] check_detection(expected, test_file) def test_ics_libvpx_args_c(self): test_file = self.get_test_loc('ics/libvpx/args.c') expected = [ u'Copyright (c) 2010 The WebM project', ] check_detection(expected, test_file) def test_ics_libvpx_docs_mk(self): test_file = self.get_test_loc('ics/libvpx/docs.mk') expected = [ u'Copyright (c) 2010 The WebM project', ] check_detection(expected, test_file) def 
test_ics_libvpx_license(self): test_file = self.get_test_loc('ics/libvpx/LICENSE') expected = [ u'Copyright (c) 2010, Google Inc.', ] check_detection(expected, test_file) def test_ics_libvpx_y4minput_c(self): test_file = self.get_test_loc('ics/libvpx/y4minput.c') expected = [ u'Copyright (c) 2010 The WebM project', u'Copyright (c) 2002-2010 The Xiph.Org Foundation', ] check_detection(expected, test_file) def test_ics_libvpx_build_x86_msvs_obj_int_extract_bat(self): test_file = self.get_test_loc('ics/libvpx-build-x86-msvs/obj_int_extract.bat') expected = [ u'Copyright (c) 2011 The WebM project', ] check_detection(expected, test_file) def test_ics_libvpx_examples_includes_asciimathphp_2_0_htmlmathml_js(self): test_file = self.get_test_loc('ics/libvpx-examples-includes-ASCIIMathPHP-2.0/htmlMathML.js') expected = [ u'(c) Peter Jipsen', ] check_detection(expected, test_file) def test_ics_libvpx_examples_includes_geshi_docs_geshi_doc_html(self): test_file = self.get_test_loc('ics/libvpx-examples-includes-geshi-docs/geshi-doc.html') expected = [ u'(c) 2004 - 2007 Nigel McNie, 2007 - 2008 Benny Baumann', u'(c) 2004 - 2007 Nigel McNie, 2007 - 2008 Benny Baumann', u'(c) 2004 - 2007 Nigel McNie, 2007 - 2008 Benny Baumann', u'(c) 2004 - 2007 Nigel McNie, 2007 - 2008 Benny Baumann', u'(c) 2004 - 2007 Nigel McNie, 2007 - 2008 Benny Baumann', u'Copyright (c) 2004 Nigel McNie', u"Copyright (c) 2008 &lt name&gt (&lt website URL&gt ) <span class coMULTI'>&nbsp", ] check_detection(expected, test_file) def test_ics_libvpx_examples_includes_geshi_docs_geshi_doc_txt(self): test_file = self.get_test_loc('ics/libvpx-examples-includes-geshi-docs/geshi-doc.txt') expected = [ u'Copyright (c) 2004 - 2007 Nigel McNie, 2007 - 2008 Benny Baumann Email nigel@geshi.org', u'Copyright (c) 2004 Nigel McNie', u'Copyright (c) 2004 ( )', ] check_detection(expected, test_file) @expectedFailure def test_ics_libvpx_examples_includes_geshi_docs_geshi_doc_txt_trail_email_trail_url_misc(self): test_file = 
self.get_test_loc('ics/libvpx-examples-includes-geshi-docs/geshi-doc.txt') expected = [ u'Copyright (c) 2004 - 2007 Nigel McNie, 2007 - 2008 Benny Baumann Email nigel@geshi.org, BenBE@omorphia.de', u'Copyright: (c) 2004 Nigel McNie (http://qbnz.com/highlighter/)', u'Copyright: (c) 2004 <name> (<website URL>)', ] check_detection(expected, test_file) def test_ics_libvpx_examples_includes_geshi_docs_phpdoc_ini(self): test_file = self.get_test_loc('ics/libvpx-examples-includes-geshi-docs/phpdoc.ini') expected = [ u'Copyright 2002, Greg Beaver <cellog@users.sourceforge.net>', ] check_detection(expected, test_file) def test_ics_libvpx_examples_includes_html_toc_0_91_toc_pod(self): test_file = self.get_test_loc('ics/libvpx-examples-includes-HTML-Toc-0.91/Toc.pod') expected = [ u'Copyright (c) 2001 Freddy Vulto.', ] check_detection(expected, test_file) def test_ics_libvpx_examples_includes_php_markdown_extra_1_2_3_license_text(self): test_file = self.get_test_loc('ics/libvpx-examples-includes-PHP-Markdown-Extra-1.2.3/License.text') expected = [ u'Copyright (c) 2004-2008 Michel Fortin', u'Copyright (c) 2003-2006 John Gruber', ] check_detection(expected, test_file) def test_ics_libvpx_examples_includes_php_markdown_extra_1_2_3_markdown_php(self): test_file = self.get_test_loc('ics/libvpx-examples-includes-PHP-Markdown-Extra-1.2.3/markdown.php') expected = [ u'Copyright (c) 2004-2008 Michel Fortin', u'Copyright (c) 2004-2006 John Gruber', u'Copyright (c) 2004-2008 Michel Fortin', u'Copyright (c) 2003-2006 John Gruber', ] check_detection(expected, test_file) def test_ics_libvpx_examples_includes_php_markdown_extra_1_2_3_php_markdown_extra_readme_text(self): test_file = self.get_test_loc('ics/libvpx-examples-includes-PHP-Markdown-Extra-1.2.3/PHP Markdown Extra Readme.text') expected = [ u'Copyright (c) 2004-2005 Michel Fortin', u'Copyright (c) 2003-2005 John Gruber', ] check_detection(expected, test_file) def 
test_ics_libvpx_examples_includes_php_smartypants_1_5_1e_php_smartypants_readme_txt(self): test_file = self.get_test_loc('ics/libvpx-examples-includes-PHP-SmartyPants-1.5.1e/PHP SmartyPants Readme.txt') expected = [ u'Copyright (c) 2005 Michel Fortin', u'Copyright (c) 2003-2004 John Gruber', ] check_detection(expected, test_file) def test_ics_libvpx_examples_includes_php_smartypants_1_5_1e_smartypants_php(self): test_file = self.get_test_loc('ics/libvpx-examples-includes-PHP-SmartyPants-1.5.1e/smartypants.php') expected = [ u'Copyright (c) 2003-2004 John Gruber', u'Copyright (c) 2004-2005 Michel Fortin', u'Copyright (c) 2003 John Gruber', u'Copyright (c) 2004-2005 Michel Fortin', ] check_detection(expected, test_file) def test_ics_libvpx_libmkv_ebmlids_h(self): test_file = self.get_test_loc('ics/libvpx-libmkv/EbmlIDs.h') expected = [ u'Copyright (c) 2010 The WebM project', ] check_detection(expected, test_file) def test_ics_libvpx_nestegg_license(self): test_file = self.get_test_loc('ics/libvpx-nestegg/LICENSE') expected = [ u'Copyright (c) 2010 Mozilla Foundation', ] check_detection(expected, test_file) def test_ics_libvpx_nestegg_halloc_halloc_h(self): test_file = self.get_test_loc('ics/libvpx-nestegg-halloc/halloc.h') expected = [ u'Copyright (c) 2004-2010 Alex Pankratov.', ] check_detection(expected, test_file) def test_ics_libvpx_nestegg_halloc_readme(self): test_file = self.get_test_loc('ics/libvpx-nestegg-halloc/README') expected = [ u'Copyright (c) 2004-2010, Alex Pankratov (ap@swapped.cc).', ] check_detection(expected, test_file) def test_ics_libvpx_nestegg_halloc_src_halloc_c(self): test_file = self.get_test_loc('ics/libvpx-nestegg-halloc-src/halloc.c') expected = [ u'Copyright (c) 2004i-2010 Alex Pankratov.', ] check_detection(expected, test_file) def test_ics_libvpx_nestegg_include_nestegg_nestegg_h(self): test_file = self.get_test_loc('ics/libvpx-nestegg-include-nestegg/nestegg.h') expected = [ u'Copyright (c) 2010 Mozilla Foundation', ] 
check_detection(expected, test_file) def test_ics_libvpx_nestegg_m4_pkg_m4(self): test_file = self.get_test_loc('ics/libvpx-nestegg-m4/pkg.m4') expected = [ u'Copyright (c) 2004 Scott James Remnant <scott@netsplit.com>.', ] check_detection(expected, test_file) def test_ics_libvpx_vp8_common_asm_com_offsets_c(self): test_file = self.get_test_loc('ics/libvpx-vp8-common/asm_com_offsets.c') expected = [ u'Copyright (c) 2011 The WebM project', ] check_detection(expected, test_file) def test_ics_libxml2_dict_c(self): test_file = self.get_test_loc('ics/libxml2/dict.c') expected = [ u'Copyright (c) 2003 Daniel Veillard.', ] check_detection(expected, test_file) def test_ics_libxml2_hash_c(self): test_file = self.get_test_loc('ics/libxml2/hash.c') expected = [ u'Copyright (c) 2000 Bjorn Reese and Daniel Veillard.', ] check_detection(expected, test_file) def test_ics_libxml2_list_c(self): test_file = self.get_test_loc('ics/libxml2/list.c') expected = [ u'Copyright (c) 2000 Gary Pennington and Daniel Veillard.', ] check_detection(expected, test_file) def test_ics_libxml2_notice(self): test_file = self.get_test_loc('ics/libxml2/NOTICE') expected = [ u'Copyright (c) 1998-2003 Daniel Veillard.', ] check_detection(expected, test_file) def test_ics_libxml2_trio_c(self): test_file = self.get_test_loc('ics/libxml2/trio.c') expected = [ u'Copyright (c) 1998 Bjorn Reese and Daniel Stenberg.', ] check_detection(expected, test_file) def test_ics_libxml2_triodef_h(self): test_file = self.get_test_loc('ics/libxml2/triodef.h') expected = [ u'Copyright (c) 2001 Bjorn Reese <breese@users.sourceforge.net>', ] check_detection(expected, test_file) def test_ics_libxml2_triop_h(self): test_file = self.get_test_loc('ics/libxml2/triop.h') expected = [ u'Copyright (c) 2000 Bjorn Reese and Daniel Stenberg.', ] check_detection(expected, test_file) def test_ics_libxml2_triostr_c(self): test_file = self.get_test_loc('ics/libxml2/triostr.c') expected = [ u'Copyright (c) 2001 Bjorn Reese and Daniel 
Stenberg.', ] check_detection(expected, test_file) def test_ics_libxslt_copyright(self): test_file = self.get_test_loc('ics/libxslt/Copyright') expected = [ u'Copyright (c) 2001-2002 Daniel Veillard.', u'Copyright (c) 2001-2002 Thomas Broyer, Charlie Bozeman and Daniel Veillard.', ] check_detection(expected, test_file) def test_ics_lohit_fonts_notice(self): test_file = self.get_test_loc('ics/lohit-fonts/NOTICE') expected = [ u'Copyright 2011 Lohit Fonts Project contributors', ] check_detection(expected, test_file) @expectedFailure def test_ics_lohit_fonts_notice_trail_url(self): test_file = self.get_test_loc('ics/lohit-fonts/NOTICE') expected = [ u'Copyright 2011 Lohit Fonts Project contributors <http://fedorahosted.org/lohit>', ] check_detection(expected, test_file) def test_ics_lohit_fonts_lohit_bengali_ttf_copyright(self): test_file = self.get_test_loc('ics/lohit-fonts-lohit-bengali-ttf/COPYRIGHT') expected = [ u'Copyright 2011 Lohit Fonts Project contributors.', ] check_detection(expected, test_file) @expectedFailure def test_ics_lohit_fonts_lohit_bengali_ttf_copyright_trail_url(self): test_file = self.get_test_loc('ics/lohit-fonts-lohit-bengali-ttf/COPYRIGHT') expected = [ u'Copyright 2011 Lohit Fonts Project contributors. 
<http://fedorahosted.org/lohit>', ] check_detection(expected, test_file) def test_ics_markdown_notice(self): test_file = self.get_test_loc('ics/markdown/NOTICE') expected = [ u'Copyright 2007, 2008 The Python Markdown Project', u'Copyright 2004, 2005, 2006 Yuri Takhteyev', u'Copyright 2004 Manfred Stienstra', ] check_detection(expected, test_file) def test_ics_markdown_bin_markdown(self): test_file = self.get_test_loc('ics/markdown-bin/markdown') expected = [ u'Copyright 2007, 2008 The Python Markdown Project', u'Copyright 2004, 2005, 2006 Yuri Takhteyev', u'Copyright 2004 Manfred Stienstra', ] check_detection(expected, test_file) def test_ics_markdown_markdown_html4_py(self): test_file = self.get_test_loc('ics/markdown-markdown/html4.py') expected = [ u'Copyright (c) 1999-2007 by Fredrik Lundh.', u'Copyright (c) 1999-2007 by Fredrik Lundh', ] check_detection(expected, test_file) def test_ics_markdown_markdown_extensions_abbr_py(self): test_file = self.get_test_loc('ics/markdown-markdown-extensions/abbr.py') expected = [ u'Copyright 2007-2008 Waylan Limberg', ] check_detection(expected, test_file) @expectedFailure def test_ics_markdown_markdown_extensions_abbr_py_trail_url(self): test_file = self.get_test_loc('ics/markdown-markdown-extensions/abbr.py') expected = [ u'Copyright 2007-2008 [Waylan Limberg](http://achinghead.com/) [Seemant Kulleen](http://www.kulleen.org/)', ] check_detection(expected, test_file) def test_ics_markdown_markdown_extensions_codehilite_py(self): test_file = self.get_test_loc('ics/markdown-markdown-extensions/codehilite.py') expected = [ u'Copyright 2006-2008 Waylan Limberg', ] check_detection(expected, test_file) @expectedFailure def test_ics_markdown_markdown_extensions_codehilite_py_trail_url(self): test_file = self.get_test_loc('ics/markdown-markdown-extensions/codehilite.py') expected = [ u'Copyright 2006-2008 [Waylan Limberg](http://achinghead.com/).', ] check_detection(expected, test_file) def 
test_ics_markdown_markdown_extensions_def_list_py(self): test_file = self.get_test_loc('ics/markdown-markdown-extensions/def_list.py') expected = [ u'Copyright 2008 - Waylan Limberg', ] check_detection(expected, test_file) @expectedFailure def test_ics_markdown_markdown_extensions_def_list_py_trail_url(self): test_file = self.get_test_loc('ics/markdown-markdown-extensions/def_list.py') expected = [ u'Copyright 2008 - [Waylan Limberg](http://achinghead.com)', ] check_detection(expected, test_file) def test_ics_markdown_markdown_extensions_html_tidy_py(self): test_file = self.get_test_loc('ics/markdown-markdown-extensions/html_tidy.py') expected = [ u'Copyright (c) 2008 Waylan Limberg', ] check_detection(expected, test_file) @expectedFailure def test_ics_markdown_markdown_extensions_html_tidy_py_trail_url(self): test_file = self.get_test_loc('ics/markdown-markdown-extensions/html_tidy.py') expected = [ u'Copyright (c)2008 [Waylan Limberg](http://achinghead.com)', ] check_detection(expected, test_file) def test_ics_markdown_markdown_extensions_tables_py(self): test_file = self.get_test_loc('ics/markdown-markdown-extensions/tables.py') expected = [ u'Copyright 2009 - Waylan Limberg', ] check_detection(expected, test_file) @expectedFailure def test_ics_markdown_markdown_extensions_tables_py_trail_url(self): test_file = self.get_test_loc('ics/markdown-markdown-extensions/tables.py') expected = [ u'Copyright 2009 - [Waylan Limberg](http://achinghead.com)', ] check_detection(expected, test_file) def test_ics_markdown_markdown_extensions_toc_py(self): test_file = self.get_test_loc('ics/markdown-markdown-extensions/toc.py') expected = [ u'(c) 2008 Jack Miller', ] check_detection(expected, test_file) @expectedFailure def test_ics_markdown_markdown_extensions_toc_py_trail_url(self): test_file = self.get_test_loc('ics/markdown-markdown-extensions/toc.py') expected = [ u'(c) 2008 [Jack Miller](http://codezen.org)', ] check_detection(expected, test_file) def 
test_ics_mesa3d_notice(self): test_file = self.get_test_loc('ics/mesa3d/NOTICE') expected = [ u'Copyright (c) 1999-2008 Brian Paul', u'Copyright (c) 2008-1010 Intel Corporation', u'Copyright (c) 2007-2010 VMware, Inc.', u'Copyright (c) 2010 Luca Barbieri', u'Copyright (c) 2006 Alexander Chemeris', u'Copyright 2007,2010,2011 The Android Open Source Project', ] check_detection(expected, test_file) def test_ics_mesa3d_docs_license_html(self): test_file = self.get_test_loc('ics/mesa3d-docs/license.html') expected = [ u'copyrighted by Mark Kilgard', u'Copyright (c) 1999-2007 Brian Paul', ] check_detection(expected, test_file) def test_ics_mesa3d_docs_subset_a_html(self): test_file = self.get_test_loc('ics/mesa3d-docs/subset-A.html') expected = [ u'Copyright (c) 2002-2003 by Tungsten Graphics, Inc., Cedar Park, Texas.', ] check_detection(expected, test_file) def test_ics_mesa3d_include_c99_inttypes_h(self): test_file = self.get_test_loc('ics/mesa3d-include-c99/inttypes.h') expected = [ u'Copyright (c) 2006 Alexander Chemeris', ] check_detection(expected, test_file) def test_ics_mesa3d_include_c99_stdbool_h(self): test_file = self.get_test_loc('ics/mesa3d-include-c99/stdbool.h') expected = [ u'Copyright 2007-2010 VMware, Inc.', ] check_detection(expected, test_file) def test_ics_mesa3d_include_c99_stdint_h(self): test_file = self.get_test_loc('ics/mesa3d-include-c99/stdint.h') expected = [ u'Copyright (c) 2006-2008 Alexander Chemeris', ] check_detection(expected, test_file) def test_ics_mesa3d_include_pixelflinger2_pixelflinger2_interface_h(self): test_file = self.get_test_loc('ics/mesa3d-include-pixelflinger2/pixelflinger2_interface.h') expected = [ u'Copyright 2010, The Android Open Source Project', ] check_detection(expected, test_file) def test_ics_mesa3d_src_glsl_ast_h(self): test_file = self.get_test_loc('ics/mesa3d-src-glsl/ast.h') expected = [ u'Copyright (c) 2009 Intel Corporation', ] check_detection(expected, test_file) def 
test_ics_mesa3d_src_glsl_ast_expr_cpp(self): test_file = self.get_test_loc('ics/mesa3d-src-glsl/ast_expr.cpp') expected = [ u'Copyright (c) 2010 Intel Corporation', ] check_detection(expected, test_file) def test_ics_mesa3d_src_glsl_glsl_compiler_cpp(self): test_file = self.get_test_loc('ics/mesa3d-src-glsl/glsl_compiler.cpp') expected = [ u'Copyright (c) 2008, 2009 Intel Corporation', ] check_detection(expected, test_file) def test_ics_mesa3d_src_glsl_glsl_parser_cpp(self): test_file = self.get_test_loc('ics/mesa3d-src-glsl/glsl_parser.cpp') expected = [ u'Copyright (c) 1984, 1989, 1990, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2009, 2010 Free Software Foundation, Inc.', u'Copyright (c) 2008, 2009 Intel Corporation', ] check_detection(expected, test_file) def test_ics_mesa3d_src_glsl_glsl_parser_h(self): test_file = self.get_test_loc('ics/mesa3d-src-glsl/glsl_parser.h') expected = [ u'Copyright (c) 1984, 1989, 1990, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2009, 2010 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_mesa3d_src_glsl_ir_to_llvm_cpp(self): test_file = self.get_test_loc('ics/mesa3d-src-glsl/ir_to_llvm.cpp') expected = [ u'Copyright (c) 2005-2007 Brian Paul', u'Copyright (c) 2008 VMware, Inc.', u'Copyright (c) 2010 Intel Corporation', u'Copyright (c) 2010 Luca Barbieri', ] check_detection(expected, test_file) def test_ics_mesa3d_src_glsl_list_h(self): test_file = self.get_test_loc('ics/mesa3d-src-glsl/list.h') expected = [ u'Copyright (c) 2008, 2010 Intel Corporation', ] check_detection(expected, test_file) def test_ics_mesa3d_src_glsl_lower_jumps_cpp(self): test_file = self.get_test_loc('ics/mesa3d-src-glsl/lower_jumps.cpp') expected = [ u'Copyright (c) 2010 Luca Barbieri', ] check_detection(expected, test_file) def test_ics_mesa3d_src_glsl_program_h(self): test_file = self.get_test_loc('ics/mesa3d-src-glsl/program.h') expected = [ u'Copyright (c) 1999-2008 Brian Paul', u'Copyright (c) 2009 VMware, Inc.', 
u'Copyright (c) 2010 Intel Corporation', ] check_detection(expected, test_file) def test_ics_mesa3d_src_glsl_strtod_c(self): test_file = self.get_test_loc('ics/mesa3d-src-glsl/strtod.c') expected = [ u'Copyright 2010 VMware, Inc.', ] check_detection(expected, test_file) def test_ics_mesa3d_src_glsl_glcpp_glcpp_lex_c(self): test_file = self.get_test_loc('ics/mesa3d-src-glsl-glcpp/glcpp-lex.c') expected = [ u'Copyright (c) 2010 Intel Corporation', ] check_detection(expected, test_file) def test_ics_mesa3d_src_glsl_glcpp_glcpp_parse_c(self): test_file = self.get_test_loc('ics/mesa3d-src-glsl-glcpp/glcpp-parse.c') expected = [ u'Copyright (c) 1984, 1989, 1990, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2009, 2010 Free Software Foundation, Inc.', u'Copyright (c) 2010 Intel Corporation', ] check_detection(expected, test_file) def test_ics_mesa3d_src_glsl_glcpp_makefile_am(self): test_file = self.get_test_loc('ics/mesa3d-src-glsl-glcpp/Makefile.am') expected = [ u'Copyright (c) 2010 Intel Corporation', ] check_detection(expected, test_file) def test_ics_mesa3d_src_mesa_main_compiler_h(self): test_file = self.get_test_loc('ics/mesa3d-src-mesa-main/compiler.h') expected = [ u'Copyright (c) 1999-2008 Brian Paul', u'Copyright (c) 2009 VMware, Inc.', ] check_detection(expected, test_file) def test_ics_mesa3d_src_mesa_main_config_h(self): test_file = self.get_test_loc('ics/mesa3d-src-mesa-main/config.h') expected = [ u'Copyright (c) 1999-2007 Brian Paul', u'Copyright (c) 2008 VMware, Inc.', ] check_detection(expected, test_file) def test_ics_mesa3d_src_mesa_main_core_h(self): test_file = self.get_test_loc('ics/mesa3d-src-mesa-main/core.h') expected = [ u'Copyright (c) 2010 LunarG Inc.', ] check_detection(expected, test_file) def test_ics_mesa3d_src_mesa_main_debug_h(self): test_file = self.get_test_loc('ics/mesa3d-src-mesa-main/debug.h') expected = [ u'Copyright (c) 1999-2004 Brian Paul', ] check_detection(expected, test_file) def test_ics_mesa3d_src_mesa_main_get_h(self): 
test_file = self.get_test_loc('ics/mesa3d-src-mesa-main/get.h') expected = [ u'Copyright (c) 1999-2001 Brian Paul', ] check_detection(expected, test_file) def test_ics_mesa3d_src_mesa_main_glheader_h(self): test_file = self.get_test_loc('ics/mesa3d-src-mesa-main/glheader.h') expected = [ u'Copyright (c) 1999-2008 Brian Paul', ] check_detection(expected, test_file) def test_ics_mesa3d_src_mesa_main_hash_h(self): test_file = self.get_test_loc('ics/mesa3d-src-mesa-main/hash.h') expected = [ u'Copyright (c) 1999-2006 Brian Paul', ] check_detection(expected, test_file) def test_ics_mesa3d_src_mesa_main_shaderobj_h(self): test_file = self.get_test_loc('ics/mesa3d-src-mesa-main/shaderobj.h') expected = [ u'Copyright (c) 2004-2007 Brian Paul', ] check_detection(expected, test_file) def test_ics_mesa3d_src_mesa_main_simple_list_h(self): test_file = self.get_test_loc('ics/mesa3d-src-mesa-main/simple_list.h') expected = [ u'(c) 1997, Keith Whitwell', u'Copyright (c) 1999-2001 Brian Paul', ] check_detection(expected, test_file) def test_ics_mesa3d_src_mesa_program_hash_table_c(self): test_file = self.get_test_loc('ics/mesa3d-src-mesa-program/hash_table.c') expected = [ u'Copyright (c) 2008 Intel Corporation', ] check_detection(expected, test_file) def test_ics_mesa3d_src_mesa_program_prog_statevars_h(self): test_file = self.get_test_loc('ics/mesa3d-src-mesa-program/prog_statevars.h') expected = [ u'Copyright (c) 1999-2007 Brian Paul', ] check_detection(expected, test_file) def test_ics_mesa3d_src_pixelflinger2_pixelflinger2_cpp(self): test_file = self.get_test_loc('ics/mesa3d-src-pixelflinger2/pixelflinger2.cpp') expected = [ u'Copyright 2010, The Android Open Source Project', ] check_detection(expected, test_file) def test_ics_mesa3d_test_egl_cpp(self): test_file = self.get_test_loc('ics/mesa3d-test/egl.cpp') expected = [ u'Copyright 2007 The Android Open Source Project', ] check_detection(expected, test_file) def test_ics_mesa3d_test_m_matrix_c(self): test_file = 
self.get_test_loc('ics/mesa3d-test/m_matrix.c') expected = [ u'Copyright (c) 1999-2005 Brian Paul', ] check_detection(expected, test_file) def test_ics_mesa3d_test_m_matrix_h(self): test_file = self.get_test_loc('ics/mesa3d-test/m_matrix.h') expected = [ u'Copyright (c) 1999-2005 Brian Paul', ] check_detection(expected, test_file) def test_ics_mksh_android_mk(self): test_file = self.get_test_loc('ics/mksh/Android.mk') expected = [ u'Copyright (c) 2010 Thorsten Glaser <t.glaser@tarent.de>', ] check_detection(expected, test_file) def test_ics_mksh_mkshrc(self): test_file = self.get_test_loc('ics/mksh/mkshrc') expected = [ u'Copyright (c) 2010 Thorsten Glaser <t.glaser@tarent.de>', ] check_detection(expected, test_file) def test_ics_mksh_notice(self): test_file = self.get_test_loc('ics/mksh/NOTICE') expected = [ u'Copyright (c) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 Thorsten Glaser <tg@mirbsd.org>', ] check_detection(expected, test_file) def test_ics_mksh_src_build_sh(self): test_file = self.get_test_loc('ics/mksh-src/Build.sh') expected = [ u'Copyright (c) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 Thorsten Glaser <tg@mirbsd.org>', ] check_detection(expected, test_file) def test_ics_mksh_src_edit_c(self): test_file = self.get_test_loc('ics/mksh-src/edit.c') expected = [ u'Copyright (c) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 Thorsten Glaser <tg@mirbsd.org>', ] check_detection(expected, test_file) def test_ics_mksh_src_funcs_c(self): test_file = self.get_test_loc('ics/mksh-src/funcs.c') expected = [ u'Copyright (c) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 Thorsten Glaser <tg@mirbsd.org>', ] check_detection(expected, test_file) def test_ics_mksh_src_jobs_c(self): test_file = self.get_test_loc('ics/mksh-src/jobs.c') expected = [ u'Copyright (c) 2003, 2004, 2005, 2006, 2007, 2008, 2009 Thorsten Glaser <tg@mirbsd.org>', ] check_detection(expected, test_file) def test_ics_mksh_src_lalloc_c(self): test_file = 
self.get_test_loc('ics/mksh-src/lalloc.c') expected = [ u'Copyright (c) 2009 Thorsten Glaser <tg@mirbsd.org>', ] check_detection(expected, test_file) def test_ics_mksh_src_sh_h(self): test_file = self.get_test_loc('ics/mksh-src/sh.h') expected = [ u'Copyright (c) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 Thorsten Glaser <tg@mirbsd.org>', ] check_detection(expected, test_file) def test_ics_mtpd_l2tp_c(self): test_file = self.get_test_loc('ics/mtpd/l2tp.c') expected = [ u'Copyright (c) 2009 The Android Open Source Project', ] check_detection(expected, test_file) def test_ics_mtpd_notice(self): test_file = self.get_test_loc('ics/mtpd/NOTICE') expected = [ u'Copyright (c) 2009, The Android Open Source Project', ] check_detection(expected, test_file) def test_ics_netperf_module_license_hp(self): test_file = self.get_test_loc('ics/netperf/MODULE_LICENSE_HP') expected = [ u'Copyright (c) 1993 Hewlett-Packard Company', ] check_detection(expected, test_file) def test_ics_netperf_netcpu_kstat10_c(self): test_file = self.get_test_loc('ics/netperf/netcpu_kstat10.c') expected = [ u'(c) Copyright 2005-2007, Hewlett-Packard Company', ] check_detection(expected, test_file) def test_ics_netperf_netcpu_looper_c(self): test_file = self.get_test_loc('ics/netperf/netcpu_looper.c') expected = [ u'(c) Copyright 2005-2007. 
version 2.4.3', ] check_detection(expected, test_file) def test_ics_netperf_netcpu_none_c(self): test_file = self.get_test_loc('ics/netperf/netcpu_none.c') expected = [ u'(c) Copyright 2005, Hewlett-Packard Company', ] check_detection(expected, test_file) def test_ics_netperf_netcpu_procstat_c(self): test_file = self.get_test_loc('ics/netperf/netcpu_procstat.c') expected = [ u'(c) Copyright 2005-2007 version 2.4.3', ] check_detection(expected, test_file) def test_ics_netperf_netlib_c(self): test_file = self.get_test_loc('ics/netperf/netlib.c') expected = [ u'(c) Copyright 1993-2007 Hewlett-Packard Company.', ] check_detection(expected, test_file) def test_ics_netperf_netlib_h(self): test_file = self.get_test_loc('ics/netperf/netlib.h') expected = [ u'Copyright (c) 1993-2005 Hewlett-Packard Company', ] check_detection(expected, test_file) def test_ics_netperf_netperf_c(self): test_file = self.get_test_loc('ics/netperf/netperf.c') expected = [ u'Copyright (c) 1993-2007 Hewlett-Packard Company', u'(c) Copyright 1993-2007 Hewlett-Packard Company.', ] check_detection(expected, test_file) def test_ics_netperf_netserver_c(self): test_file = self.get_test_loc('ics/netperf/netserver.c') expected = [ u'Copyright (c) 1993-2007 Hewlett-Packard Company', u'(c) Copyright 1993-2007 Hewlett-Packard Co.', ] check_detection(expected, test_file) def test_ics_netperf_netsh_h(self): test_file = self.get_test_loc('ics/netperf/netsh.h') expected = [ u'Copyright (c) 1993,1995 Hewlett-Packard Company', ] check_detection(expected, test_file) def test_ics_netperf_nettest_bsd_c(self): test_file = self.get_test_loc('ics/netperf/nettest_bsd.c') expected = [ u'(c) Copyright 1993-2004 Hewlett-Packard Co.', ] check_detection(expected, test_file) def test_ics_netperf_nettest_bsd_h(self): test_file = self.get_test_loc('ics/netperf/nettest_bsd.h') expected = [ u'Copyright (c) 1993-2004 Hewlett-Packard Company', ] check_detection(expected, test_file) def test_ics_netperf_nettest_dlpi_c(self): 
test_file = self.get_test_loc('ics/netperf/nettest_dlpi.c') expected = [ u'(c) Copyright 1993,1995,2004 Hewlett-Packard Co.', ] check_detection(expected, test_file) def test_ics_netperf_nettest_dlpi_h(self): test_file = self.get_test_loc('ics/netperf/nettest_dlpi.h') expected = [ u'Copyright (c) 1993, Hewlett-Packard Company', ] check_detection(expected, test_file) def test_ics_netperf_nettest_sctp_c(self): test_file = self.get_test_loc('ics/netperf/nettest_sctp.c') expected = [ u'(c) Copyright 2005-2007 Hewlett-Packard Co.', ] check_detection(expected, test_file) def test_ics_netperf_nettest_sctp_h(self): test_file = self.get_test_loc('ics/netperf/nettest_sctp.h') expected = [ u'Copyright (c) 1993-2003 Hewlett-Packard Company', ] check_detection(expected, test_file) def test_ics_netperf_nettest_sdp_c(self): test_file = self.get_test_loc('ics/netperf/nettest_sdp.c') expected = [ u'(c) Copyright 2007 Hewlett-Packard Co.', ] check_detection(expected, test_file) def test_ics_netperf_nettest_sdp_h(self): test_file = self.get_test_loc('ics/netperf/nettest_sdp.h') expected = [ u'Copyright (c) 2007 Hewlett-Packard Company', ] check_detection(expected, test_file) def test_ics_netperf_nettest_unix_c(self): test_file = self.get_test_loc('ics/netperf/nettest_unix.c') expected = [ u'(c) Copyright 1994-2007 Hewlett-Packard Co.', ] check_detection(expected, test_file) def test_ics_netperf_nettest_xti_c(self): test_file = self.get_test_loc('ics/netperf/nettest_xti.c') expected = [ u'(c) Copyright 1995-2007 Hewlett-Packard Co.', ] check_detection(expected, test_file) def test_ics_netperf_nettest_xti_h(self): test_file = self.get_test_loc('ics/netperf/nettest_xti.h') expected = [ u'Copyright (c) 1995,2004 Hewlett-Packard Company', ] check_detection(expected, test_file) def test_ics_neven_facedetector_jni_cpp(self): test_file = self.get_test_loc('ics/neven/FaceDetector_jni.cpp') expected = [ u'Copyright (c) 2006 The Android Open Source Project', ] check_detection(expected, 
test_file) def test_ics_neven_notice(self): test_file = self.get_test_loc('ics/neven/NOTICE') expected = [ u'Copyright (c) 2008 The Android Open Source Project', ] check_detection(expected, test_file) def test_ics_nist_sip_java_gov_nist_core_multimap_java(self): test_file = self.get_test_loc('ics/nist-sip-java-gov-nist-core/MultiMap.java') expected = [ u'Copyright 1999-2004 The Apache Software Foundation', ] check_detection(expected, test_file) def test_ics_oauth_core_src_main_java_net_oauth_consumerproperties_java(self): test_file = self.get_test_loc('ics/oauth-core-src-main-java-net-oauth/ConsumerProperties.java') expected = [ u'Copyright 2007 Netflix, Inc.', ] check_detection(expected, test_file) def test_ics_oauth_core_src_main_java_net_oauth_oauthexception_java(self): test_file = self.get_test_loc('ics/oauth-core-src-main-java-net-oauth/OAuthException.java') expected = [ u'Copyright 2008 Google, Inc.', ] check_detection(expected, test_file) def test_ics_oauth_core_src_main_java_net_oauth_oauthmessage_java(self): test_file = self.get_test_loc('ics/oauth-core-src-main-java-net-oauth/OAuthMessage.java') expected = [ u'Copyright 2007, 2008 Netflix, Inc.', ] check_detection(expected, test_file) def test_ics_oauth_core_src_main_java_net_oauth_client_oauthresponsemessage_java(self): test_file = self.get_test_loc('ics/oauth-core-src-main-java-net-oauth-client/OAuthResponseMessage.java') expected = [ u'Copyright 2008 Netflix, Inc.', ] check_detection(expected, test_file) def test_ics_oauth_core_src_main_java_net_oauth_client_httpclient4_httpclient4_java(self): test_file = self.get_test_loc('ics/oauth-core-src-main-java-net-oauth-client-httpclient4/HttpClient4.java') expected = [ u'Copyright 2008 Sean Sullivan', ] check_detection(expected, test_file) def test_ics_oauth_core_src_main_java_net_oauth_signature_rsa_sha1_java(self): test_file = self.get_test_loc('ics/oauth-core-src-main-java-net-oauth-signature/RSA_SHA1.java') expected = [ u'Copyright 2007 Google, Inc.', ] 
check_detection(expected, test_file) def test_ics_opencv_cvjni_cpp(self): test_file = self.get_test_loc('ics/opencv/cvjni.cpp') expected = [ u'Copyright (c) 2006-2009 SIProp Project', ] check_detection(expected, test_file) def test_ics_opencv_license_opencv(self): test_file = self.get_test_loc('ics/opencv/LICENSE_OpenCV') expected = [ u'Copyright (c) 2000-2006, Intel Corporation', ] check_detection(expected, test_file) def test_ics_opencv_notice(self): test_file = self.get_test_loc('ics/opencv/NOTICE') expected = [ u'Copyright (c) 2000-2006, Intel Corporation', u'Copyright (c) 2006-2009 SIProp Project', u'Copyright (c) 1992, 1993 The Regents of the University of California.', u'Copyright (c) 2008, Liu Liu', u'Copyright (c) 2008, Google', u'Copyright (c) 1992, 1993 The Regents of the University of California.', u'Copyright (c) 2002, MD-Mathematische Dienste GmbH Im Defdahl', u'Copyright (c) 2000-2003 Chih-Chung Chang and Chih-Jen Lin', u'Copyright (c) 2000, Intel Corporation', u'Copyright (c) 2000, Intel Corporation', u'Copyright (c) 2002, Intel Corporation', u'Copyright (c) 2008, Xavier Delacour', u'Copyright (c) 2000, Intel Corporation', u'Copyright (c) 2008, Nils Hasler', u'Copyright (c) 2000, Intel Corporation', u'Copyright (c) 2000, Intel Corporation', u'Copyright (c) 2000, Intel Corporation', u'Copyright (c) 2000, Intel Corporation', u'Copyright (c) 1978-1999 Ken Turkowski. <turk@computer.org>', u'Copyright (c) 1981-1999 Ken Turkowski. 
<turk@computer.org>', u'Copyright (c) 1998 Yossi Rubner Computer Science Department, Stanford University', u'Copyright (c) 2006 Simon Perreault', u'Copyright (c) 1995 Intel Corporation.', ] check_detection(expected, test_file) def test_ics_opencv_cvaux_src_cv3dtracker_cpp(self): test_file = self.get_test_loc('ics/opencv-cvaux-src/cv3dtracker.cpp') expected = [ u'Copyright (c) 2002, Intel Corporation', ] check_detection(expected, test_file) def test_ics_opencv_cvaux_src_cvdpstereo_cpp(self): test_file = self.get_test_loc('ics/opencv-cvaux-src/cvdpstereo.cpp') expected = [ u'Copyright (c) 2000, Intel Corporation', ] check_detection(expected, test_file) def test_ics_opencv_cv_include_cv_h(self): test_file = self.get_test_loc('ics/opencv-cv-include/cv.h') expected = [ u'Copyright (c) 2000, Intel Corporation', ] check_detection(expected, test_file) def test_ics_opencv_cv_src_cvkdtree_hpp(self): test_file = self.get_test_loc('ics/opencv-cv-src/_cvkdtree.hpp') expected = [ u'Copyright (c) 2008, Xavier Delacour', ] check_detection(expected, test_file) def test_ics_opencv_cv_src_cvcolor_cpp(self): test_file = self.get_test_loc('ics/opencv-cv-src/cvcolor.cpp') expected = [ u'Copyright (c) 2000, Intel Corporation', u'Copyright (c) 2002, MD-Mathematische Dienste GmbH Im Defdahl', ] check_detection(expected, test_file) def test_ics_opencv_cv_src_cvdistransform_cpp(self): test_file = self.get_test_loc('ics/opencv-cv-src/cvdistransform.cpp') expected = [ u'Copyright (c) 2000, Intel Corporation', u'(c) 2006 by Jay Stavinzky.', ] check_detection(expected, test_file) def test_ics_opencv_cv_src_cvemd_cpp(self): test_file = self.get_test_loc('ics/opencv-cv-src/cvemd.cpp') expected = [ u'Copyright (c) 2000, Intel Corporation', u'Copyright (c) 1998 Yossi Rubner Computer Science Department, Stanford University', ] check_detection(expected, test_file) @expectedFailure def test_ics_opencv_cv_src_cvemd_cpp_trail_email(self): test_file = self.get_test_loc('ics/opencv-cv-src/cvemd.cpp') 
expected = [ u'Copyright (c) 2000, Intel Corporation', u'Copyright (c) 1998 Yossi Rubner Computer Science Department, Stanford University E-Mail: rubner@cs.stanford.edu URL: http://vision.stanford.edu/~rubner', ] check_detection(expected, test_file) def test_ics_opencv_cv_src_cvkdtree_cpp(self): test_file = self.get_test_loc('ics/opencv-cv-src/cvkdtree.cpp') expected = [ u'Copyright (c) 2008, Xavier Delacour', ] check_detection(expected, test_file) def test_ics_opencv_cv_src_cvsmooth_cpp(self): test_file = self.get_test_loc('ics/opencv-cv-src/cvsmooth.cpp') expected = [ u'Copyright (c) 2000, Intel Corporation', u'Copyright (c) 2006 Simon Perreault', ] check_detection(expected, test_file) def test_ics_opencv_cv_src_cvsurf_cpp(self): test_file = self.get_test_loc('ics/opencv-cv-src/cvsurf.cpp') expected = [ u'Copyright (c) 2008, Liu Liu', ] check_detection(expected, test_file) def test_ics_opencv_cxcore_include_cvwimage_h(self): test_file = self.get_test_loc('ics/opencv-cxcore-include/cvwimage.h') expected = [ u'Copyright (c) 2008, Google', ] check_detection(expected, test_file) def test_ics_opencv_cxcore_include_cxmisc_h(self): test_file = self.get_test_loc('ics/opencv-cxcore-include/cxmisc.h') expected = [ u'Copyright (c) 2000, Intel Corporation', u'Copyright (c) 1992, 1993 The Regents of the University of California.', ] check_detection(expected, test_file) def test_ics_opencv_cxcore_include_cxtypes_h(self): test_file = self.get_test_loc('ics/opencv-cxcore-include/cxtypes.h') expected = [ u'Copyright (c) 2000, Intel Corporation', u'Copyright (c) 1995 Intel Corporation.', ] check_detection(expected, test_file) def test_ics_opencv_cxcore_src_cxdatastructs_cpp(self): test_file = self.get_test_loc('ics/opencv-cxcore-src/cxdatastructs.cpp') expected = [ u'Copyright (c) 2000, Intel Corporation', u'Copyright (c) 1992, 1993 The Regents of the University of California.', ] check_detection(expected, test_file) def test_ics_opencv_cxcore_src_cxutils_cpp(self): test_file = 
self.get_test_loc('ics/opencv-cxcore-src/cxutils.cpp') expected = [ u'Copyright (c) 2000, Intel Corporation', u'Copyright (c) 1978-1999 Ken Turkowski. <turk@computer.org>', u'Copyright (c) 1981-1999 Ken Turkowski. <turk@computer.org>', ] check_detection(expected, test_file) def test_ics_opencv_ml_src_mlsvm_cpp(self): test_file = self.get_test_loc('ics/opencv-ml-src/mlsvm.cpp') expected = [ u'Copyright (c) 2000, Intel Corporation', u'Copyright (c) 2000-2003 Chih-Chung Chang and Chih-Jen Lin', ] check_detection(expected, test_file) def test_ics_opencv_otherlibs_highgui_cvcap_socket_cpp(self): test_file = self.get_test_loc('ics/opencv-otherlibs-highgui/cvcap_socket.cpp') expected = [ u'Copyright (c) 2008, Nils Hasler', ] check_detection(expected, test_file) def test_ics_opencv_otherlibs_highgui_grfmt_png_cpp(self): test_file = self.get_test_loc('ics/opencv-otherlibs-highgui/grfmt_png.cpp') expected = [ u'Copyright (c) 2000, Intel Corporation', u'(Copyright (c) 1999-2001 MIYASAKA Masaru)', ] check_detection(expected, test_file) def test_ics_openssl_e_os_h(self): test_file = self.get_test_loc('ics/openssl/e_os.h') expected = [ u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)', ] check_detection(expected, test_file) def test_ics_openssl_e_os2_h(self): test_file = self.get_test_loc('ics/openssl/e_os2.h') expected = [ u'Copyright (c) 1998-2000 The OpenSSL Project.', ] check_detection(expected, test_file) def test_ics_openssl_notice(self): test_file = self.get_test_loc('ics/openssl/NOTICE') expected = [ u'Copyright (c) 1998-2011 The OpenSSL Project.', u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)', ] check_detection(expected, test_file) def test_ics_openssl_apps_app_rand_c(self): test_file = self.get_test_loc('ics/openssl-apps/app_rand.c') expected = [ u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)', u'Copyright (c) 1998-2000 The OpenSSL Project.', ] check_detection(expected, test_file) def test_ics_openssl_apps_apps_c(self): test_file = 
self.get_test_loc('ics/openssl-apps/apps.c') expected = [ u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)', u'Copyright (c) 1998-2001 The OpenSSL Project.', ] check_detection(expected, test_file) def test_ics_openssl_apps_apps_h(self): test_file = self.get_test_loc('ics/openssl-apps/apps.h') expected = [ u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)', u'Copyright (c) 1998-2001 The OpenSSL Project.', ] check_detection(expected, test_file) def test_ics_openssl_apps_asn1pars_c(self): test_file = self.get_test_loc('ics/openssl-apps/asn1pars.c') expected = [ u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)', ] check_detection(expected, test_file) def test_ics_openssl_apps_cms_c(self): test_file = self.get_test_loc('ics/openssl-apps/cms.c') expected = [ u'Copyright (c) 2008 The OpenSSL Project.', ] check_detection(expected, test_file) def test_ics_openssl_apps_ec_c(self): test_file = self.get_test_loc('ics/openssl-apps/ec.c') expected = [ u'Copyright (c) 1998-2005 The OpenSSL Project.', ] check_detection(expected, test_file) def test_ics_openssl_apps_ecparam_c(self): test_file = self.get_test_loc('ics/openssl-apps/ecparam.c') expected = [ u'Copyright (c) 1998-2005 The OpenSSL Project.', u'Copyright 2002 Sun Microsystems, Inc.', ] check_detection(expected, test_file) def test_ics_openssl_apps_engine_c(self): test_file = self.get_test_loc('ics/openssl-apps/engine.c') expected = [ u'Copyright (c) 2000 The OpenSSL Project.', ] check_detection(expected, test_file) def test_ics_openssl_apps_genpkey_c(self): test_file = self.get_test_loc('ics/openssl-apps/genpkey.c') expected = [ u'Copyright (c) 2006 The OpenSSL Project.', ] check_detection(expected, test_file) def test_ics_openssl_apps_nseq_c(self): test_file = self.get_test_loc('ics/openssl-apps/nseq.c') expected = [ u'Copyright (c) 1999 The OpenSSL Project.', ] check_detection(expected, test_file) def test_ics_openssl_apps_openssl_c(self): test_file = 
self.get_test_loc('ics/openssl-apps/openssl.c') expected = [ u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)', u'Copyright (c) 1998-2006 The OpenSSL Project.', ] check_detection(expected, test_file) def test_ics_openssl_apps_pkcs12_c(self): test_file = self.get_test_loc('ics/openssl-apps/pkcs12.c') expected = [ u'Copyright (c) 1999-2006 The OpenSSL Project.', ] check_detection(expected, test_file) def test_ics_openssl_apps_prime_c(self): test_file = self.get_test_loc('ics/openssl-apps/prime.c') expected = [ u'Copyright (c) 2004 The OpenSSL Project.', ] check_detection(expected, test_file) def test_ics_openssl_apps_rand_c(self): test_file = self.get_test_loc('ics/openssl-apps/rand.c') expected = [ u'Copyright (c) 1998-2001 The OpenSSL Project.', ] check_detection(expected, test_file) def test_ics_openssl_apps_s_client_c(self): test_file = self.get_test_loc('ics/openssl-apps/s_client.c') expected = [ u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)', u'Copyright (c) 1998-2006 The OpenSSL Project.', u'Copyright 2005 Nokia.', ] check_detection(expected, test_file) def test_ics_openssl_apps_s_server_c(self): test_file = self.get_test_loc('ics/openssl-apps/s_server.c') expected = [ u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)', u'Copyright (c) 1998-2006 The OpenSSL Project.', u'Copyright 2002 Sun Microsystems, Inc.', u'Copyright 2005 Nokia.', ] check_detection(expected, test_file) def test_ics_openssl_apps_smime_c(self): test_file = self.get_test_loc('ics/openssl-apps/smime.c') expected = [ u'Copyright (c) 1999-2004 The OpenSSL Project.', ] check_detection(expected, test_file) def test_ics_openssl_apps_speed_c(self): test_file = self.get_test_loc('ics/openssl-apps/speed.c') expected = [ u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)', u'Copyright 2002 Sun Microsystems, Inc.', ] check_detection(expected, test_file) def test_ics_openssl_apps_timeouts_h(self): test_file = self.get_test_loc('ics/openssl-apps/timeouts.h') expected = [ 
u'Copyright (c) 1999-2005 The OpenSSL Project.', ] check_detection(expected, test_file) def test_ics_openssl_crypto_cryptlib_c(self): test_file = self.get_test_loc('ics/openssl-crypto/cryptlib.c') expected = [ u'Copyright (c) 1998-2006 The OpenSSL Project.', u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)', u'Copyright 2002 Sun Microsystems, Inc.', ] check_detection(expected, test_file) def test_ics_openssl_crypto_lpdir_nyi_c(self): test_file = self.get_test_loc('ics/openssl-crypto/LPdir_nyi.c') expected = [ u'Copyright (c) 2004, Richard Levitte <richard@levitte.org>', ] check_detection(expected, test_file) def test_ics_openssl_crypto_md32_common_h(self): test_file = self.get_test_loc('ics/openssl-crypto/md32_common.h') expected = [ u'Copyright (c) 1999-2007 The OpenSSL Project.', ] check_detection(expected, test_file) def test_ics_openssl_crypto_mem_clr_c(self): test_file = self.get_test_loc('ics/openssl-crypto/mem_clr.c') expected = [ u'Copyright (c) 2001 The OpenSSL Project.', ] check_detection(expected, test_file) def test_ics_openssl_crypto_o_str_c(self): test_file = self.get_test_loc('ics/openssl-crypto/o_str.c') expected = [ u'Copyright (c) 2003 The OpenSSL Project.', ] check_detection(expected, test_file) def test_ics_openssl_crypto_aes_aes_h(self): test_file = self.get_test_loc('ics/openssl-crypto-aes/aes.h') expected = [ u'Copyright (c) 1998-2002 The OpenSSL Project.', ] check_detection(expected, test_file) def test_ics_openssl_crypto_aes_aes_cfb_c(self): test_file = self.get_test_loc('ics/openssl-crypto-aes/aes_cfb.c') expected = [ u'Copyright (c) 2002-2006 The OpenSSL Project.', ] check_detection(expected, test_file) def test_ics_openssl_crypto_asn1_a_sign_c(self): test_file = self.get_test_loc('ics/openssl-crypto-asn1/a_sign.c') expected = [ u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)', u'Copyright (c) 1998-2003 The OpenSSL Project.', ] check_detection(expected, test_file) def test_ics_openssl_crypto_asn1_asn_mime_c(self): 
test_file = self.get_test_loc('ics/openssl-crypto-asn1/asn_mime.c') expected = [ u'Copyright (c) 1999-2008 The OpenSSL Project.', ] check_detection(expected, test_file) def test_ics_openssl_crypto_asn1_asn_moid_c(self): test_file = self.get_test_loc('ics/openssl-crypto-asn1/asn_moid.c') expected = [ u'Copyright (c) 2001-2004 The OpenSSL Project.', ] check_detection(expected, test_file) def test_ics_openssl_crypto_asn1_asn1_err_c(self): test_file = self.get_test_loc('ics/openssl-crypto-asn1/asn1_err.c') expected = [ u'Copyright (c) 1999-2009 The OpenSSL Project.', ] check_detection(expected, test_file) def test_ics_openssl_crypto_asn1_asn1_gen_c(self): test_file = self.get_test_loc('ics/openssl-crypto-asn1/asn1_gen.c') expected = [ u'Copyright (c) 2002 The OpenSSL Project.', ] check_detection(expected, test_file) def test_ics_openssl_crypto_asn1_asn1t_h(self): test_file = self.get_test_loc('ics/openssl-crypto-asn1/asn1t.h') expected = [ u'Copyright (c) 2000-2005 The OpenSSL Project.', ] check_detection(expected, test_file) def test_ics_openssl_crypto_asn1_tasn_dec_c(self): test_file = self.get_test_loc('ics/openssl-crypto-asn1/tasn_dec.c') expected = [ u'Copyright (c) 2000-2005 The OpenSSL Project.', ] check_detection(expected, test_file) def test_ics_openssl_crypto_asn1_tasn_enc_c(self): test_file = self.get_test_loc('ics/openssl-crypto-asn1/tasn_enc.c') expected = [ u'Copyright (c) 2000-2004 The OpenSSL Project.', ] check_detection(expected, test_file) def test_ics_openssl_crypto_asn1_tasn_prn_c(self): test_file = self.get_test_loc('ics/openssl-crypto-asn1/tasn_prn.c') expected = [ u'Copyright (c) 2000,2005 The OpenSSL Project.', ] check_detection(expected, test_file) def test_ics_openssl_crypto_asn1_x_nx509_c(self): test_file = self.get_test_loc('ics/openssl-crypto-asn1/x_nx509.c') expected = [ u'Copyright (c) 2005 The OpenSSL Project.', ] check_detection(expected, test_file) def test_ics_openssl_crypto_bf_bf_locl_h(self): test_file = 
self.get_test_loc('ics/openssl-crypto-bf/bf_locl.h') expected = [ u'Copyright (c) 1995-1997 Eric Young (eay@cryptsoft.com)', ] check_detection(expected, test_file) def test_ics_openssl_crypto_bf_copyright(self): test_file = self.get_test_loc('ics/openssl-crypto-bf/COPYRIGHT') expected = [ u'Copyright (c) 1995-1997 Eric Young (eay@cryptsoft.com)', ] check_detection(expected, test_file) def test_ics_openssl_crypto_bio_b_print_c(self): test_file = self.get_test_loc('ics/openssl-crypto-bio/b_print.c') expected = [ u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)', u'Copyright Patrick Powell 1995', ] check_detection(expected, test_file) def test_ics_openssl_crypto_bio_bss_bio_c(self): test_file = self.get_test_loc('ics/openssl-crypto-bio/bss_bio.c') expected = [ u'Copyright (c) 1998-2003 The OpenSSL Project.', ] check_detection(expected, test_file) def test_ics_openssl_crypto_bn_bn_h(self): test_file = self.get_test_loc('ics/openssl-crypto-bn/bn.h') expected = [ u'Copyright (c) 1995-1997 Eric Young (eay@cryptsoft.com)', u'Copyright (c) 1998-2006 The OpenSSL Project.', u'Copyright 2002 Sun Microsystems, Inc.', ] check_detection(expected, test_file) def test_ics_openssl_crypto_bn_bn_blind_c(self): test_file = self.get_test_loc('ics/openssl-crypto-bn/bn_blind.c') expected = [ u'Copyright (c) 1998-2006 The OpenSSL Project.', u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)', ] check_detection(expected, test_file) def test_ics_openssl_crypto_bn_bn_ctx_c(self): test_file = self.get_test_loc('ics/openssl-crypto-bn/bn_ctx.c') expected = [ u'Copyright (c) 1998-2004 The OpenSSL Project.', ] check_detection(expected, test_file) def test_ics_openssl_crypto_bn_bn_err_c(self): test_file = self.get_test_loc('ics/openssl-crypto-bn/bn_err.c') expected = [ u'Copyright (c) 1999-2007 The OpenSSL Project.', ] check_detection(expected, test_file) def test_ics_openssl_crypto_bn_bn_exp_c(self): test_file = self.get_test_loc('ics/openssl-crypto-bn/bn_exp.c') expected = [ 
u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)', u'Copyright (c) 1998-2005 The OpenSSL Project.', ] check_detection(expected, test_file) def test_ics_openssl_crypto_bn_bn_gf2m_c(self): test_file = self.get_test_loc('ics/openssl-crypto-bn/bn_gf2m.c') expected = [ u'Copyright 2002 Sun Microsystems, Inc.', u'Copyright (c) 1998-2002 The OpenSSL Project.', ] check_detection(expected, test_file) def test_ics_openssl_crypto_bn_bn_lcl_h(self): test_file = self.get_test_loc('ics/openssl-crypto-bn/bn_lcl.h') expected = [ u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)', u'Copyright (c) 1998-2000 The OpenSSL Project.', ] check_detection(expected, test_file) def test_ics_openssl_crypto_bn_bn_mod_c(self): test_file = self.get_test_loc('ics/openssl-crypto-bn/bn_mod.c') expected = [ u'Copyright (c) 1998-2000 The OpenSSL Project.', u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)', ] check_detection(expected, test_file) def test_ics_openssl_crypto_des_read2pwd_c(self): test_file = self.get_test_loc('ics/openssl-crypto-des/read2pwd.c') expected = [ u'Copyright (c) 2001-2002 The OpenSSL Project.', u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)', ] check_detection(expected, test_file) def test_ics_openssl_crypto_des_readme(self): test_file = self.get_test_loc('ics/openssl-crypto-des/README') expected = [ u'Copyright (c) 1997, Eric Young', ] check_detection(expected, test_file) def test_ics_openssl_crypto_des_rpc_des_h(self): test_file = self.get_test_loc('ics/openssl-crypto-des/rpc_des.h') expected = [ u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)', u'Copyright (c) 1986 by Sun Microsystems, Inc.', ] check_detection(expected, test_file) def test_ics_openssl_crypto_des_asm_des_enc_m4(self): test_file = self.get_test_loc('ics/openssl-crypto-des-asm/des_enc.m4') expected = [ u'Copyright Svend Olaf Mikkelsen.', u'Copyright Eric A. 
Young.', ] check_detection(expected, test_file) def test_ics_openssl_crypto_dsa_dsa_locl_h(self): test_file = self.get_test_loc('ics/openssl-crypto-dsa/dsa_locl.h') expected = [ u'Copyright (c) 2007 The OpenSSL Project.', ] check_detection(expected, test_file) def test_ics_openssl_crypto_ec_ec_h(self): test_file = self.get_test_loc('ics/openssl-crypto-ec/ec.h') expected = [ u'Copyright (c) 1998-2005 The OpenSSL Project.', u'Copyright 2002 Sun Microsystems, Inc.', ] check_detection(expected, test_file) def test_ics_openssl_crypto_ec_ec_asn1_c(self): test_file = self.get_test_loc('ics/openssl-crypto-ec/ec_asn1.c') expected = [ u'Copyright (c) 2000-2003 The OpenSSL Project.', ] check_detection(expected, test_file) def test_ics_openssl_crypto_ec_ec_curve_c(self): test_file = self.get_test_loc('ics/openssl-crypto-ec/ec_curve.c') expected = [ u'Copyright (c) 1998-2004 The OpenSSL Project.', u'Copyright 2002 Sun Microsystems, Inc.', ] check_detection(expected, test_file) def test_ics_openssl_crypto_ec_ec_mult_c(self): test_file = self.get_test_loc('ics/openssl-crypto-ec/ec_mult.c') expected = [ u'Copyright (c) 1998-2007 The OpenSSL Project.', u'Copyright 2002 Sun Microsystems, Inc.', ] check_detection(expected, test_file) def test_ics_openssl_crypto_ec_ec2_mult_c(self): test_file = self.get_test_loc('ics/openssl-crypto-ec/ec2_mult.c') expected = [ u'Copyright 2002 Sun Microsystems, Inc.', u'Copyright (c) 1998-2003 The OpenSSL Project.', ] check_detection(expected, test_file) def test_ics_openssl_crypto_ec_ec2_smpl_c(self): test_file = self.get_test_loc('ics/openssl-crypto-ec/ec2_smpl.c') expected = [ u'Copyright 2002 Sun Microsystems, Inc.', u'Copyright (c) 1998-2005 The OpenSSL Project.', ] check_detection(expected, test_file) def test_ics_openssl_crypto_ec_ecp_mont_c(self): test_file = self.get_test_loc('ics/openssl-crypto-ec/ecp_mont.c') expected = [ u'Copyright (c) 1998-2001 The OpenSSL Project.', u'Copyright 2002 Sun Microsystems, Inc.', ] check_detection(expected, 
test_file) def test_ics_openssl_crypto_ec_ecp_nist_c(self): test_file = self.get_test_loc('ics/openssl-crypto-ec/ecp_nist.c') expected = [ u'Copyright (c) 1998-2003 The OpenSSL Project.', u'Copyright 2002 Sun Microsystems, Inc.', ] check_detection(expected, test_file) def test_ics_openssl_crypto_ec_ecp_smpl_c(self): test_file = self.get_test_loc('ics/openssl-crypto-ec/ecp_smpl.c') expected = [ u'Copyright (c) 1998-2002 The OpenSSL Project.', u'Copyright 2002 Sun Microsystems, Inc.', ] check_detection(expected, test_file) def test_ics_openssl_crypto_ecdh_ecdh_h(self): test_file = self.get_test_loc('ics/openssl-crypto-ecdh/ecdh.h') expected = [ u'Copyright 2002 Sun Microsystems, Inc.', u'Copyright (c) 2000-2002 The OpenSSL Project.', ] check_detection(expected, test_file) def test_ics_openssl_crypto_ecdsa_ecdsatest_c(self): test_file = self.get_test_loc('ics/openssl-crypto-ecdsa/ecdsatest.c') expected = [ u'Copyright (c) 2000-2005 The OpenSSL Project.', u'Copyright 2002 Sun Microsystems, Inc.', ] check_detection(expected, test_file) def test_ics_openssl_crypto_ecdsa_ecs_asn1_c(self): test_file = self.get_test_loc('ics/openssl-crypto-ecdsa/ecs_asn1.c') expected = [ u'Copyright (c) 2000-2002 The OpenSSL Project.', ] check_detection(expected, test_file) def test_ics_openssl_crypto_engine_eng_all_c(self): test_file = self.get_test_loc('ics/openssl-crypto-engine/eng_all.c') expected = [ u'Copyright (c) 2000-2001 The OpenSSL Project.', ] check_detection(expected, test_file) def test_ics_openssl_crypto_engine_eng_cryptodev_c(self): test_file = self.get_test_loc('ics/openssl-crypto-engine/eng_cryptodev.c') expected = [ u'Copyright (c) 2002 Bob Beck <beck@openbsd.org>', u'Copyright (c) 2002 Theo de Raadt', u'Copyright (c) 2002 Markus Friedl', ] check_detection(expected, test_file) def test_ics_openssl_crypto_engine_eng_dyn_c(self): test_file = self.get_test_loc('ics/openssl-crypto-engine/eng_dyn.c') expected = [ u'Copyright (c) 1999-2001 The OpenSSL Project.', ] 
check_detection(expected, test_file) def test_ics_openssl_crypto_engine_eng_err_c(self): test_file = self.get_test_loc('ics/openssl-crypto-engine/eng_err.c') expected = [ u'Copyright (c) 1999-2010 The OpenSSL Project.', ] check_detection(expected, test_file) def test_ics_openssl_crypto_engine_eng_fat_c(self): test_file = self.get_test_loc('ics/openssl-crypto-engine/eng_fat.c') expected = [ u'Copyright (c) 1999-2001 The OpenSSL Project.', u'Copyright 2002 Sun Microsystems, Inc.', ] check_detection(expected, test_file) def test_ics_openssl_crypto_engine_engine_h(self): test_file = self.get_test_loc('ics/openssl-crypto-engine/engine.h') expected = [ u'Copyright (c) 1999-2004 The OpenSSL Project.', u'Copyright 2002 Sun Microsystems, Inc.', ] check_detection(expected, test_file) def test_ics_openssl_crypto_evp_m_ecdsa_c(self): test_file = self.get_test_loc('ics/openssl-crypto-evp/m_ecdsa.c') expected = [ u'Copyright (c) 1998-2002 The OpenSSL Project.', u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)', ] check_detection(expected, test_file) def test_ics_openssl_crypto_evp_m_sigver_c(self): test_file = self.get_test_loc('ics/openssl-crypto-evp/m_sigver.c') expected = [ u'Copyright (c) 2006,2007 The OpenSSL Project.', ] check_detection(expected, test_file) def test_ics_openssl_crypto_pem_pem_all_c(self): test_file = self.get_test_loc('ics/openssl-crypto-pem/pem_all.c') expected = [ u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)', u'Copyright (c) 1998-2002 The OpenSSL Project.', ] check_detection(expected, test_file) def test_ics_openssl_crypto_pkcs12_p12_crt_c(self): test_file = self.get_test_loc('ics/openssl-crypto-pkcs12/p12_crt.c') expected = [ u'Copyright (c) 1999-2002 The OpenSSL Project.', ] check_detection(expected, test_file) def test_ics_openssl_crypto_rand_rand_win_c(self): test_file = self.get_test_loc('ics/openssl-crypto-rand/rand_win.c') expected = [ u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)', u'Copyright (c) 1998-2000 The 
OpenSSL Project.', u'(c) Copyright Microsoft Corp. 1993.', ] check_detection(expected, test_file) def test_ics_openssl_crypto_rc4_asm_rc4_ia64_pl(self): test_file = self.get_test_loc('ics/openssl-crypto-rc4-asm/rc4-ia64.pl') expected = [ u'Copyright (c) 2005 Hewlett-Packard Development Company', u'(c) 2005 Hewlett-Packard Development Company', ] check_detection(expected, test_file) @expectedFailure def test_ics_openssl_crypto_rc4_asm_rc4_ia64_pl_trail_abbrev_lead_copy_trail_abbrev(self): test_file = self.get_test_loc('ics/openssl-crypto-rc4-asm/rc4-ia64.pl') expected = [ u'Copyright (c) 2005 Hewlett-Packard Development Company, L.P.', u'Copyright (c) 2005 Hewlett-Packard Development Company, L.P.', ] check_detection(expected, test_file) def test_ics_openssl_crypto_ui_ui_compat_c(self): test_file = self.get_test_loc('ics/openssl-crypto-ui/ui_compat.c') expected = [ u'Copyright (c) 2001-2002 The OpenSSL Project.', ] check_detection(expected, test_file) def test_ics_openssl_crypto_ui_ui_openssl_c(self): test_file = self.get_test_loc('ics/openssl-crypto-ui/ui_openssl.c') expected = [ u'Copyright (c) 2001 The OpenSSL Project.', u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)', ] check_detection(expected, test_file) def test_ics_openssl_crypto_x509_x509_h(self): test_file = self.get_test_loc('ics/openssl-crypto-x509/x509.h') expected = [ u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)', u'Copyright 2002 Sun Microsystems, Inc.', ] check_detection(expected, test_file) def test_ics_openssl_crypto_x509v3_v3_alt_c(self): test_file = self.get_test_loc('ics/openssl-crypto-x509v3/v3_alt.c') expected = [ u'Copyright (c) 1999-2003 The OpenSSL Project.', ] check_detection(expected, test_file) def test_ics_openssl_crypto_x509v3_v3_pci_c(self): test_file = self.get_test_loc('ics/openssl-crypto-x509v3/v3_pci.c') expected = [ u'Copyright (c) 2004 Kungliga Tekniska Hogskolan Royal Institute of Technology', ] check_detection(expected, test_file) @expectedFailure def 
test_ics_openssl_crypto_x509v3_v3_pci_c_trail_place(self): test_file = self.get_test_loc('ics/openssl-crypto-x509v3/v3_pci.c') expected = [ u'Copyright (c) 2004 Kungliga Tekniska Hogskolan (Royal Institute of Technology, Stockholm, Sweden)', ] check_detection(expected, test_file) def test_ics_openssl_include_openssl_modes_h(self): test_file = self.get_test_loc('ics/openssl-include-openssl/modes.h') expected = [ u'Copyright (c) 2008 The OpenSSL Project.', ] check_detection(expected, test_file) def test_ics_openssl_include_openssl_ssl_h(self): test_file = self.get_test_loc('ics/openssl-include-openssl/ssl.h') expected = [ u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)', u'Copyright (c) 1998-2007 The OpenSSL Project.', u'Copyright 2002 Sun Microsystems, Inc.', u'Copyright 2005 Nokia.', ] check_detection(expected, test_file) def test_ics_openssl_include_openssl_ssl3_h(self): test_file = self.get_test_loc('ics/openssl-include-openssl/ssl3.h') expected = [ u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)', u'Copyright (c) 1998-2002 The OpenSSL Project.', u'Copyright 2002 Sun Microsystems, Inc.', ] check_detection(expected, test_file) def test_ics_openssl_include_openssl_tls1_h(self): test_file = self.get_test_loc('ics/openssl-include-openssl/tls1.h') expected = [ u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)', u'Copyright (c) 1998-2006 The OpenSSL Project.', u'Copyright 2002 Sun Microsystems, Inc.', u'Copyright 2005 Nokia.', ] check_detection(expected, test_file) def test_ics_openssl_ssl_d1_both_c(self): test_file = self.get_test_loc('ics/openssl-ssl/d1_both.c') expected = [ u'Copyright (c) 1998-2005 The OpenSSL Project.', u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)', ] check_detection(expected, test_file) def test_ics_openssl_ssl_d1_clnt_c(self): test_file = self.get_test_loc('ics/openssl-ssl/d1_clnt.c') expected = [ u'Copyright (c) 1999-2007 The OpenSSL Project.', u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)', ] 
check_detection(expected, test_file) def test_ics_openssl_ssl_s2_lib_c(self): test_file = self.get_test_loc('ics/openssl-ssl/s2_lib.c') expected = [ u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)', u'Copyright (c) 1998-2007 The OpenSSL Project.', ] check_detection(expected, test_file) def test_ics_openssl_ssl_s3_enc_c(self): test_file = self.get_test_loc('ics/openssl-ssl/s3_enc.c') expected = [ u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)', u'Copyright (c) 1998-2007 The OpenSSL Project.', u'Copyright 2005 Nokia.', ] check_detection(expected, test_file) def test_ics_openssl_ssl_s3_lib_c(self): test_file = self.get_test_loc('ics/openssl-ssl/s3_lib.c') expected = [ u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)', u'Copyright (c) 1998-2007 The OpenSSL Project.', u'Copyright 2002 Sun Microsystems, Inc.', u'Copyright 2005 Nokia.', ] check_detection(expected, test_file) def test_ics_openssl_ssl_ssl_asn1_c(self): test_file = self.get_test_loc('ics/openssl-ssl/ssl_asn1.c') expected = [ u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)', u'Copyright 2005 Nokia.', ] check_detection(expected, test_file) def test_ics_openssl_ssl_ssl_cert_c(self): test_file = self.get_test_loc('ics/openssl-ssl/ssl_cert.c') expected = [ u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)', u'Copyright (c) 1998-2007 The OpenSSL Project.', u'Copyright 2002 Sun Microsystems, Inc.', ] check_detection(expected, test_file) def test_ics_openssl_ssl_ssltest_c(self): test_file = self.get_test_loc('ics/openssl-ssl/ssltest.c') expected = [ u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)', u'Copyright (c) 1998-2000 The OpenSSL Project.', u'Copyright 2002 Sun Microsystems, Inc.', u'Copyright 2005 Nokia.', ] check_detection(expected, test_file) def test_ics_openssl_ssl_t1_reneg_c(self): test_file = self.get_test_loc('ics/openssl-ssl/t1_reneg.c') expected = [ u'Copyright (c) 1995-1998 Eric Young (eay@cryptsoft.com)', u'Copyright (c) 1998-2009 The OpenSSL 
Project.', ] check_detection(expected, test_file) def test_ics_oprofile_changelog_2002(self): test_file = self.get_test_loc('ics/oprofile/ChangeLog-2002') expected = [ u'copyright for 2002', ] check_detection(expected, test_file) def test_ics_oprofile_configure_in(self): test_file = self.get_test_loc('ics/oprofile/configure.in') expected = [ u'Copyright 1999 Olaf Titz <olaf@bigred.inka.de>', ] check_detection(expected, test_file) def test_ics_oprofile_popt_h(self): test_file = self.get_test_loc('ics/oprofile/popt.h') expected = [ u'(c) 1998-2000 Red Hat, Inc.', ] check_detection(expected, test_file) def test_ics_oprofile_agents_jvmpi_jvmpi_oprofile_cpp(self): test_file = self.get_test_loc('ics/oprofile-agents-jvmpi/jvmpi_oprofile.cpp') expected = [ u'Copyright 2007 OProfile authors', u'Copyright IBM Corporation 2007', ] check_detection(expected, test_file) def test_ics_oprofile_daemon_init_c(self): test_file = self.get_test_loc('ics/oprofile-daemon/init.c') expected = [ u'Copyright 2002 OProfile authors', u'Copyright (c) 2005 Hewlett-Packard Co.', ] check_detection(expected, test_file) def test_ics_oprofile_daemon_opd_anon_c(self): test_file = self.get_test_loc('ics/oprofile-daemon/opd_anon.c') expected = [ u'Copyright 2005 OProfile authors', ] check_detection(expected, test_file) def test_ics_oprofile_daemon_opd_cookie_c(self): test_file = self.get_test_loc('ics/oprofile-daemon/opd_cookie.c') expected = [ u'Copyright 2002, 2005 OProfile authors', ] check_detection(expected, test_file) def test_ics_oprofile_daemon_opd_events_c(self): test_file = self.get_test_loc('ics/oprofile-daemon/opd_events.c') expected = [ u'Copyright 2002, 2003 OProfile authors', ] check_detection(expected, test_file) def test_ics_oprofile_daemon_opd_extended_c(self): test_file = self.get_test_loc('ics/oprofile-daemon/opd_extended.c') expected = [ u'Copyright 2007-2009 OProfile authors', u'Copyright (c) 2009 Advanced Micro Devices, Inc.', ] check_detection(expected, test_file) def 
test_ics_oprofile_daemon_opd_ibs_c(self): test_file = self.get_test_loc('ics/oprofile-daemon/opd_ibs.c') expected = [ u'Copyright 2007-2010 OProfile authors', u'Copyright (c) 2008 Advanced Micro Devices, Inc.', ] check_detection(expected, test_file) def test_ics_oprofile_daemon_opd_ibs_h(self): test_file = self.get_test_loc('ics/oprofile-daemon/opd_ibs.h') expected = [ u'Copyright 2008-2010 OProfile authors', u'Copyright (c) 2008 Advanced Micro Devices, Inc.', ] check_detection(expected, test_file) def test_ics_oprofile_daemon_opd_ibs_trans_c(self): test_file = self.get_test_loc('ics/oprofile-daemon/opd_ibs_trans.c') expected = [ u'Copyright 2008 - 2010 OProfile authors', u'Copyright (c) 2008 Advanced Micro Devices, Inc.', ] check_detection(expected, test_file) def test_ics_oprofile_daemon_opd_ibs_trans_h(self): test_file = self.get_test_loc('ics/oprofile-daemon/opd_ibs_trans.h') expected = [ u'Copyright 2008 OProfile authors', u'Copyright (c) 2008 Advanced Micro Devices, Inc.', ] check_detection(expected, test_file) def test_ics_oprofile_daemon_opd_mangling_c(self): test_file = self.get_test_loc('ics/oprofile-daemon/opd_mangling.c') expected = [ u'Copyright 2002 OProfile authors', ] check_detection(expected, test_file) def test_ics_oprofile_daemon_opd_perfmon_c(self): test_file = self.get_test_loc('ics/oprofile-daemon/opd_perfmon.c') expected = [ u'Copyright 2003 OProfile authors', ] check_detection(expected, test_file) def test_ics_oprofile_daemon_opd_pipe_c(self): test_file = self.get_test_loc('ics/oprofile-daemon/opd_pipe.c') expected = [ u'Copyright 2008 OProfile authors', ] check_detection(expected, test_file) def test_ics_oprofile_daemon_opd_spu_c(self): test_file = self.get_test_loc('ics/oprofile-daemon/opd_spu.c') expected = [ u'Copyright 2007 OProfile authors', u'(c) Copyright IBM Corporation 2007', ] check_detection(expected, test_file) def test_ics_oprofile_daemon_opd_trans_c(self): test_file = self.get_test_loc('ics/oprofile-daemon/opd_trans.c') 
expected = [ u'Copyright 2002 OProfile authors', u'Copyright (c) 2005 Hewlett-Packard Co.', u'(c) Copyright IBM Corporation 2007', ] check_detection(expected, test_file) def test_ics_oprofile_daemon_opd_trans_h(self): test_file = self.get_test_loc('ics/oprofile-daemon/opd_trans.h') expected = [ u'Copyright 2002 OProfile authors', u'(c) Copyright IBM Corporation 2007', ] check_detection(expected, test_file) def test_ics_oprofile_daemon_oprofiled_c(self): test_file = self.get_test_loc('ics/oprofile-daemon/oprofiled.c') expected = [ u'Copyright 2002, 2003 OProfile authors', u'Copyright (c) 2005 Hewlett-Packard Co.', ] check_detection(expected, test_file) def test_ics_oprofile_daemon_liblegacy_p_module_h(self): test_file = self.get_test_loc('ics/oprofile-daemon-liblegacy/p_module.h') expected = [ u'Copyright 1996, 1997 Linux International.', ] check_detection(expected, test_file) def test_ics_oprofile_doc_oprofile_1_in(self): test_file = self.get_test_loc('ics/oprofile-doc/oprofile.1.in') expected = [ u'Copyright (c) 1998-2004 University of Manchester', # MISSED , UK, John Levon', ] check_detection(expected, test_file) @expectedFailure def test_ics_oprofile_doc_oprofile_1_in_trail_other(self): test_file = self.get_test_loc('ics/oprofile-doc/oprofile.1.in') expected = [ u'Copyright (C) 1998-2004 University of Manchester, UK, John Levon, and others.', ] check_detection(expected, test_file) def test_ics_oprofile_events_ppc64_970mp_events(self): test_file = self.get_test_loc('ics/oprofile-events-ppc64-970MP/events') expected = [ u'Copyright OProfile authors', u'Copyright (c) International Business Machines, 2007.', ] check_detection(expected, test_file) def test_ics_oprofile_events_ppc64_970mp_unit_masks(self): test_file = self.get_test_loc('ics/oprofile-events-ppc64-970MP/unit_masks') expected = [ u'Copyright OProfile authors', u'Copyright (c) International Business Machines, 2006.', ] check_detection(expected, test_file) def 
test_ics_oprofile_events_ppc64_cell_be_events(self): test_file = self.get_test_loc('ics/oprofile-events-ppc64-cell-be/events') expected = [ u'Copyright OProfile authors', u'(c) COPYRIGHT International Business Machines Corp. 2006', ] check_detection(expected, test_file) def test_ics_oprofile_events_ppc64_ibm_compat_v1_events(self): test_file = self.get_test_loc('ics/oprofile-events-ppc64-ibm-compat-v1/events') expected = [ u'Copyright OProfile authors', u'Copyright (c) International Business Machines, 2009.', ] check_detection(expected, test_file) def test_ics_oprofile_events_x86_64_family10_events_extra_contributed(self): test_file = self.get_test_loc('ics/oprofile-events-x86-64-family10/events') expected = [ u'Copyright OProfile authors', u'Copyright (c) 2006-2008 Advanced Micro Devices', ] check_detection(expected, test_file) def test_ics_oprofile_events_x86_64_family11h_unit_masks(self): test_file = self.get_test_loc('ics/oprofile-events-x86-64-family11h/unit_masks') expected = [ u'Copyright OProfile authors', u'Copyright (c) Advanced Micro Devices, 2006-2008', ] check_detection(expected, test_file) def test_ics_oprofile_events_x86_64_family12h_events_extra_contributed(self): test_file = self.get_test_loc('ics/oprofile-events-x86-64-family12h/events') expected = [ u'Copyright OProfile authors', u'Copyright (c) 2006-2010 Advanced Micro Devices', ] check_detection(expected, test_file) def test_ics_oprofile_include_sstream(self): test_file = self.get_test_loc('ics/oprofile-include/sstream') expected = [ u'Copyright (c) 2000 Free Software Foundation', ] check_detection(expected, test_file) def test_ics_oprofile_libop_op_hw_specific_h(self): test_file = self.get_test_loc('ics/oprofile-libop/op_hw_specific.h') expected = [ u'Copyright 2008 Intel Corporation', ] check_detection(expected, test_file) def test_ics_oprofile_libpopt_findme_c(self): test_file = self.get_test_loc('ics/oprofile-libpopt/findme.c') expected = [ u'(c) 1998-2002 Red Hat, Inc.', ] 
check_detection(expected, test_file)

    # NOTE(review): the tests below follow the suite-wide fixture pattern —
    # resolve an ics/ sample file, then assert the exact detected statements.

    def test_ics_oprofile_libpp_callgraph_container_cpp(self):
        test_file = self.get_test_loc('ics/oprofile-libpp/callgraph_container.cpp')
        expected = [
            u'Copyright 2004 OProfile authors',
        ]
        check_detection(expected, test_file)

    def test_ics_oprofile_libpp_format_output_h(self):
        test_file = self.get_test_loc('ics/oprofile-libpp/format_output.h')
        expected = [
            u'Copyright 2002 OProfile authors',
        ]
        check_detection(expected, test_file)

    def test_ics_oprofile_libpp_populate_cpp(self):
        test_file = self.get_test_loc('ics/oprofile-libpp/populate.cpp')
        expected = [
            u'Copyright 2003 OProfile authors',
            u'(c) Copyright IBM Corporation 2007',
        ]
        check_detection(expected, test_file)

    def test_ics_oprofile_libpp_symbol_cpp(self):
        test_file = self.get_test_loc('ics/oprofile-libpp/symbol.cpp')
        expected = [
            u'Copyright 2002, 2004 OProfile authors',
        ]
        check_detection(expected, test_file)

    def test_ics_oprofile_libpp_xml_utils_cpp(self):
        test_file = self.get_test_loc('ics/oprofile-libpp/xml_utils.cpp')
        expected = [
            u'Copyright 2006 OProfile authors',
        ]
        check_detection(expected, test_file)

    def test_ics_oprofile_libregex_demangle_java_symbol_cpp(self):
        test_file = self.get_test_loc('ics/oprofile-libregex/demangle_java_symbol.cpp')
        expected = [
            u'Copyright 2007 OProfile authors',
        ]
        check_detection(expected, test_file)

    def test_ics_oprofile_libutil_sparse_array_h(self):
        test_file = self.get_test_loc('ics/oprofile-libutil++/sparse_array.h')
        expected = [
            u'Copyright 2007 OProfile authors',
            u'Copyright (c) International Business Machines, 2007.',
        ]
        check_detection(expected, test_file)

    def test_ics_oprofile_libutil_string_manip_cpp(self):
        test_file = self.get_test_loc('ics/oprofile-libutil++/string_manip.cpp')
        expected = [
            u'Copyright 2002 OProfile authors',
        ]
        check_detection(expected, test_file)

    def test_ics_oprofile_libutil_utility_h(self):
        test_file = self.get_test_loc('ics/oprofile-libutil++/utility.h')
        expected = [
            u'Copyright 2002 OProfile authors',
            u'(c) Copyright boost.org 1999.',
        ]
        check_detection(expected, test_file)

    def test_ics_oprofile_module_ia64_op_pmu_c(self):
        test_file = self.get_test_loc('ics/oprofile-module-ia64/op_pmu.c')
        expected = [
            u'Copyright 2002 OProfile authors',
            u'Copyright (c) 1999 Ganesh Venkitachalam <venkitac@us.ibm.com>',
            u'Copyright (c) 1999-2002 Hewlett Packard Co Stephane Eranian <eranian@hpl.hp.com> David Mosberger-Tang <davidm@hpl.hp.com>',
        ]
        check_detection(expected, test_file)

    def test_ics_oprofile_opcontrol_opcontrol_cpp(self):
        test_file = self.get_test_loc('ics/oprofile-opcontrol/opcontrol.cpp')
        expected = [
            u'Copyright 2008, The Android Open Source Project',
        ]
        check_detection(expected, test_file)

    def test_ics_oprofile_opjitconv_conversion_c(self):
        test_file = self.get_test_loc('ics/oprofile-opjitconv/conversion.c')
        expected = [
            u'Copyright 2008 OProfile authors',
            u'Copyright IBM Corporation 2008',
        ]
        check_detection(expected, test_file)

    def test_ics_oprofile_pp_oparchive_cpp(self):
        test_file = self.get_test_loc('ics/oprofile-pp/oparchive.cpp')
        expected = [
            u'Copyright 2003, 2004 OProfile authors',
        ]
        check_detection(expected, test_file)

    def test_ics_oprofile_pp_oparchive_options_cpp(self):
        test_file = self.get_test_loc('ics/oprofile-pp/oparchive_options.cpp')
        expected = [
            u'Copyright 2002, 2003, 2004 OProfile authors',
        ]
        check_detection(expected, test_file)

    def test_ics_oprofile_utils_opcontrol(self):
        # 'Copyright 2002 Read' is the truncated statement currently detected
        # for this sample; the full form is covered by the expected-failure
        # variant below.
        test_file = self.get_test_loc('ics/oprofile-utils/opcontrol')
        expected = [
            u'Copyright 2002 Read',
            u'Copyright IBM Corporation 2007',
        ]
        check_detection(expected, test_file)

    @expectedFailure
    def test_ics_oprofile_utils_opcontrol_misc(self):
        # Known failure: detection does not keep 'the file COPYING' attached
        # to the statement.
        test_file = self.get_test_loc('ics/oprofile-utils/opcontrol')
        expected = [
            u'Copyright 2002 Read the file COPYING',
            u'Copyright IBM Corporation 2007',
        ]
        check_detection(expected, test_file)

    def test_ics_ping_notice(self):
        test_file = self.get_test_loc('ics/ping/NOTICE')
        expected = [
            u'Copyright (c) 1989 The Regents of the University of California.',
        ]
        check_detection(expected, test_file)

    def test_ics_ping_ping_c(self):
        test_file = self.get_test_loc('ics/ping/ping.c')
        expected = [
            u'Copyright (c) 1989 The Regents of the University of California.',
            u'Copyright (c) 1989 The Regents of the University of California.',
        ]
        check_detection(expected, test_file)

    def test_ics_ping6_notice(self):
        test_file = self.get_test_loc('ics/ping6/NOTICE')
        expected = [
            u'Copyright (c) 1995, 1996, 1997, and 1998 WIDE Project.',
            u'Copyright (c) 1989, 1993 The Regents of the University of California.',
        ]
        check_detection(expected, test_file)

    def test_ics_ping6_ping6_c(self):
        test_file = self.get_test_loc('ics/ping6/ping6.c')
        expected = [
            u'Copyright (c) 1995, 1996, 1997, and 1998 WIDE Project.',
            u'Copyright (c) 1989, 1993 The Regents of the University of California.',
            u'Copyright (c) 1989, 1993 The Regents of the University of California.',
        ]
        check_detection(expected, test_file)

    def test_ics_ppp_pppd_auth_c(self):
        test_file = self.get_test_loc('ics/ppp-pppd/auth.c')
        expected = [
            u'Copyright (c) 1993-2002 Paul Mackerras.',
            u'Copyright (c) 1984-2000 Carnegie Mellon University.',
        ]
        check_detection(expected, test_file)

    def test_ics_ppp_pppd_cbcp_c(self):
        test_file = self.get_test_loc('ics/ppp-pppd/cbcp.c')
        expected = [
            u'Copyright (c) 1995 Pedro Roque Marques.',
        ]
        check_detection(expected, test_file)

    def test_ics_ppp_pppd_ccp_c(self):
        test_file = self.get_test_loc('ics/ppp-pppd/ccp.c')
        expected = [
            u'Copyright (c) 1994-2002 Paul Mackerras.',
        ]
        check_detection(expected, test_file)

    def test_ics_ppp_pppd_chap_ms_c(self):
        test_file = self.get_test_loc('ics/ppp-pppd/chap_ms.c')
        expected = [
            u'Copyright (c) 1995 Eric Rosenquist.',
            u'Copyright (c) 2002 The Android Open Source Project',
        ]
        check_detection(expected, test_file)

    def test_ics_ppp_pppd_chap_ms_h(self):
        test_file = self.get_test_loc('ics/ppp-pppd/chap_ms.h')
        expected = [
            u'Copyright (c) 1995 Eric Rosenquist.',
        ]
        check_detection(expected, test_file)

    def test_ics_ppp_pppd_chap_md5_c(self):
        test_file = self.get_test_loc('ics/ppp-pppd/chap-md5.c')
        expected = [
            u'Copyright (c) 2003 Paul Mackerras.',
        ]
        check_detection(expected, test_file)

    def test_ics_ppp_pppd_demand_c(self):
        test_file = self.get_test_loc('ics/ppp-pppd/demand.c')
        expected = [
            u'Copyright (c) 1996-2002 Paul Mackerras.',
        ]
        check_detection(expected, test_file)

    def test_ics_ppp_pppd_eap_c(self):
        test_file = self.get_test_loc('ics/ppp-pppd/eap.c')
        expected = [
            u'Copyright (c) 2001 by Sun Microsystems, Inc.',
        ]
        check_detection(expected, test_file)

    def test_ics_ppp_pppd_ecp_c(self):
        test_file = self.get_test_loc('ics/ppp-pppd/ecp.c')
        expected = [
            u'Copyright (c) 2002 The Android Open Source Project',
            u'Copyright (c) 1994-2002 Paul Mackerras.',
        ]
        check_detection(expected, test_file)

    def test_ics_ppp_pppd_ecp_h(self):
        test_file = self.get_test_loc('ics/ppp-pppd/ecp.h')
        expected = [
            u'Copyright (c) 2002 The Android Open Source Project',
        ]
        check_detection(expected, test_file)

    def test_ics_ppp_pppd_eui64_c(self):
        test_file = self.get_test_loc('ics/ppp-pppd/eui64.c')
        expected = [
            u'Copyright (c) 1999 Tommi Komulainen.',
        ]
        check_detection(expected, test_file)

    def test_ics_ppp_pppd_fsm_c(self):
        test_file = self.get_test_loc('ics/ppp-pppd/fsm.c')
        expected = [
            u'Copyright (c) 1984-2000 Carnegie Mellon University.',
        ]
        check_detection(expected, test_file)

    def test_ics_ppp_pppd_ipv6cp_c(self):
        test_file = self.get_test_loc('ics/ppp-pppd/ipv6cp.c')
        expected = [
            u'Copyright (c) 1999 Tommi Komulainen.',
            u'Copyright (c) 1995, 1996, 1997 Francis.Dupont@inria.fr',
            u'Copyright (c) 1998, 1999 Francis.Dupont@inria.fr',
            u'Copyright (c) 1984-2000 Carnegie Mellon University.',
        ]
        check_detection(expected, test_file)

    def test_ics_ppp_pppd_main_c(self):
        test_file = self.get_test_loc('ics/ppp-pppd/main.c')
        expected = [
            u'Copyright (c) 1984-2000 Carnegie Mellon University.',
            u'Copyright (c) 1999-2004 Paul Mackerras.',
        ]
        check_detection(expected, test_file)

    def test_ics_ppp_pppd_md4_c(self):
        test_file = self.get_test_loc('ics/ppp-pppd/md4.c')
        expected = [
            u'(c) 1990 RSA Data Security, Inc.',
        ]
        check_detection(expected, test_file)

    def test_ics_ppp_pppd_md5_c(self):
        test_file = self.get_test_loc('ics/ppp-pppd/md5.c')
        expected = [
            u'Copyright (c) 1990, RSA Data Security, Inc.',
        ]
        check_detection(expected, test_file)

    def test_ics_ppp_pppd_md5_h(self):
        test_file = self.get_test_loc('ics/ppp-pppd/md5.h')
        expected = [
            u'Copyright (c) 1990, RSA Data Security, Inc.',
        ]
        check_detection(expected, test_file)

    def test_ics_ppp_pppd_notice(self):
        test_file = self.get_test_loc('ics/ppp-pppd/NOTICE')
        expected = [
            u'Copyright (c) 1984-2000 Carnegie Mellon University.',
            u'Copyright (c) 1995 Pedro Roque Marques.',
            u'Copyright (c) 2000-2004 Paul Mackerras.',
            u'Copyright (c) 1994-2002 Paul Mackerras.',
            u'Copyright (c) 2003 Paul Mackerras.',
            u'Copyright (c) 1996-2002 Paul Mackerras.',
            u'Copyright (c) 1999-2004 Paul Mackerras.',
            u'Copyright (c) 2000-2002 Paul Mackerras.',
            u'Copyright (c) 1999-2002 Paul Mackerras.',
            u'Copyright (c) 1995 Eric Rosenquist.',
            u'Copyright (c) 2002 The Android Open Source Project',
            u'Copyright (c) 1990, RSA Data Security, Inc.',
            u'Copyright (c) 2001 by Sun Microsystems, Inc.',
            u'Copyright (c) 1999 Tommi Komulainen.',
            u'Copyright (c) 1995, 1996, 1997 Francis.Dupont@inria.fr',
            u'Copyright (c) 1998, 1999 Francis.Dupont@inria.fr',
        ]
        check_detection(expected, test_file)

    def test_ics_ppp_pppd_pppd_8(self):
        test_file = self.get_test_loc('ics/ppp-pppd/pppd.8')
        expected = [
            u'Copyright (c) 1993-2003 Paul Mackerras <paulus@samba.org>',
            u'Copyright (c) 1984-2000 Carnegie Mellon University.',
            u'Copyright (c) 1993-2004 Paul Mackerras.',
            u'Copyright (c) 1995 Pedro Roque Marques.',
            u'Copyright (c) 1995 Eric Rosenquist.',
            u'Copyright (c) 1999 Tommi Komulainen.',
            u'Copyright (c) Andrew Tridgell 1999',
            u'Copyright (c) 2000 by Sun Microsystems, Inc.',
            u'Copyright (c) 2001 by Sun Microsystems, Inc.',
            u'Copyright (c) 2002 The Android Open Source Project',
        ]
check_detection(expected, test_file)

    # NOTE(review): ppp-pppd tail plus pppd plugin samples (minconn,
    # passprompt, winbind, pppoatm, radius). Same fixture pattern throughout.

    def test_ics_ppp_pppd_pppd_h(self):
        test_file = self.get_test_loc('ics/ppp-pppd/pppd.h')
        expected = [
            u'Copyright (c) 1984-2000 Carnegie Mellon University.',
        ]
        check_detection(expected, test_file)

    def test_ics_ppp_pppd_sys_linux_c(self):
        test_file = self.get_test_loc('ics/ppp-pppd/sys-linux.c')
        expected = [
            u'Copyright (c) 1994-2004 Paul Mackerras.',
            u'Copyright (c) 1984-2000 Carnegie Mellon University.',
        ]
        check_detection(expected, test_file)

    def test_ics_ppp_pppd_sys_solaris_c(self):
        test_file = self.get_test_loc('ics/ppp-pppd/sys-solaris.c')
        expected = [
            u'Copyright (c) 2000 by Sun Microsystems, Inc.',
            u'Copyright (c) 1995-2002 Paul Mackerras.',
            u'Copyright (c) 1984-2000 Carnegie Mellon University.',
        ]
        check_detection(expected, test_file)

    def test_ics_ppp_pppd_tty_c(self):
        test_file = self.get_test_loc('ics/ppp-pppd/tty.c')
        expected = [
            u'Copyright (c) 2000-2004 Paul Mackerras.',
            u'Copyright (c) 1984-2000 Carnegie Mellon University.',
        ]
        check_detection(expected, test_file)

    def test_ics_ppp_pppd_utils_c(self):
        test_file = self.get_test_loc('ics/ppp-pppd/utils.c')
        expected = [
            u'Copyright (c) 1999-2002 Paul Mackerras.',
        ]
        check_detection(expected, test_file)

    def test_ics_ppp_pppd_include_net_ppp_defs_h(self):
        test_file = self.get_test_loc('ics/ppp-pppd-include-net/ppp_defs.h')
        expected = [
            u'Copyright (c) 1984 Paul Mackerras.',
        ]
        check_detection(expected, test_file)

    def test_ics_ppp_pppd_include_net_pppio_h(self):
        test_file = self.get_test_loc('ics/ppp-pppd-include-net/pppio.h')
        expected = [
            u'Copyright (c) 1994 Paul Mackerras.',
        ]
        check_detection(expected, test_file)

    def test_ics_ppp_pppd_include_net_slcompress_h(self):
        test_file = self.get_test_loc('ics/ppp-pppd-include-net/slcompress.h')
        expected = [
            u'Copyright (c) 1989 Regents of the University of California.',
        ]
        check_detection(expected, test_file)

    def test_ics_ppp_pppd_plugins_minconn_c(self):
        test_file = self.get_test_loc('ics/ppp-pppd-plugins/minconn.c')
        expected = [
            u'Copyright (c) 1999 Paul Mackerras.',
        ]
        check_detection(expected, test_file)

    def test_ics_ppp_pppd_plugins_passprompt_c(self):
        test_file = self.get_test_loc('ics/ppp-pppd-plugins/passprompt.c')
        expected = [
            u'Copyright 1999 Paul Mackerras, Alan Curry.',
        ]
        check_detection(expected, test_file)

    def test_ics_ppp_pppd_plugins_winbind_c(self):
        test_file = self.get_test_loc('ics/ppp-pppd-plugins/winbind.c')
        expected = [
            u'Copyright (c) 2003 Andrew Bartlet <abartlet@samba.org>',
            u'Copyright 1999 Paul Mackerras, Alan Curry.',
            u'Copyright (c) 2002 Roaring Penguin Software Inc.',
            u'Copyright (c) 1996, Matjaz Godec <gody@elgo.si>',
            u'Copyright (c) 1996, Lars Fenneberg <in5y050@public.uni-hamburg.de>',
            u'Copyright (c) 1997, Miguel A.L. Paraz <map@iphil.net>',
            u'Copyright (c) 1995,1996,1997,1998 Lars Fenneberg <lf@elemental.net>',
            u'Copyright (c) 2002 Roaring Penguin Software Inc.',
            u'Copyright (c) 2003, Sean E. Millichamp',
            u'Copyright (c) Andrew Tridgell 1992-2001',
            u'Copyright (c) Simo Sorce 2001-2002',
            u'Copyright (c) Martin Pool 2003',
        ]
        check_detection(expected, test_file)

    def test_ics_ppp_pppd_plugins_pppoatm_copying(self):
        test_file = self.get_test_loc('ics/ppp-pppd-plugins-pppoatm/COPYING')
        expected = [
            u'Copyright 1995-2000 EPFL-LRC/ICA',
        ]
        check_detection(expected, test_file)

    def test_ics_ppp_pppd_plugins_pppoatm_pppoatm_c(self):
        test_file = self.get_test_loc('ics/ppp-pppd-plugins-pppoatm/pppoatm.c')
        expected = [
            u'Copyright 2000 Mitchell Blank Jr.',
        ]
        check_detection(expected, test_file)

    def test_ics_ppp_pppd_plugins_radius_avpair_c(self):
        test_file = self.get_test_loc('ics/ppp-pppd-plugins-radius/avpair.c')
        expected = [
            u'Copyright (c) 1995 Lars Fenneberg',
            u'Copyright 1992 Livingston Enterprises, Inc.',
            u'Copyright 1992,1993, 1994,1995 The Regents of the University of Michigan and Merit Network, Inc.',
        ]
        check_detection(expected, test_file)

    def test_ics_ppp_pppd_plugins_radius_buildreq_c(self):
        test_file = self.get_test_loc('ics/ppp-pppd-plugins-radius/buildreq.c')
        expected = [
            u'Copyright (c) 1995,1997 Lars Fenneberg',
        ]
        check_detection(expected, test_file)

    def test_ics_ppp_pppd_plugins_radius_clientid_c(self):
        test_file = self.get_test_loc('ics/ppp-pppd-plugins-radius/clientid.c')
        expected = [
            u'Copyright (c) 1995,1996,1997 Lars Fenneberg',
        ]
        check_detection(expected, test_file)

    def test_ics_ppp_pppd_plugins_radius_config_c(self):
        test_file = self.get_test_loc('ics/ppp-pppd-plugins-radius/config.c')
        expected = [
            u'Copyright (c) 1995,1996,1997 Lars Fenneberg',
            u'Copyright 1992 Livingston Enterprises, Inc.',
            u'Copyright 1992,1993, 1994,1995 The Regents of the University of Michigan and Merit Network, Inc.',
        ]
        check_detection(expected, test_file)

    def test_ics_ppp_pppd_plugins_radius_copyright(self):
        # The duplicated 'Livingston Enterprises, Inc.' text reflects the
        # current (imperfect) detection; the corrected expectation lives in
        # the expected-failure variant below.
        test_file = self.get_test_loc('ics/ppp-pppd-plugins-radius/COPYRIGHT')
        expected = [
            u'Copyright (c) 2002 Roaring Penguin Software Inc.',
            u'Copyright (c) 1995,1996,1997,1998 Lars Fenneberg <lf@elemental.net>',
            u'Copyright 1992 Livingston Enterprises, Inc. Livingston Enterprises, Inc.',
            u'Copyright (c) 1991-2, RSA Data Security, Inc.',
        ]
        check_detection(expected, test_file)

    @expectedFailure
    def test_ics_ppp_pppd_plugins_radius_copyright_extra_name(self):
        test_file = self.get_test_loc('ics/ppp-pppd-plugins-radius/COPYRIGHT')
        expected = [
            u'Copyright (c) 2002 Roaring Penguin Software Inc.',
            u'Copyright (c) 1995,1996,1997,1998 Lars Fenneberg <lf@elemental.net>',
            u'Copyright 1992 Livingston Enterprises, Inc.',
            u'Copyright (c) 1991-2, RSA Data Security, Inc.',
        ]
        check_detection(expected, test_file)

    def test_ics_ppp_pppd_plugins_radius_dict_c(self):
        test_file = self.get_test_loc('ics/ppp-pppd-plugins-radius/dict.c')
        expected = [
            u'Copyright (c) 2002 Roaring Penguin Software Inc.',
            u'Copyright (c) 1995,1996,1997 Lars Fenneberg',
            u'Copyright 1992 Livingston Enterprises, Inc.',
            u'Copyright 1992,1993, 1994,1995 The Regents of the University of Michigan and Merit Network, Inc.',
        ]
        check_detection(expected, test_file)

    def test_ics_ppp_pppd_plugins_radius_includes_h(self):
        test_file = self.get_test_loc('ics/ppp-pppd-plugins-radius/includes.h')
        expected = [
            u'Copyright (c) 1997 Lars Fenneberg',
            u'Copyright 1992 Livingston Enterprises, Inc.',
            u'Copyright 1992,1993, 1994,1995 The Regents of the University of Michigan and Merit Network, Inc.',
        ]
        check_detection(expected, test_file)

    def test_ics_ppp_pppd_plugins_radius_lock_c(self):
        test_file = self.get_test_loc('ics/ppp-pppd-plugins-radius/lock.c')
        expected = [
            u'Copyright (c) 1997 Lars Fenneberg',
        ]
        check_detection(expected, test_file)

    def test_ics_ppp_pppd_plugins_radius_makefile_linux(self):
        test_file = self.get_test_loc('ics/ppp-pppd-plugins-radius/Makefile.linux')
        expected = [
            u'Copyright 2002 Roaring Penguin Software Inc.',
        ]
        check_detection(expected, test_file)

    def test_ics_ppp_pppd_plugins_radius_options_h(self):
        test_file = self.get_test_loc('ics/ppp-pppd-plugins-radius/options.h')
        expected = [
            u'Copyright (c) 1996 Lars Fenneberg',
        ]
        check_detection(expected, test_file)

    # (the next method continues past this chunk boundary)
    def
test_ics_ppp_pppd_plugins_radius_pathnames_h(self):
        # Same fixture pattern as the rest of the suite: resolve an ics/
        # sample, then assert the exact detected copyright statements.
        test_file = self.get_test_loc('ics/ppp-pppd-plugins-radius/pathnames.h')
        expected = [
            u'Copyright (c) 1995,1996 Lars Fenneberg',
            u'Copyright 1992 Livingston Enterprises, Inc.',
            u'Copyright 1992,1993, 1994,1995 The Regents of the University of Michigan and Merit Network, Inc.',
        ]
        check_detection(expected, test_file)

    def test_ics_ppp_pppd_plugins_radius_radattr_c(self):
        test_file = self.get_test_loc('ics/ppp-pppd-plugins-radius/radattr.c')
        expected = [
            u'Copyright (c) 2002 Roaring Penguin Software Inc.',
        ]
        check_detection(expected, test_file)

    def test_ics_ppp_pppd_plugins_radius_radius_c(self):
        test_file = self.get_test_loc('ics/ppp-pppd-plugins-radius/radius.c')
        expected = [
            u'Copyright (c) 2002 Roaring Penguin Software Inc.',
            u'Copyright (c) 1996, Matjaz Godec <gody@elgo.si>',
            u'Copyright (c) 1996, Lars Fenneberg <in5y050@public.uni-hamburg.de>',
            u'Copyright (c) 1997, Miguel A.L. Paraz <map@iphil.net>',
            u'Copyright (c) 1995,1996,1997,1998 Lars Fenneberg <lf@elemental.net>',
            u'Copyright (c) 2002 Roaring Penguin Software Inc.',
        ]
        check_detection(expected, test_file)

    def test_ics_ppp_pppd_plugins_radius_radiusclient_h(self):
        test_file = self.get_test_loc('ics/ppp-pppd-plugins-radius/radiusclient.h')
        expected = [
            u'Copyright (c) 1995,1996,1997,1998 Lars Fenneberg',
            u'Copyright 1992 Livingston Enterprises, Inc.',
            u'Copyright 1992,1993, 1994,1995 The Regents of the University of Michigan and Merit Network, Inc.',
        ]
        check_detection(expected, test_file)

    def test_ics_ppp_pppd_plugins_radius_radrealms_c(self):
        test_file = self.get_test_loc('ics/ppp-pppd-plugins-radius/radrealms.c')
        expected = [
            u'Copyright (c) 2002 Netservers',
        ]
        check_detection(expected, test_file)

    def test_ics_ppp_pppd_plugins_rp_pppoe_common_c(self):
        test_file = self.get_test_loc('ics/ppp-pppd-plugins-rp-pppoe/common.c')
        expected = [
            u'Copyright (c) 2000 by Roaring Penguin Software Inc.',
        ]
        check_detection(expected, test_file)

    def test_ics_ppp_pppd_plugins_rp_pppoe_discovery_c(self):
        test_file = self.get_test_loc('ics/ppp-pppd-plugins-rp-pppoe/discovery.c')
        expected = [
            u'Copyright (c) 1999 by Roaring Penguin Software Inc.',
        ]
        check_detection(expected, test_file)

    def test_ics_ppp_pppd_plugins_rp_pppoe_makefile_linux(self):
        test_file = self.get_test_loc('ics/ppp-pppd-plugins-rp-pppoe/Makefile.linux')
        expected = [
            u'Copyright (c) 2001 Roaring Penguin Software Inc.',
        ]
        check_detection(expected, test_file)

    def test_ics_ppp_pppd_plugins_rp_pppoe_plugin_c(self):
        test_file = self.get_test_loc('ics/ppp-pppd-plugins-rp-pppoe/plugin.c')
        expected = [
            u'Copyright (c) 2001 by Roaring Penguin Software Inc., Michal Ostrowski and Jamal Hadi Salim.',
            u'Copyright 2000 Michal Ostrowski <mostrows@styx.uwaterloo.ca>, Jamal Hadi Salim <hadi@cyberus.ca>',
        ]
        check_detection(expected, test_file)

    def test_ics_ppp_pppd_plugins_rp_pppoe_pppoe_h(self):
        test_file = self.get_test_loc('ics/ppp-pppd-plugins-rp-pppoe/pppoe.h')
        expected = [
            u'Copyright (c) 2000 Roaring Penguin Software Inc.',
        ]
        check_detection(expected, test_file)

    def test_ics_ppp_pppd_plugins_rp_pppoe_pppoe_discovery_c_trail_name(self):
        test_file = self.get_test_loc('ics/ppp-pppd-plugins-rp-pppoe/pppoe-discovery.c')
        expected = [
            u'Copyright (c) 2000-2001 by Roaring Penguin Software Inc.',
            u"Copyright (c) 2004 Marco d'Itri <md@linux.it>",
        ]
        check_detection(expected, test_file)

    def test_ics_proguard_notice(self):
        test_file = self.get_test_loc('ics/proguard/NOTICE')
        expected = [
            u'Copyright (c) 2002-2009 Eric Lafortune.',
            u'Copyright (c) 1989, 1991 Free Software Foundation, Inc.',
            u'copyrighted by the Free Software Foundation',
        ]
        check_detection(expected, test_file)

    def test_ics_proguard_readme(self):
        test_file = self.get_test_loc('ics/proguard/README')
        expected = [
            u'Copyright (c) 2002-2009 Eric Lafortune (eric@graphics.cornell.edu)',
        ]
        check_detection(expected, test_file)

    def test_ics_proguard_docs_acknowledgements_html(self):
        # The raw-markup string is the current detection; the cleaned-up
        # expectation is the expected-failure variant below.
        test_file = self.get_test_loc('ics/proguard-docs/acknowledgements.html')
        expected = [
            u"Copyright (c) 2002-2009 <a href http://www.graphics.cornell.edu/~eric/'>Eric",
        ]
        check_detection(expected, test_file)

    @expectedFailure
    def test_ics_proguard_docs_acknowledgements_html_markup(self):
        test_file = self.get_test_loc('ics/proguard-docs/acknowledgements.html')
        expected = [
            u'Copyright (c) 2002-2009 http://www.graphics.cornell.edu/~eric/ Eric Lafortune',
        ]
        check_detection(expected, test_file)

    def test_ics_proguard_docs_gpl_html(self):
        test_file = self.get_test_loc('ics/proguard-docs/GPL.html')
        expected = [
            u'Copyright (c) 1989, 1991 Free Software Foundation, Inc.',
            u'copyrighted by the Free Software Foundation',
        ]
        check_detection(expected, test_file)

    def test_ics_proguard_docs_gpl_exception_html(self):
        test_file = self.get_test_loc('ics/proguard-docs/GPL_exception.html')
        expected = [
            u'Copyright (c) 2002-2009 Eric Lafortune',
        ]
        check_detection(expected, test_file)

    def test_ics_proguard_examples_annotations_src_proguard_annotation_keep_java(self):
        test_file = self.get_test_loc('ics/proguard-examples-annotations-src-proguard-annotation/Keep.java')
        expected = [
            u'Copyright (c) 2002-2007 Eric Lafortune (eric@graphics.cornell.edu)',
        ]
        check_detection(expected, test_file)

    def test_ics_proguard_src_proguard_argumentwordreader_java(self):
        test_file = self.get_test_loc('ics/proguard-src-proguard/ArgumentWordReader.java')
        expected = [
            u'Copyright (c) 2002-2009 Eric Lafortune (eric@graphics.cornell.edu)',
        ]
        check_detection(expected, test_file)

    def test_ics_proguard_src_proguard_gui_guiresources_properties(self):
        test_file = self.get_test_loc('ics/proguard-src-proguard-gui/GUIResources.properties')
        expected = [
            u'Copyright (c) 2002-2009 Eric Lafortune (eric@graphics.cornell.edu)',
            u'Copyright (c) 2002-2009.',
        ]
        check_detection(expected, test_file)

    def test_ics_protobuf_aclocal_m4(self):
        test_file = self.get_test_loc('ics/protobuf/aclocal.m4')
        expected = [
            u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 Free Software Foundation, Inc.',
            u'Copyright (c) 2002, 2003, 2005, 2006, 2007 Free Software Foundation, Inc.',
            u'Copyright (c) 2001, 2003, 2005 Free Software Foundation, Inc.',
            u'Copyright (c) 1997, 2000, 2001, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
            u'Copyright (c) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
            u'Copyright (c) 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.',
            u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2008 Free Software Foundation, Inc.',
            u'Copyright (c) 2001, 2003, 2005 Free Software Foundation, Inc.',
            u'Copyright (c) 2003, 2005 Free Software Foundation, Inc.',
            u'Copyright (c) 2001, 2002, 2003, 2005 Free Software Foundation, Inc.',
            u'Copyright (c) 1997, 1999, 2000, 2001, 2003, 2004, 2005 Free Software Foundation, Inc.',
            u'Copyright (c) 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
            u'Copyright (c) 2001, 2002, 2003, 2005 Free Software Foundation, Inc.',
            u'Copyright (c) 1996, 1997, 2000, 2001, 2003, 2005 Free Software Foundation, Inc.',
            u'Copyright (c) 2001, 2003, 2005 Free Software Foundation, Inc.',
            u'Copyright (c) 2006 Free Software Foundation, Inc.',
            u'Copyright (c) 2004, 2005 Free Software Foundation, Inc.',
        ]
        check_detection(expected, test_file)

    def test_ics_protobuf_configure(self):
        test_file = self.get_test_loc('ics/protobuf/configure')
        expected = [
            u'Copyright (c) 1992, 1993, 1994, 1995, 1996, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
            u'Copyright (c) 1992, 1993, 1994, 1995, 1996, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
            u'Copyright (c) 2006 Free Software Foundation, Inc.',
            u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005, 2006, 2007, 2008 Free Software Foundation, Inc.',
        ]
        check_detection(expected, test_file)

    # (the body of this method continues past this chunk boundary)
    def test_ics_protobuf_install_txt(self):
test_file = self.get_test_loc('ics/protobuf/INSTALL.txt')
        # Same fixture pattern as the rest of the suite.
        expected = [
            u'Copyright 1994, 1995, 1996, 1999, 2000, 2001, 2002 Free Software Foundation, Inc.',
        ]
        check_detection(expected, test_file)

    def test_ics_protobuf_ltmain_sh(self):
        # NOTE(review): '2007 2008' (no comma) is present in the sample and
        # is reproduced verbatim in the expectation.
        test_file = self.get_test_loc('ics/protobuf/ltmain.sh')
        expected = [
            u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005, 2006, 2007 2008 Free Software Foundation, Inc.',
        ]
        check_detection(expected, test_file)

    def test_ics_protobuf_readme_txt(self):
        test_file = self.get_test_loc('ics/protobuf/README.txt')
        expected = [
            u'Copyright 2008 Google Inc.',
        ]
        check_detection(expected, test_file)

    def test_ics_protobuf_editors_proto_vim(self):
        test_file = self.get_test_loc('ics/protobuf-editors/proto.vim')
        expected = [
            u'Copyright 2008 Google Inc.',
        ]
        check_detection(expected, test_file)

    def test_ics_protobuf_gtest_aclocal_m4(self):
        test_file = self.get_test_loc('ics/protobuf-gtest/aclocal.m4')
        expected = [
            u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 Free Software Foundation, Inc.',
            u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005, 2006, 2007, 2008 Free Software Foundation, Inc.',
            u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005, 2006, 2007, 2008 Free Software Foundation, Inc.',
            u'Copyright (c) 2008 Free Software Foundation, Inc.',
            u'Copyright (c) 2004, 2005, 2007, 2008 Free Software Foundation, Inc.',
            u'Copyright (c) 2004, 2005, 2007 Free Software Foundation, Inc.',
            u'Copyright (c) 2004 Free Software Foundation, Inc.',
            u'Copyright (c) 2004, 2005, 2007 Free Software Foundation, Inc.',
            u'Copyright (c) 2002, 2003, 2005, 2006, 2007 Free Software Foundation, Inc.',
            u'Copyright (c) 2001, 2003, 2005 Free Software Foundation, Inc.',
            u'Copyright (c) 1997, 2000, 2001, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
            u'Copyright (c) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
            u'Copyright (c) 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.',
            u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2008 Free Software Foundation, Inc.',
            u'Copyright (c) 2001, 2003, 2005 Free Software Foundation, Inc.',
            u'Copyright (c) 2003, 2005 Free Software Foundation, Inc.',
            u'Copyright (c) 2001, 2002, 2003, 2005 Free Software Foundation, Inc.',
            u'Copyright (c) 1997, 1999, 2000, 2001, 2003, 2004, 2005 Free Software Foundation, Inc.',
            u'Copyright (c) 2003, 2004, 2005, 2006 Free Software Foundation, Inc.',
            u'Copyright (c) 2001, 2002, 2003, 2005 Free Software Foundation, Inc.',
            u'Copyright (c) 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.',
            u'Copyright (c) 2001, 2003, 2005 Free Software Foundation, Inc.',
            u'Copyright (c) 1996, 1997, 2000, 2001, 2003, 2005 Free Software Foundation, Inc.',
            u'Copyright (c) 2001, 2003, 2005 Free Software Foundation, Inc.',
            u'Copyright (c) 2006 Free Software Foundation, Inc.',
            u'Copyright (c) 2004, 2005 Free Software Foundation, Inc.',
        ]
        check_detection(expected, test_file)

    def test_ics_protobuf_gtest_scons_sconscript(self):
        test_file = self.get_test_loc('ics/protobuf-gtest-scons/SConscript')
        expected = [
            u'Copyright 2008 Google Inc.',
        ]
        check_detection(expected, test_file)

    def test_ics_protobuf_java_src_main_java_com_google_protobuf_abstractmessage_java(self):
        test_file = self.get_test_loc('ics/protobuf-java-src-main-java-com-google-protobuf/AbstractMessage.java')
        expected = [
            u'Copyright 2008 Google Inc.',
        ]
        check_detection(expected, test_file)

    def test_ics_protobuf_m4_libtool_m4(self):
        test_file = self.get_test_loc('ics/protobuf-m4/libtool.m4')
        expected = [
            u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005, 2006, 2007, 2008 Free Software Foundation, Inc.',
            u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005, 2006, 2007, 2008 Free Software Foundation, Inc.',
            u'Copyright (c) 2008 Free Software Foundation, Inc.',
        ]
        check_detection(expected, test_file)
def test_ics_protobuf_m4_ltoptions_m4(self): test_file = self.get_test_loc('ics/protobuf-m4/ltoptions.m4') expected = [ u'Copyright (c) 2004, 2005, 2007, 2008 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_protobuf_m4_ltsugar_m4(self): test_file = self.get_test_loc('ics/protobuf-m4/ltsugar.m4') expected = [ u'Copyright (c) 2004, 2005, 2007 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_protobuf_m4_ltversion_m4(self): test_file = self.get_test_loc('ics/protobuf-m4/ltversion.m4') expected = [ u'Copyright (c) 2004 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_protobuf_src_google_protobuf_compiler_javamicro_javamicro_params_h(self): test_file = self.get_test_loc('ics/protobuf-src-google-protobuf-compiler-javamicro/javamicro_params.h') expected = [ u'Copyright 2010 Google Inc.', ] check_detection(expected, test_file) def test_ics_protobuf_src_google_protobuf_io_tokenizer_cc(self): test_file = self.get_test_loc('ics/protobuf-src-google-protobuf-io/tokenizer.cc') expected = [ u'Copyright 2008 Google Inc.', ] check_detection(expected, test_file) def test_ics_protobuf_src_google_protobuf_stubs_structurally_valid_cc(self): test_file = self.get_test_loc('ics/protobuf-src-google-protobuf-stubs/structurally_valid.cc') expected = [ u'Copyright 2005-2008 Google Inc.', ] check_detection(expected, test_file) def test_ics_qemu_a_out_h(self): test_file = self.get_test_loc('ics/qemu/a.out.h') expected = [ u'Copyright 1997, 1998, 1999, 2001 Red Hat, Inc.', ] check_detection(expected, test_file) def test_ics_qemu_acl_c(self): test_file = self.get_test_loc('ics/qemu/acl.c') expected = [ u'Copyright (c) 2009 Red Hat, Inc', ] check_detection(expected, test_file) def test_ics_qemu_aio_android_c(self): test_file = self.get_test_loc('ics/qemu/aio-android.c') expected = [ u'Copyright IBM, Corp. 
2008', ] check_detection(expected, test_file) def test_ics_qemu_android_trace_h(self): test_file = self.get_test_loc('ics/qemu/android-trace.h') expected = [ u'Copyright (c) 2006-2007 The Android Open Source Project', ] check_detection(expected, test_file) def test_ics_qemu_arch_init_c(self): test_file = self.get_test_loc('ics/qemu/arch_init.c') expected = [ u'Copyright (c) 2003-2008 Fabrice Bellard', ] check_detection(expected, test_file) def test_ics_qemu_arm_dis_c(self): test_file = self.get_test_loc('ics/qemu/arm-dis.c') expected = [ u'Copyright 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004 2007, Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_qemu_arm_semi_c(self): test_file = self.get_test_loc('ics/qemu/arm-semi.c') expected = [ u'Copyright (c) 2005, 2007 CodeSourcery.', ] check_detection(expected, test_file) def test_ics_qemu_block_c(self): test_file = self.get_test_loc('ics/qemu/block.c') expected = [ u'Copyright (c) 2003 Fabrice Bellard', ] check_detection(expected, test_file) def test_ics_qemu_bt_host_c(self): test_file = self.get_test_loc('ics/qemu/bt-host.c') expected = [ u'Copyright (c) 2008 Andrzej Zaborowski <balrog@zabor.org>', ] check_detection(expected, test_file) def test_ics_qemu_console_c(self): test_file = self.get_test_loc('ics/qemu/console.c') expected = [ u'Copyright (c) 2004 Fabrice Bellard', ] check_detection(expected, test_file) def test_ics_qemu_cpu_all_h(self): test_file = self.get_test_loc('ics/qemu/cpu-all.h') expected = [ u'Copyright (c) 2003 Fabrice Bellard', ] check_detection(expected, test_file) def test_ics_qemu_cpu_exec_c(self): test_file = self.get_test_loc('ics/qemu/cpu-exec.c') expected = [ u'Copyright (c) 2003-2005 Fabrice Bellard', ] check_detection(expected, test_file) def test_ics_qemu_curses_c(self): test_file = self.get_test_loc('ics/qemu/curses.c') expected = [ u'Copyright (c) 2005 Andrzej Zaborowski <balrog@zabor.org>', ] check_detection(expected, test_file) def 
test_ics_qemu_curses_keys_h(self): test_file = self.get_test_loc('ics/qemu/curses_keys.h') expected = [ u'Copyright (c) 2005 Andrzej Zaborowski <balrog@zabor.org>', ] check_detection(expected, test_file) def test_ics_qemu_cutils_c(self): test_file = self.get_test_loc('ics/qemu/cutils.c') expected = [ u'Copyright (c) 2006 Fabrice Bellard', ] check_detection(expected, test_file) def test_ics_qemu_d3des_c(self): test_file = self.get_test_loc('ics/qemu/d3des.c') expected = [ u'Copyright (c) 1999 AT&T Laboratories Cambridge.', u'Copyright (c) 1988,1989,1990,1991,1992 by Richard Outerbridge.', ] check_detection(expected, test_file) def test_ics_qemu_d3des_h(self): test_file = self.get_test_loc('ics/qemu/d3des.h') expected = [ u'Copyright (c) 1999 AT&T Laboratories Cambridge.', u'Copyright (c) 1988,1989,1990,1991,1992 by Richard Outerbridge', ] check_detection(expected, test_file) def test_ics_qemu_device_tree_c(self): test_file = self.get_test_loc('ics/qemu/device_tree.c') expected = [ u'Copyright 2008 IBM Corporation. 
Authors Jerone Young <jyoung5@us.ibm.com> Hollis Blanchard <hollisb@us.ibm.com>', ] check_detection(expected, test_file) @expectedFailure def test_ics_qemu_device_tree_c_extra_author(self): test_file = self.get_test_loc('ics/qemu/device_tree.c') expected = [ u'Copyright 2008 IBM Corporation.', ] check_detection(expected, test_file) def test_ics_qemu_dma_helpers_c(self): test_file = self.get_test_loc('ics/qemu/dma-helpers.c') expected = [ u'Copyright (c) 2009 Red Hat', ] check_detection(expected, test_file) def test_ics_qemu_dynlink_h(self): test_file = self.get_test_loc('ics/qemu/dynlink.h') expected = [ u'Copyright (c) 2008 The Android Open Source Project', ] check_detection(expected, test_file) def test_ics_qemu_dynlink_static_c(self): test_file = self.get_test_loc('ics/qemu/dynlink-static.c') expected = [ u'Copyright (c) 2010 The Android Open Source Project', ] check_detection(expected, test_file) def test_ics_qemu_feature_to_c_sh(self): test_file = self.get_test_loc('ics/qemu/feature_to_c.sh') expected = [ u'Copyright (c) 2007 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_qemu_hostregs_helper_h(self): test_file = self.get_test_loc('ics/qemu/hostregs_helper.h') expected = [ u'Copyright (c) 2007 CodeSourcery', ] check_detection(expected, test_file) def test_ics_qemu_host_utils_c(self): test_file = self.get_test_loc('ics/qemu/host-utils.c') expected = [ u'Copyright (c) 2003 Fabrice Bellard', u'Copyright (c) 2007 Aurelien Jarno', ] check_detection(expected, test_file) def test_ics_qemu_host_utils_h(self): test_file = self.get_test_loc('ics/qemu/host-utils.h') expected = [ u'Copyright (c) 2007 Thiemo Seufer', u'Copyright (c) 2007 Jocelyn Mayer', ] check_detection(expected, test_file) def test_ics_qemu_i386_dis_c(self): test_file = self.get_test_loc('ics/qemu/i386-dis.c') expected = [ u'Copyright 1988, 1989, 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.', 
u'Copyright 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_qemu_json_lexer_c(self): test_file = self.get_test_loc('ics/qemu/json-lexer.c') expected = [ u'Copyright IBM, Corp. 2009', ] check_detection(expected, test_file) def test_ics_qemu_keymaps_c(self): test_file = self.get_test_loc('ics/qemu/keymaps.c') expected = [ u'Copyright (c) 2004 Johannes Schindelin', ] check_detection(expected, test_file) def test_ics_qemu_kqemu_c(self): test_file = self.get_test_loc('ics/qemu/kqemu.c') expected = [ u'Copyright (c) 2005-2008 Fabrice Bellard', ] check_detection(expected, test_file) def test_ics_qemu_kqemu_h(self): test_file = self.get_test_loc('ics/qemu/kqemu.h') expected = [ u'Copyright (c) 2004-2008 Fabrice Bellard', ] check_detection(expected, test_file) def test_ics_qemu_loader_c(self): test_file = self.get_test_loc('ics/qemu/loader.c') expected = [ u'Copyright (c) 2006 Fabrice Bellard', u'(c) Copyright 2008 Semihalf', u'(c) Copyright 2000-2005 Wolfgang Denk', ] check_detection(expected, test_file) @expectedFailure def test_ics_qemu_loader_c_trail_name(self): test_file = self.get_test_loc('ics/qemu/loader.c') expected = [ u'Copyright (c) 2006 Fabrice Bellard', u'(c) Copyright 2008 Semihalf', u'(C) Copyright 2000-2005 Wolfgang Denk, DENX Software Engineering, wd@denx.de.', ] check_detection(expected, test_file) def test_ics_qemu_migration_exec_c(self): test_file = self.get_test_loc('ics/qemu/migration-exec.c') expected = [ u'Copyright IBM, Corp. 
2008', u'Copyright Dell MessageOne 2008', ] check_detection(expected, test_file) def test_ics_qemu_monitor_c(self): test_file = self.get_test_loc('ics/qemu/monitor.c') expected = [ u'Copyright (c) 2003-2004 Fabrice Bellard', ] check_detection(expected, test_file) def test_ics_qemu_net_checksum_c(self): test_file = self.get_test_loc('ics/qemu/net-checksum.c') expected = [ u'(c) 2008 Gerd Hoffmann <kraxel@redhat.com>', ] check_detection(expected, test_file) def test_ics_qemu_notify_c(self): test_file = self.get_test_loc('ics/qemu/notify.c') expected = [ u'Copyright IBM, Corp. 2010', ] check_detection(expected, test_file) def test_ics_qemu_os_posix_c(self): test_file = self.get_test_loc('ics/qemu/os-posix.c') expected = [ u'Copyright (c) 2003-2008 Fabrice Bellard', u'Copyright (c) 2010 Red Hat, Inc.', ] check_detection(expected, test_file) def test_ics_qemu_ppc_dis_c(self): test_file = self.get_test_loc('ics/qemu/ppc-dis.c') expected = [ u'Copyright 1994, 1995, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.', u'Copyright 1994, 1995, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.', u'Copyright 1994, 1995, 1996, 1997, 1998, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_qemu_qdict_c(self): test_file = self.get_test_loc('ics/qemu/qdict.c') expected = [ u'Copyright (c) 2009 Red Hat Inc.', ] check_detection(expected, test_file) def test_ics_qemu_qemu_error_c(self): test_file = self.get_test_loc('ics/qemu/qemu-error.c') expected = [ u'Copyright (c) 2010 Red Hat Inc.', ] check_detection(expected, test_file) def test_ics_qemu_qemu_io_c(self): test_file = self.get_test_loc('ics/qemu/qemu-io.c') expected = [ u'Copyright (c) 2009 Red Hat, Inc.', u'Copyright (c) 2003-2005 Silicon Graphics, Inc.', ] check_detection(expected, test_file) def test_ics_qemu_qemu_option_c(self): test_file = self.get_test_loc('ics/qemu/qemu-option.c') 
expected = [ u'Copyright (c) 2003-2008 Fabrice Bellard', u'Copyright (c) 2009 Kevin Wolf <kwolf@redhat.com>', ] check_detection(expected, test_file) def test_ics_qemu_qemu_options_h(self): test_file = self.get_test_loc('ics/qemu/qemu-options.h') expected = [ u'Copyright (c) 2003-2008 Fabrice Bellard', u'Copyright (c) 2010 Jes Sorensen <Jes.Sorensen@redhat.com>', ] check_detection(expected, test_file) def test_ics_qemu_qemu_thread_c(self): test_file = self.get_test_loc('ics/qemu/qemu-thread.c') expected = [ u'Copyright Red Hat, Inc. 2009', ] check_detection(expected, test_file) def test_ics_qemu_softmmu_outside_jit_c(self): test_file = self.get_test_loc('ics/qemu/softmmu_outside_jit.c') expected = [ u'Copyright (c) 2007-2009 The Android Open Source Project', ] check_detection(expected, test_file) def test_ics_qemu_softmmu_semi_h(self): test_file = self.get_test_loc('ics/qemu/softmmu-semi.h') expected = [ u'Copyright (c) 2007 CodeSourcery.', ] check_detection(expected, test_file) def test_ics_qemu_sys_tree_h(self): test_file = self.get_test_loc('ics/qemu/sys-tree.h') expected = [ u'Copyright 2002 Niels Provos <provos@citi.umich.edu>', ] check_detection(expected, test_file) def test_ics_qemu_tap_win32_c(self): test_file = self.get_test_loc('ics/qemu/tap-win32.c') expected = [ u'Copyright (c) Damion K. 
Wilson, 2003', u'Copyright (c) James Yonan, 2003-2004', ] check_detection(expected, test_file) def test_ics_qemu_tcpdump_c(self): test_file = self.get_test_loc('ics/qemu/tcpdump.c') expected = [ u'Copyright (c) 2008 The Android Open Source Project', ] check_detection(expected, test_file) def test_ics_qemu_uboot_image_h(self): test_file = self.get_test_loc('ics/qemu/uboot_image.h') expected = [ u'(c) Copyright 2000-2005 Wolfgang Denk', ] check_detection(expected, test_file) def test_ics_qemu_usb_linux_c(self): test_file = self.get_test_loc('ics/qemu/usb-linux.c') expected = [ u'Copyright (c) 2005 Fabrice Bellard', u'Copyright (c) 2008 Max Krasnyansky Support', u'Copyright 2008 TJ', ] check_detection(expected, test_file) @expectedFailure def test_ics_qemu_usb_linux_c_extra_support_trail_email(self): test_file = self.get_test_loc('ics/qemu/usb-linux.c') expected = [ u'Copyright (c) 2005 Fabrice Bellard', u'Copyright (c) 2008 Max Krasnyansky', u'Copyright 2008 TJ <linux@tjworld.net>', ] check_detection(expected, test_file) def test_ics_qemu_vl_android_c(self): test_file = self.get_test_loc('ics/qemu/vl-android.c') expected = [ u'Copyright (c) 2003-2008 Fabrice Bellard', u'Copyright (c) 2003-2008 Fabrice Bellard', ] check_detection(expected, test_file) def test_ics_qemu_vnc_android_c(self): test_file = self.get_test_loc('ics/qemu/vnc-android.c') expected = [ u'Copyright (c) 2006 Anthony Liguori <anthony@codemonkey.ws>', u'Copyright (c) 2006 Fabrice Bellard', u'Copyright (c) 2009 Red Hat, Inc', ] check_detection(expected, test_file) def test_ics_qemu_android_android_h(self): test_file = self.get_test_loc('ics/qemu-android/android.h') expected = [ u'Copyright (c) 2007 The Android Open Source Project', ] check_detection(expected, test_file) def test_ics_qemu_android_main_c(self): test_file = self.get_test_loc('ics/qemu-android/main.c') expected = [ u'Copyright (c) 2006-2008 The Android Open Source Project', u'Copyright (c) 2006-2011 The Android Open Source Project', ] 
check_detection(expected, test_file) def test_ics_qemu_android_main_common_c(self): test_file = self.get_test_loc('ics/qemu-android/main-common.c') expected = [ u'Copyright (c) 2011 The Android Open Source Project', ] check_detection(expected, test_file) def test_ics_qemu_android_qemu_setup_c(self): test_file = self.get_test_loc('ics/qemu-android/qemu-setup.c') expected = [ u'Copyright (c) 2006-2010 The Android Open Source Project', ] check_detection(expected, test_file) def test_ics_qemu_android_snapshot_c(self): test_file = self.get_test_loc('ics/qemu-android/snapshot.c') expected = [ u'Copyright (c) 2010 The Android Open Source Project', u'copyright (c) 2003 Fabrice Bellard', ] check_detection(expected, test_file) def test_ics_qemu_android_utils_mapfile_c(self): test_file = self.get_test_loc('ics/qemu-android-utils/mapfile.c') expected = [ u'Copyright (c) 2007-2010 The Android Open Source Project', ] check_detection(expected, test_file) def test_ics_qemu_audio_alsaaudio_c(self): test_file = self.get_test_loc('ics/qemu-audio/alsaaudio.c') expected = [ u'Copyright (c) 2008-2010 The Android Open Source Project', u'Copyright (c) 2005 Vassili Karpov', ] check_detection(expected, test_file) def test_ics_qemu_audio_audio_c(self): test_file = self.get_test_loc('ics/qemu-audio/audio.c') expected = [ u'Copyright (c) 2007-2008 The Android Open Source Project', u'Copyright (c) 2003-2005 Vassili Karpov', ] check_detection(expected, test_file) def test_ics_qemu_audio_audio_h(self): test_file = self.get_test_loc('ics/qemu-audio/audio.h') expected = [ u'Copyright (c) 2003-2005 Vassili Karpov', ] check_detection(expected, test_file) def test_ics_qemu_audio_audio_template_h(self): test_file = self.get_test_loc('ics/qemu-audio/audio_template.h') expected = [ u'Copyright (c) 2005 Vassili Karpov', ] check_detection(expected, test_file) def test_ics_qemu_audio_coreaudio_c(self): test_file = self.get_test_loc('ics/qemu-audio/coreaudio.c') expected = [ u'Copyright (c) 2008 The Android 
Open Source Project', u'Copyright (c) 2005 Mike Kronenberg', ] check_detection(expected, test_file) def test_ics_qemu_audio_esdaudio_c(self): test_file = self.get_test_loc('ics/qemu-audio/esdaudio.c') expected = [ u'Copyright (c) 2008-2009 The Android Open Source Project', u'Copyright (c) 2006 Frederick Reeve', ] check_detection(expected, test_file) def test_ics_qemu_audio_fmodaudio_c(self): test_file = self.get_test_loc('ics/qemu-audio/fmodaudio.c') expected = [ u'Copyright (c) 2004-2005 Vassili Karpov', ] check_detection(expected, test_file) def test_ics_qemu_audio_mixeng_c(self): test_file = self.get_test_loc('ics/qemu-audio/mixeng.c') expected = [ u'Copyright (c) 2004-2005 Vassili Karpov', u'Copyright (c) 1998 Fabrice Bellard', u'Copyright 1998 Fabrice Bellard.', ] check_detection(expected, test_file) def test_ics_qemu_audio_rate_template_h(self): test_file = self.get_test_loc('ics/qemu-audio/rate_template.h') expected = [ u'Copyright (c) 2004-2005 Vassili Karpov', u'Copyright (c) 1998 Fabrice Bellard', ] check_detection(expected, test_file) def test_ics_qemu_audio_wavaudio_c(self): test_file = self.get_test_loc('ics/qemu-audio/wavaudio.c') expected = [ u'Copyright (c) 2007 The Android Open Source Project', u'Copyright (c) 2004-2005 Vassili Karpov', ] check_detection(expected, test_file) def test_ics_qemu_audio_winaudio_c(self): test_file = self.get_test_loc('ics/qemu-audio/winaudio.c') expected = [ u'Copyright (c) 2007 The Android Open Source Project', ] check_detection(expected, test_file) def test_ics_qemu_block_bochs_c(self): test_file = self.get_test_loc('ics/qemu-block/bochs.c') expected = [ u'Copyright (c) 2005 Alex Beregszaszi', ] check_detection(expected, test_file) def test_ics_qemu_block_cloop_c(self): test_file = self.get_test_loc('ics/qemu-block/cloop.c') expected = [ u'Copyright (c) 2004 Johannes E. 
Schindelin', ] check_detection(expected, test_file) def test_ics_qemu_block_nbd_c(self): test_file = self.get_test_loc('ics/qemu-block/nbd.c') expected = [ u'Copyright (c) 2008 Bull S.A.S.', u'Copyright (c) 2007 Anthony Liguori <anthony@codemonkey.ws>', ] check_detection(expected, test_file) def test_ics_qemu_block_parallels_c(self): test_file = self.get_test_loc('ics/qemu-block/parallels.c') expected = [ u'Copyright (c) 2007 Alex Beregszaszi', ] check_detection(expected, test_file) def test_ics_qemu_block_qcow_c(self): test_file = self.get_test_loc('ics/qemu-block/qcow.c') expected = [ u'Copyright (c) 2004-2006 Fabrice Bellard', ] check_detection(expected, test_file) def test_ics_qemu_block_vmdk_c(self): test_file = self.get_test_loc('ics/qemu-block/vmdk.c') expected = [ u'Copyright (c) 2004 Fabrice Bellard', u'Copyright (c) 2005 Filip Navara', ] check_detection(expected, test_file) def test_ics_qemu_block_vpc_c(self): test_file = self.get_test_loc('ics/qemu-block/vpc.c') expected = [ u'Copyright (c) 2005 Alex Beregszaszi', u'Copyright (c) 2009 Kevin Wolf <kwolf@suse.de>', ] check_detection(expected, test_file) def test_ics_qemu_block_vvfat_c(self): test_file = self.get_test_loc('ics/qemu-block/vvfat.c') expected = [ u'Copyright (c) 2004,2005 Johannes E. 
Schindelin', ] check_detection(expected, test_file) def test_ics_qemu_distrib_libpng_1_2_19_png_c(self): test_file = self.get_test_loc('ics/qemu-distrib-libpng-1.2.19/png.c') expected = [ u'Copyright (c) 1998-2007 Glenn Randers-Pehrson', u'Copyright (c) 1996, 1997 Andreas Dilger', u'Copyright (c) 1995, 1996 Guy Eric Schalnat, Group 42, Inc.', u'Copyright (c) 1998-2007 Glenn Randers-Pehrson', u'Copyright (c) 1996-1997 Andreas Dilger', u'Copyright (c) 1995-1996 Guy Eric Schalnat, Group 42, Inc.', ] check_detection(expected, test_file) def test_ics_qemu_distrib_libpng_1_2_19_png_h(self): test_file = self.get_test_loc('ics/qemu-distrib-libpng-1.2.19/png.h') expected = [ u'Copyright (c) 1998-2007 Glenn Randers-Pehrson', u'Copyright (c) 1996, 1997 Andreas Dilger', u'Copyright (c) 1995, 1996 Guy Eric Schalnat, Group 42, Inc.', u'Copyright (c) 2004, 2006-2007 Glenn Randers-Pehrson', u'Copyright (c) 2000-2002 Glenn Randers-Pehrson', u'Copyright (c) 1998, 1999, 2000 Glenn Randers-Pehrson', u'Copyright (c) 1996, 1997 Andreas Dilger', u'Copyright (c) 1995, 1996 Guy Eric Schalnat, Group 42, Inc.', ] check_detection(expected, test_file) def test_ics_qemu_distrib_libpng_1_2_19_pngconf_h(self): test_file = self.get_test_loc('ics/qemu-distrib-libpng-1.2.19/pngconf.h') expected = [ u'Copyright (c) 1998-2007 Glenn Randers-Pehrson', u'Copyright (c) 1996, 1997 Andreas Dilger', u'Copyright (c) 1995, 1996 Guy Eric Schalnat, Group 42, Inc.', ] check_detection(expected, test_file) def test_ics_qemu_distrib_libpng_1_2_19_pngerror_c(self): test_file = self.get_test_loc('ics/qemu-distrib-libpng-1.2.19/pngerror.c') expected = [ u'Copyright (c) 1998-2007 Glenn Randers-Pehrson', u'Copyright (c) 1996, 1997 Andreas Dilger', u'Copyright (c) 1995, 1996 Guy Eric Schalnat, Group 42, Inc.', ] check_detection(expected, test_file) def test_ics_qemu_distrib_libpng_1_2_19_pnggccrd_c(self): test_file = self.get_test_loc('ics/qemu-distrib-libpng-1.2.19/pnggccrd.c') expected = [ u'Copyright (c) 1998 Intel 
Corporation', u'Copyright (c) 1999-2002,2007 Greg Roelofs', u'Copyright (c) 1998-2007 Glenn Randers-Pehrson', ] check_detection(expected, test_file) def test_ics_qemu_distrib_libpng_1_2_19_pngmem_c(self): test_file = self.get_test_loc('ics/qemu-distrib-libpng-1.2.19/pngmem.c') expected = [ u'Copyright (c) 1998-2006 Glenn Randers-Pehrson', u'Copyright (c) 1996, 1997 Andreas Dilger', u'Copyright (c) 1995, 1996 Guy Eric Schalnat, Group 42, Inc.', ] check_detection(expected, test_file) def test_ics_qemu_distrib_libpng_1_2_19_pngrtran_c(self): test_file = self.get_test_loc('ics/qemu-distrib-libpng-1.2.19/pngrtran.c') expected = [ u'Copyright (c) 1998-2007 Glenn Randers-Pehrson', u'Copyright (c) 1996, 1997 Andreas Dilger', u'Copyright (c) 1995, 1996 Guy Eric Schalnat, Group 42, Inc.', u'Copyright (c) 1998-01-04 Charles Poynton', ] check_detection(expected, test_file) def test_ics_qemu_distrib_libpng_1_2_19_pngvcrd_c(self): test_file = self.get_test_loc('ics/qemu-distrib-libpng-1.2.19/pngvcrd.c') expected = [ u'Copyright (c) 1998-2007 Glenn Randers-Pehrson', u'Copyright (c) 1998, Intel Corporation', ] check_detection(expected, test_file) def test_ics_qemu_distrib_sdl_1_2_12_copying(self): test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12/COPYING') expected = [ u'Copyright (c) 1991, 1999 Free Software Foundation, Inc.', u'copyrighted by the Free Software Foundation', ] check_detection(expected, test_file) def test_ics_qemu_distrib_sdl_1_2_12_include_begin_code_h(self): test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-include/begin_code.h') expected = [ u'Copyright (c) 1997-2004 Sam Lantinga', ] check_detection(expected, test_file) def test_ics_qemu_distrib_sdl_1_2_12_include_sdl_h(self): test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-include/SDL.h') expected = [ u'Copyright (c) 1997-2006 Sam Lantinga', ] check_detection(expected, test_file) def test_ics_qemu_distrib_sdl_1_2_12_include_sdl_opengl_h(self): test_file = 
self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-include/SDL_opengl.h') expected = [ u'Copyright (c) 1997-2006 Sam Lantinga', u'Copyright (c) 1991-2004 Silicon Graphics, Inc.', ] check_detection(expected, test_file) def test_ics_qemu_distrib_sdl_1_2_12_src_audio_sdl_mixer_mmx_c(self): test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-audio/SDL_mixer_MMX.c') expected = [ u'Copyright (c) 1997-2006 Sam Lantinga', u'Copyright 2002 Stephane Marchesin (stephane.marchesin@wanadoo.fr)', ] check_detection(expected, test_file) def test_ics_qemu_distrib_sdl_1_2_12_src_audio_sdl_mixer_mmx_h(self): test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-audio/SDL_mixer_MMX.h') expected = [ u'Copyright 2002 Stephane Marchesin (stephane.marchesin@wanadoo.fr)', ] check_detection(expected, test_file) def test_ics_qemu_distrib_sdl_1_2_12_src_audio_dc_aica_c(self): test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-audio-dc/aica.c') expected = [ u'(c) 2000 Dan Potter', ] check_detection(expected, test_file) def test_ics_qemu_distrib_sdl_1_2_12_src_audio_sun_sdl_sunaudio_c(self): test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-audio-sun/SDL_sunaudio.c') expected = [ u'Copyright (c) 1997-2006 Sam Lantinga', u'Copyright 1989 by Rich', ] check_detection(expected, test_file) @expectedFailure def test_ics_qemu_distrib_sdl_1_2_12_src_audio_sun_sdl_sunaudio_c_trail_name(self): test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-audio-sun/SDL_sunaudio.c') expected = [ u'Copyright (c) 1997-2006 Sam Lantinga', u'Copyright 1989 by Rich Gopstein and Harris Corporation', ] check_detection(expected, test_file) def test_ics_qemu_distrib_sdl_1_2_12_src_cdrom_macosx_audiofileplayer_c(self): test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-cdrom-macosx/AudioFilePlayer.c') expected = [ u'Copyright (c) 1997, 1998, 1999, 2000, 2001, 2002 Sam Lantinga', ] check_detection(expected, test_file) def 
test_ics_qemu_distrib_sdl_1_2_12_src_cdrom_macosx_sdlosxcaguard_c(self): test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-cdrom-macosx/SDLOSXCAGuard.c') expected = [ u'Copyright (c) 1997, 1998, 1999, 2000, 2001, 2002 Sam Lantinga', u'(c) Copyright 2002 Apple Computer, Inc.', ] check_detection(expected, test_file) def test_ics_qemu_distrib_sdl_1_2_12_src_cdrom_macosx_sdlosxcaguard_h(self): test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-cdrom-macosx/SDLOSXCAGuard.h') expected = [ u'Copyright (c) 1997-2004 Sam Lantinga', u'(c) Copyright 2002 Apple Computer, Inc.', ] check_detection(expected, test_file) def test_ics_qemu_distrib_sdl_1_2_12_src_cdrom_osf_sdl_syscdrom_c(self): test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-cdrom-osf/SDL_syscdrom.c') expected = [ u'DirectMedia Layer Copyright (c) 2003', ] check_detection(expected, test_file) def test_ics_qemu_distrib_sdl_1_2_12_src_hermes_copying_lib(self): test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-hermes/COPYING.LIB') expected = [ u'Copyright (c) 1991 Free Software Foundation, Inc.', u'copyrighted by the Free Software Foundation', ] check_detection(expected, test_file) def test_ics_qemu_distrib_sdl_1_2_12_src_hermes_headmmx_h(self): test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-hermes/HeadMMX.h') expected = [ u'Copyright (c) 1998 Christian Nentwich (c.nentwich@cs.ucl.ac.uk)', ] check_detection(expected, test_file) def test_ics_qemu_distrib_sdl_1_2_12_src_hermes_headx86_h(self): test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-hermes/HeadX86.h') expected = [ u'Copyright (c) 1998 Christian Nentwich (brn@eleet.mcb.at)', ] check_detection(expected, test_file) def test_ics_qemu_distrib_sdl_1_2_12_src_hermes_readme(self): test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-hermes/README') expected = [ u'(c) 1998 Christian Nentwich', u'(c) Glenn Fielder (gaffer@gaffer.org)', ] check_detection(expected, test_file) def 
test_ics_qemu_distrib_sdl_1_2_12_src_joystick_os2_joyos2_h(self): test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-joystick-os2/joyos2.h') expected = [ u'Copyright (c) 1995 IBM Corporation', ] check_detection(expected, test_file) def test_ics_qemu_distrib_sdl_1_2_12_src_loadso_macosx_sdl_dlcompat_c(self): test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-loadso-macosx/SDL_dlcompat.c') expected = [ u'Copyright (c) 1997-2006 Sam Lantinga', u"Copyright (c) 2002 Jorge Acereda <jacereda@users.sourceforge.net> & Peter O'Gorman <ogorman@users.sourceforge.net>", ] check_detection(expected, test_file) def test_ics_qemu_distrib_sdl_1_2_12_src_main_win32_version_rc(self): test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-main-win32/version.rc') expected = [ u'Copyright (c) 2007 Sam Lantinga', ] check_detection(expected, test_file) def test_ics_qemu_distrib_sdl_1_2_12_src_stdlib_sdl_qsort_c(self): test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-stdlib/SDL_qsort.c') expected = [ u'(c) 1998 Gareth McCaughan', ] check_detection(expected, test_file) def test_ics_qemu_distrib_sdl_1_2_12_src_thread_win32_win_ce_semaphore_c(self): test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-thread-win32/win_ce_semaphore.c') expected = [ u'Copyright (c) 1998, Johnson M. 
Hart', ] check_detection(expected, test_file) def test_ics_qemu_distrib_sdl_1_2_12_src_timer_macos_fasttimes_c(self): test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-timer-macos/FastTimes.c') expected = [ u'Copyright (c) Matt Slot, 1999-2000.', ] check_detection(expected, test_file) def test_ics_qemu_distrib_sdl_1_2_12_src_video_sdl_yuv_sw_c(self): test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-video/SDL_yuv_sw.c') expected = [ u'Copyright (c) 1997-2006 Sam Lantinga', u'Copyright (c) 1995 The Regents of the University of California.', u'Copyright (c) 1995 Erik Corry', u'Copyright (c) 1995 Brown University.', ] check_detection(expected, test_file) def test_ics_qemu_distrib_sdl_1_2_12_src_video_fbcon_matrox_regs_h(self): test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-video-fbcon/matrox_regs.h') expected = [ u'Copyright 1996 The XFree86 Project, Inc.', ] check_detection(expected, test_file) def test_ics_qemu_distrib_sdl_1_2_12_src_video_fbcon_riva_mmio_h(self): test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-video-fbcon/riva_mmio.h') expected = [ u'Copyright 1993-1999 NVIDIA, Corporation.', u'Copyright 1993-1999 NVIDIA, Corporation.', ] check_detection(expected, test_file) def test_ics_qemu_distrib_sdl_1_2_12_src_video_maccommon_sdl_macwm_c(self): test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-video-maccommon/SDL_macwm.c') expected = [ u'Copyright (c) 1997-2006 Sam Lantinga', u'Copyright (c) 1999 Apple Computer, Inc.', ] check_detection(expected, test_file) def test_ics_qemu_distrib_sdl_1_2_12_src_video_nanox_sdl_nxevents_c(self): test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-video-nanox/SDL_nxevents.c') expected = [ u'Copyright (c) 1997-2004 Sam Lantinga', u'Copyright (c) 2001 Hsieh-Fu Tsai', u'Copyright (c) 2002 Greg Haerr <greg@censoft.com>', ] check_detection(expected, test_file) def test_ics_qemu_distrib_sdl_1_2_12_src_video_nanox_sdl_nxevents_c_h(self): test_file = 
self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-video-nanox/SDL_nxevents_c.h') expected = [ u'Copyright (c) 1997-2004 Sam Lantinga', u'Copyright (c) 2001 Hsieh-Fu Tsai', ] check_detection(expected, test_file) def test_ics_qemu_distrib_sdl_1_2_12_src_video_quartz_cgs_h(self): test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-video-quartz/CGS.h') expected = [ u'Copyright (c) 1997-2003 Sam Lantinga', ] check_detection(expected, test_file) def test_ics_qemu_distrib_sdl_1_2_12_src_video_xext_extensions_extutil_h(self): test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-video-Xext-extensions/extutil.h') expected = [ u'Copyright 1989, 1998 The Open Group', ] check_detection(expected, test_file) def test_ics_qemu_distrib_sdl_1_2_12_src_video_xext_extensions_panoramixext_h(self): test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-video-Xext-extensions/panoramiXext.h') expected = [ u'Copyright (c) 1991, 1997 Digital Equipment Corporation, Maynard, Massachusetts.', ] check_detection(expected, test_file) def test_ics_qemu_distrib_sdl_1_2_12_src_video_xext_extensions_xf86dga_h(self): test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-video-Xext-extensions/xf86dga.h') expected = [ u'Copyright (c) 1999 XFree86 Inc', ] check_detection(expected, test_file) def test_ics_qemu_distrib_sdl_1_2_12_src_video_xext_extensions_xf86dga1_h(self): test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-video-Xext-extensions/xf86dga1.h') expected = [ u'Copyright (c) 1995 Jon Tombs', u'Copyright (c) 1995 XFree86 Inc', ] check_detection(expected, test_file) def test_ics_qemu_distrib_sdl_1_2_12_src_video_xext_extensions_xf86dga1str_h(self): test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-video-Xext-extensions/xf86dga1str.h') expected = [ u'Copyright (c) 1995 Jon Tombs', u'Copyright (c) 1995 XFree86 Inc.', ] check_detection(expected, test_file) def test_ics_qemu_distrib_sdl_1_2_12_src_video_xext_extensions_xf86vmode_h(self): 
test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-video-Xext-extensions/xf86vmode.h') expected = [ u'Copyright 1995 Kaleb', ] check_detection(expected, test_file) @expectedFailure def test_ics_qemu_distrib_sdl_1_2_12_src_video_xext_extensions_xf86vmode_h_trail_caps(self): test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-video-Xext-extensions/xf86vmode.h') expected = [ u'Copyright 1995 Kaleb S. KEITHLEY', ] check_detection(expected, test_file) def test_ics_qemu_distrib_sdl_1_2_12_src_video_xext_extensions_xme_h(self): test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-video-Xext-extensions/xme.h') expected = [ u'Copyright 1993-2001 by Xi Graphics, Inc.', ] check_detection(expected, test_file) def test_ics_qemu_distrib_sdl_1_2_12_src_video_xext_extensions_xv_h_trail_name(self): test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-video-Xext-extensions/Xv.h') expected = [ u'Copyright 1991 by Digital Equipment Corporation, Maynard, Massachusetts, and the Massachusetts Institute of Technology, Cambridge, Massachusetts.', ] check_detection(expected, test_file) def test_ics_qemu_distrib_sdl_1_2_12_src_video_xext_xv_xvlibint_h(self): test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-video-Xext-Xv/Xvlibint.h') expected = [ u'Copyright 1987 by Digital Equipment Corporation, Maynard, Massachusetts, and the Massachusetts Institute of Technology, Cambridge, Massachusetts.', ] check_detection(expected, test_file) def test_ics_qemu_distrib_sdl_1_2_12_src_video_xext_xxf86dga_xf86dga_c(self): test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-video-Xext-Xxf86dga/XF86DGA.c') expected = [ u'Copyright (c) 1995 Jon Tombs', u'Copyright (c) 1995,1996 The XFree86 Project, Inc', ] check_detection(expected, test_file) def test_ics_qemu_distrib_sdl_1_2_12_src_video_xext_xxf86vm_xf86vmode_c(self): test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-video-Xext-Xxf86vm/XF86VMode.c') expected = [ u'Copyright (c) 
1995 Kaleb', ] check_detection(expected, test_file) @expectedFailure def test_ics_qemu_distrib_sdl_1_2_12_src_video_xext_xxf86vm_xf86vmode_c_trail_caps(self): test_file = self.get_test_loc('ics/qemu-distrib-sdl-1.2.12-src-video-Xext-Xxf86vm/XF86VMode.c') expected = [ u'Copyright (c) 1995 Kaleb S. KEITHLEY', ] check_detection(expected, test_file) def test_ics_qemu_distrib_zlib_1_2_3_compress_c(self): test_file = self.get_test_loc('ics/qemu-distrib-zlib-1.2.3/compress.c') expected = [ u'Copyright (c) 1995-2003 Jean-loup Gailly.', ] check_detection(expected, test_file) def test_ics_qemu_distrib_zlib_1_2_3_crc32_c(self): test_file = self.get_test_loc('ics/qemu-distrib-zlib-1.2.3/crc32.c') expected = [ u'Copyright (c) 1995-2005 Mark Adler', ] check_detection(expected, test_file) def test_ics_qemu_distrib_zlib_1_2_3_deflate_c(self): test_file = self.get_test_loc('ics/qemu-distrib-zlib-1.2.3/deflate.c') expected = [ u'Copyright (c) 1995-2005 Jean-loup Gailly.', u'Copyright 1995-2005 Jean-loup Gailly', ] check_detection(expected, test_file) def test_ics_qemu_distrib_zlib_1_2_3_deflate_h(self): test_file = self.get_test_loc('ics/qemu-distrib-zlib-1.2.3/deflate.h') expected = [ u'Copyright (c) 1995-2004 Jean-loup Gailly', ] check_detection(expected, test_file) def test_ics_qemu_distrib_zlib_1_2_3_gzio_c(self): test_file = self.get_test_loc('ics/qemu-distrib-zlib-1.2.3/gzio.c') expected = [ u'Copyright (c) 1995-2005 Jean-loup Gailly.', ] check_detection(expected, test_file) def test_ics_qemu_distrib_zlib_1_2_3_inffast_h(self): test_file = self.get_test_loc('ics/qemu-distrib-zlib-1.2.3/inffast.h') expected = [ u'Copyright (c) 1995-2003 Mark Adler', ] check_detection(expected, test_file) def test_ics_qemu_distrib_zlib_1_2_3_inftrees_c(self): test_file = self.get_test_loc('ics/qemu-distrib-zlib-1.2.3/inftrees.c') expected = [ u'Copyright (c) 1995-2005 Mark Adler', u'Copyright 1995-2005 Mark Adler', ] check_detection(expected, test_file) def 
test_ics_qemu_distrib_zlib_1_2_3_trees_c(self): test_file = self.get_test_loc('ics/qemu-distrib-zlib-1.2.3/trees.c') expected = [ u'Copyright (c) 1995-2005 Jean-loup Gailly', ] check_detection(expected, test_file) def test_ics_qemu_elff_dwarf_h(self): test_file = self.get_test_loc('ics/qemu-elff/dwarf.h') expected = [ u'Copyright (c) 2000,2001,2003,2004,2005,2006 Silicon Graphics, Inc.', u'Portions Copyright 2002,2007 Sun Microsystems, Inc.', u'Portions Copyright 2007-2009 David Anderson.', ] check_detection(expected, test_file) def test_ics_qemu_gdb_xml_arm_core_xml(self): test_file = self.get_test_loc('ics/qemu-gdb-xml/arm-core.xml') expected = [ u'Copyright (c) 2008 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_qemu_gdb_xml_power_altivec_xml(self): test_file = self.get_test_loc('ics/qemu-gdb-xml/power-altivec.xml') expected = [ u'Copyright (c) 2007, 2008 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_qemu_hw_apic_c(self): test_file = self.get_test_loc('ics/qemu-hw/apic.c') expected = [ u'Copyright (c) 2004-2005 Fabrice Bellard', ] check_detection(expected, test_file) def test_ics_qemu_hw_arm_misc_h(self): test_file = self.get_test_loc('ics/qemu-hw/arm-misc.h') expected = [ u'Copyright (c) 2006 CodeSourcery.', ] check_detection(expected, test_file) def test_ics_qemu_hw_armv7m_c(self): test_file = self.get_test_loc('ics/qemu-hw/armv7m.c') expected = [ u'Copyright (c) 2006-2007 CodeSourcery.', ] check_detection(expected, test_file) def test_ics_qemu_hw_baum_h(self): test_file = self.get_test_loc('ics/qemu-hw/baum.h') expected = [ u'Copyright (c) 2008 Samuel Thibault', ] check_detection(expected, test_file) def test_ics_qemu_hw_bt_h(self): test_file = self.get_test_loc('ics/qemu-hw/bt.h') expected = [ u'Copyright (c) 2007 OpenMoko, Inc.', u'Copyright (c) 2000-2001 Qualcomm Incorporated', u'Copyright (c) 2002-2003 Maxim Krasnyansky <maxk@qualcomm.com>', u'Copyright (c) 2002-2006 Marcel 
Holtmann <marcel@holtmann.org>', ] check_detection(expected, test_file) def test_ics_qemu_hw_bt_hci_c(self): test_file = self.get_test_loc('ics/qemu-hw/bt-hci.c') expected = [ u'Copyright (c) 2007 OpenMoko, Inc.', u'Copyright (c) 2008 Andrzej Zaborowski <balrog@zabor.org>', ] check_detection(expected, test_file) def test_ics_qemu_hw_bt_hid_c(self): test_file = self.get_test_loc('ics/qemu-hw/bt-hid.c') expected = [ u'Copyright (c) 2007-2008 OpenMoko, Inc.', ] check_detection(expected, test_file) def test_ics_qemu_hw_dma_c(self): test_file = self.get_test_loc('ics/qemu-hw/dma.c') expected = [ u'Copyright (c) 2003-2004 Vassili Karpov', ] check_detection(expected, test_file) def test_ics_qemu_hw_fw_cfg_c(self): test_file = self.get_test_loc('ics/qemu-hw/fw_cfg.c') expected = [ u'Copyright (c) 2008 Gleb Natapov', ] check_detection(expected, test_file) def test_ics_qemu_hw_irq_c(self): test_file = self.get_test_loc('ics/qemu-hw/irq.c') expected = [ u'Copyright (c) 2007 CodeSourcery.', ] check_detection(expected, test_file) def test_ics_qemu_hw_mmc_h(self): test_file = self.get_test_loc('ics/qemu-hw/mmc.h') expected = [ u'Copyright 2002 Hewlett-Packard Company', ] check_detection(expected, test_file) def test_ics_qemu_hw_msmouse_c(self): test_file = self.get_test_loc('ics/qemu-hw/msmouse.c') expected = [ u'Copyright (c) 2008 Lubomir Rintel', ] check_detection(expected, test_file) def test_ics_qemu_hw_power_supply_h(self): test_file = self.get_test_loc('ics/qemu-hw/power_supply.h') expected = [ u'Copyright (c) 2007 Anton Vorontsov <cbou@mail.ru>', u'Copyright (c) 2004 Szabolcs Gyurko', u'Copyright (c) 2003 Ian Molton <spyro@f2s.com>', ] check_detection(expected, test_file) def test_ics_qemu_hw_pxa_h(self): test_file = self.get_test_loc('ics/qemu-hw/pxa.h') expected = [ u'Copyright (c) 2006 Openedhand Ltd.', ] check_detection(expected, test_file) def test_ics_qemu_hw_qdev_c(self): test_file = self.get_test_loc('ics/qemu-hw/qdev.c') expected = [ u'Copyright (c) 2009 
CodeSourcery', ] check_detection(expected, test_file) def test_ics_qemu_hw_sd_h(self): test_file = self.get_test_loc('ics/qemu-hw/sd.h') expected = [ u'Copyright (c) 2005-2007 Pierre Ossman', ] check_detection(expected, test_file) def test_ics_qemu_hw_smbios_c(self): test_file = self.get_test_loc('ics/qemu-hw/smbios.c') expected = [ u'Copyright (c) 2009 Hewlett-Packard Development Company', ] check_detection(expected, test_file) def test_ics_qemu_hw_smc91c111_c(self): test_file = self.get_test_loc('ics/qemu-hw/smc91c111.c') expected = [ u'Copyright (c) 2005 CodeSourcery, LLC.', ] check_detection(expected, test_file) def test_ics_qemu_hw_usb_hid_c(self): test_file = self.get_test_loc('ics/qemu-hw/usb-hid.c') expected = [ u'Copyright (c) 2005 Fabrice Bellard', u'Copyright (c) 2007 OpenMoko, Inc.', ] check_detection(expected, test_file) def test_ics_qemu_hw_usb_hub_c(self): test_file = self.get_test_loc('ics/qemu-hw/usb-hub.c') expected = [ u'Copyright (c) 2005 Fabrice Bellard', ] check_detection(expected, test_file) def test_ics_qemu_hw_usb_ohci_c(self): test_file = self.get_test_loc('ics/qemu-hw/usb-ohci.c') expected = [ u'Copyright (c) 2004 Gianni Tedesco', u'Copyright (c) 2006 CodeSourcery', u'Copyright (c) 2006 Openedhand Ltd.', ] check_detection(expected, test_file) def test_ics_qemu_pc_bios_bochs_bochs_h(self): test_file = self.get_test_loc('ics/qemu-pc-bios-bochs/bochs.h') expected = [ u'Copyright (c) 2002 MandrakeSoft S.A.', ] check_detection(expected, test_file) def test_ics_qemu_pc_bios_bochs_config_h_in(self): test_file = self.get_test_loc('ics/qemu-pc-bios-bochs/config.h.in') expected = [ u'Copyright (c) 2001 MandrakeSoft S.A.', ] check_detection(expected, test_file) def test_ics_qemu_pc_bios_bochs_configure(self): test_file = self.get_test_loc('ics/qemu-pc-bios-bochs/configure') expected = [ u'Copyright (c) 1992, 1993, 1994, 1995, 1996, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.', u'Copyright (c) 1992, 1993, 1994, 
1995, 1996, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.', u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001 Free Software Foundation, Inc.', u'Copyright (c) 2006 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_qemu_pc_bios_bochs_makefile_in(self): test_file = self.get_test_loc('ics/qemu-pc-bios-bochs/Makefile.in') expected = [ u'Copyright (c) 2002 MandrakeSoft S.A.', ] check_detection(expected, test_file) def test_ics_qemu_pc_bios_bochs_bios_acpi_dsdt_dsl(self): test_file = self.get_test_loc('ics/qemu-pc-bios-bochs-bios/acpi-dsdt.dsl') expected = [ u'Copyright (c) 2006 Fabrice Bellard', ] check_detection(expected, test_file) def test_ics_qemu_pc_bios_bochs_bios_acpi_dsdt_hex_extra_support(self): test_file = self.get_test_loc('ics/qemu-pc-bios-bochs-bios/acpi-dsdt.hex') expected = [ u'Copyright (c) 2000 - 2006 Intel Corporation', ] check_detection(expected, test_file) def test_ics_qemu_pc_bios_bochs_bios_rombios_c(self): test_file = self.get_test_loc('ics/qemu-pc-bios-bochs-bios/rombios.c') expected = [ u'Copyright (c) 2002 MandrakeSoft S.A.', u'(c) 2002 MandrakeSoft S.A.', u'(c) by Joseph Gil', ] check_detection(expected, test_file) def test_ics_qemu_pc_bios_bochs_bios_rombios_h(self): test_file = self.get_test_loc('ics/qemu-pc-bios-bochs-bios/rombios.h') expected = [ u'Copyright (c) 2006 Volker Ruppert', ] check_detection(expected, test_file) def test_ics_qemu_pc_bios_vgabios_clext_c(self): test_file = self.get_test_loc('ics/qemu-pc-bios-vgabios/clext.c') expected = [ u'Copyright (c) 2004 Makoto Suzuki', ] check_detection(expected, test_file) def test_ics_qemu_pc_bios_vgabios_readme(self): test_file = self.get_test_loc('ics/qemu-pc-bios-vgabios/README') expected = [ u'(c) by Joseph Gil', ] check_detection(expected, test_file) def test_ics_qemu_pc_bios_vgabios_vbe_c_extra_byte(self): test_file = self.get_test_loc('ics/qemu-pc-bios-vgabios/vbe.c') expected = [ u'Copyright (c) 2002 Jeroen 
Janssen', u'(c) 2003 http://savannah.nongnu.org/projects/vgabios/', ] check_detection(expected, test_file) def test_ics_qemu_pc_bios_vgabios_vgabios_c(self): test_file = self.get_test_loc('ics/qemu-pc-bios-vgabios/vgabios.c') expected = [ u'Copyright (c) 2001-2008 the LGPL VGABios developers Team', u'(c) by Joseph Gil', u'(c) 2008 the LGPL VGABios developers Team', ] check_detection(expected, test_file) def test_ics_qemu_pc_bios_vgabios_vgafonts_h(self): test_file = self.get_test_loc('ics/qemu-pc-bios-vgabios/vgafonts.h') expected = [ u'(c) by Joseph Gil', ] check_detection(expected, test_file) def test_ics_qemu_slirp_cksum_c(self): test_file = self.get_test_loc('ics/qemu-slirp/cksum.c') expected = [ u'Copyright (c) 1988, 1992, 1993 The Regents of the University of California.', ] check_detection(expected, test_file) def test_ics_qemu_slirp_copyright(self): test_file = self.get_test_loc('ics/qemu-slirp/COPYRIGHT') expected = [ u'Danny Gasparovski. Copyright (c), 1995,1996', u'Copyright (c) 1995,1996 Danny Gasparovski.' 
] check_detection(expected, test_file) def test_ics_qemu_slirp_debug_c(self): test_file = self.get_test_loc('ics/qemu-slirp/debug.c') expected = [ u'Copyright (c) 1995 Danny Gasparovski.', u'Portions copyright (c) 2000 Kelly Price.', ] check_detection(expected, test_file) def test_ics_qemu_slirp_debug_h(self): test_file = self.get_test_loc('ics/qemu-slirp/debug.h') expected = [ u'Copyright (c) 1995 Danny Gasparovski.', ] check_detection(expected, test_file) def test_ics_qemu_slirp_ip_icmp_c(self): test_file = self.get_test_loc('ics/qemu-slirp/ip_icmp.c') expected = [ u'Copyright (c) 1982, 1986, 1988, 1993 The Regents of the University of California.', ] check_detection(expected, test_file) def test_ics_qemu_slirp_ip_input_c(self): test_file = self.get_test_loc('ics/qemu-slirp/ip_input.c') expected = [ u'Copyright (c) 1982, 1986, 1988, 1993 The Regents of the University of California.', u'Copyright (c) 1995 Danny Gasparovski.', ] check_detection(expected, test_file) def test_ics_qemu_slirp_ip_output_c(self): test_file = self.get_test_loc('ics/qemu-slirp/ip_output.c') expected = [ u'Copyright (c) 1982, 1986, 1988, 1990, 1993 The Regents of the University of California.', u'Copyright (c) 1995 Danny Gasparovski.', ] check_detection(expected, test_file) def test_ics_qemu_slirp_mbuf_c(self): test_file = self.get_test_loc('ics/qemu-slirp/mbuf.c') expected = [ u'Copyright (c) 1995 Danny Gasparovski', ] check_detection(expected, test_file) def test_ics_qemu_slirp_misc_c(self): test_file = self.get_test_loc('ics/qemu-slirp/misc.c') expected = [ u'Copyright (c) 1995 Danny Gasparovski.', ] check_detection(expected, test_file) def test_ics_qemu_slirp_tcp_input_c(self): test_file = self.get_test_loc('ics/qemu-slirp/tcp_input.c') expected = [ u'Copyright (c) 1982, 1986, 1988, 1990, 1993, 1994 The Regents of the University of California.', u'Copyright (c) 1995 Danny Gasparovski.', ] check_detection(expected, test_file) def test_ics_qemu_slirp_tcp_timer_c(self): test_file = 
self.get_test_loc('ics/qemu-slirp/tcp_timer.c') expected = [ u'Copyright (c) 1982, 1986, 1988, 1990, 1993 The Regents of the University of California.', ] check_detection(expected, test_file) def test_ics_qemu_slirp_tcp_var_h(self): test_file = self.get_test_loc('ics/qemu-slirp/tcp_var.h') expected = [ u'Copyright (c) 1982, 1986, 1993, 1994 The Regents of the University of California.', ] check_detection(expected, test_file) def test_ics_qemu_slirp_tftp_c(self): test_file = self.get_test_loc('ics/qemu-slirp/tftp.c') expected = [ u'Copyright (c) 2004 Magnus Damm <damm@opensource.se>', ] check_detection(expected, test_file) def test_ics_qemu_slirp_android_helper_h(self): test_file = self.get_test_loc('ics/qemu-slirp-android/helper.h') expected = [ u'Copyright (c) 2009 The Android Open Source Project', ] check_detection(expected, test_file) def test_ics_qemu_target_arm_iwmmxt_helper_c(self): test_file = self.get_test_loc('ics/qemu-target-arm/iwmmxt_helper.c') expected = [ u'Copyright (c) 2007 OpenedHand, Ltd.', u'Copyright (c) 2008 CodeSourcery', ] check_detection(expected, test_file) def test_ics_qemu_target_arm_neon_helper_c(self): test_file = self.get_test_loc('ics/qemu-target-arm/neon_helper.c') expected = [ u'Copyright (c) 2007, 2008 CodeSourcery.', ] check_detection(expected, test_file) def test_ics_qemu_target_arm_op_helper_c(self): test_file = self.get_test_loc('ics/qemu-target-arm/op_helper.c') expected = [ u'Copyright (c) 2005-2007 CodeSourcery, LLC', ] check_detection(expected, test_file) def test_ics_qemu_target_arm_translate_c(self): test_file = self.get_test_loc('ics/qemu-target-arm/translate.c') expected = [ u'Copyright (c) 2003 Fabrice Bellard', u'Copyright (c) 2005-2007 CodeSourcery', u'Copyright (c) 2007 OpenedHand, Ltd.', ] check_detection(expected, test_file) def test_ics_qemu_target_i386_helper_template_h(self): test_file = self.get_test_loc('ics/qemu-target-i386/helper_template.h') expected = [ u'Copyright (c) 2008 Fabrice Bellard', ] 
check_detection(expected, test_file) def test_ics_qemu_target_i386_kvm_c(self): test_file = self.get_test_loc('ics/qemu-target-i386/kvm.c') expected = [ u'Copyright (c) 2006-2008 Qumranet Technologies', u'Copyright IBM, Corp. 2008', ] check_detection(expected, test_file) def test_ics_qemu_target_i386_ops_sse_h(self): test_file = self.get_test_loc('ics/qemu-target-i386/ops_sse.h') expected = [ u'Copyright (c) 2005 Fabrice Bellard', u'Copyright (c) 2008 Intel Corporation', ] check_detection(expected, test_file) def test_ics_qemu_target_i386_ops_sse_header_h(self): test_file = self.get_test_loc('ics/qemu-target-i386/ops_sse_header.h') expected = [ u'Copyright (c) 2005 Fabrice Bellard', ] check_detection(expected, test_file) def test_ics_qemu_tcg_tcg_c(self): test_file = self.get_test_loc('ics/qemu-tcg/tcg.c') expected = [ u'Copyright (c) 2008 Fabrice Bellard', ] check_detection(expected, test_file) def test_ics_qemu_tcg_arm_tcg_target_c(self): test_file = self.get_test_loc('ics/qemu-tcg-arm/tcg-target.c') expected = [ u'Copyright (c) 2008 Andrzej Zaborowski', ] check_detection(expected, test_file) def test_ics_qemu_tcg_arm_tcg_target_h(self): test_file = self.get_test_loc('ics/qemu-tcg-arm/tcg-target.h') expected = [ u'Copyright (c) 2008 Fabrice Bellard', u'Copyright (c) 2008 Andrzej Zaborowski', ] check_detection(expected, test_file) def test_ics_quake_androidmanifest_xml(self): test_file = self.get_test_loc('ics/quake/AndroidManifest.xml') expected = [ u'Copyright 2007, The Android Open Source Project', ] check_detection(expected, test_file) def test_ics_quake_notice(self): test_file = self.get_test_loc('ics/quake/NOTICE') expected = [ u'Copyright (c) 1996-2000 Id Software Inc.', u'Copyright (c) 1989, 1991 Free Software Foundation, Inc.', u'copyrighted by the Free Software Foundation', u'Copyright (c) 1989, 1991 Free Software Foundation, Inc.', u'copyrighted by the Free Software Foundation', ] check_detection(expected, test_file) def 
test_ics_quake_quake_src_gnu_txt(self): test_file = self.get_test_loc('ics/quake-quake-src/gnu.txt') expected = [ u'Copyright (c) 1989, 1991 Free Software Foundation, Inc.', u'copyrighted by the Free Software Foundation', ] check_detection(expected, test_file) def test_ics_quake_quake_src_qw_glqwcl_spec_sh(self): test_file = self.get_test_loc('ics/quake-quake-src-QW/glqwcl.spec.sh') expected = [ u'Copyright Restricted Icon', ] check_detection(expected, test_file) def test_ics_quake_quake_src_qw_client_adivtab_h(self): test_file = self.get_test_loc('ics/quake-quake-src-QW-client/adivtab.h') expected = [ u'Copyright (c) 1999, 2000 Id Software Inc.', ] check_detection(expected, test_file) def test_ics_quake_quake_src_qw_client_anorms_h(self): test_file = self.get_test_loc('ics/quake-quake-src-QW-client/anorms.h') expected = [ u'Copyright (c) 1996-1997 Id Software, Inc.', ] check_detection(expected, test_file) def test_ics_quake_quake_src_qw_client_cd_linux_c(self): test_file = self.get_test_loc('ics/quake-quake-src-QW-client/cd_linux.c') expected = [ u'Copyright (c) 1996-1997 Id Software, Inc.', u'(c) 1996 Id Software, Inc.', ] check_detection(expected, test_file) def test_ics_quake_quake_src_qw_client_cl_demo_c(self): test_file = self.get_test_loc('ics/quake-quake-src-QW-client/cl_demo.c') expected = [ u'Copyright (c) 1996-1997 Id Software, Inc.', ] check_detection(expected, test_file) def test_ics_quake_quake_src_qw_client_exitscrn_txt(self): test_file = self.get_test_loc('ics/quake-quake-src-QW-client/exitscrn.txt') expected = [ u'(c) 1996, 1997 Id Software, inc.', ] check_detection(expected, test_file) def test_ics_quake_quake_src_qw_client_keys_h(self): test_file = self.get_test_loc('ics/quake-quake-src-QW-client/keys.h') expected = [ u'Copyright (c) 1996-1997 Id Software, Inc.', u'(c) Mouse Wheel Support', ] check_detection(expected, test_file) def test_ics_quake_quake_src_qw_client_md4_c(self): test_file = 
self.get_test_loc('ics/quake-quake-src-QW-client/md4.c') expected = [ u'Copyright (c) 1996-1997 Id Software, Inc.', u'Copyright (c) 1991-2, RSA Data Security, Inc.', u'Copyright (c) 1990-2, RSA Data Security, Inc.', ] check_detection(expected, test_file) def test_ics_quake_quake_src_qw_client_menu_c(self): test_file = self.get_test_loc('ics/quake-quake-src-QW-client/menu.c') expected = [ u'Copyright (c) 1996-1997 Id Software, Inc.', u'(c) 1996 Id', ] check_detection(expected, test_file) @expectedFailure def test_ics_quake_quake_src_qw_client_menu_c_trail_name(self): test_file = self.get_test_loc('ics/quake-quake-src-QW-client/menu.c') expected = [ u'Copyright (c) 1996-1997 Id Software, Inc.', u'(c) 1996 Id Software', ] check_detection(expected, test_file) def test_ics_quake_quake_src_qw_client_qwcl_plg(self): test_file = self.get_test_loc('ics/quake-quake-src-QW-client/qwcl.plg') expected = [ u'Copyright (c) Microsoft Corp 1984-1998.', u'Copyright (c) Microsoft Corp 1981-1993.', u'Copyright (c) Microsoft Corp 1984-1998.', u'Copyright (c) Microsoft Corp 1981-1993.', u'Copyright (c) Microsoft Corp 1984-1998.', u'Copyright (c) Microsoft Corp 1981-1993.', u'Copyright (c) Microsoft Corp 1984-1998.', u'Copyright (c) Microsoft Corp 1981-1993.', u'Copyright (c) Microsoft Corp 1984-1998.', u'Copyright (c) Microsoft Corp 1981-1993.', u'Copyright (c) Microsoft Corp 1984-1998.', u'Copyright (c) Microsoft Corp 1981-1993.', u'Copyright (c) Microsoft Corp 1984-1998.', u'Copyright (c) Microsoft Corp 1981-1993.', u'Copyright (c) Microsoft Corp 1984-1998.', u'Copyright (c) Microsoft Corp 1981-1993.', u'Copyright (c) Microsoft Corp 1984-1998.', u'Copyright (c) Microsoft Corp 1981-1993.', u'Copyright (c) Microsoft Corp 1984-1998.', u'Copyright (c) Microsoft Corp 1981-1993.', u'Copyright (c) Microsoft Corp 1984-1998.', u'Copyright (c) Microsoft Corp 1981-1993.', u'Copyright (c) Microsoft Corp 1984-1998.', u'Copyright (c) Microsoft Corp 1981-1993.', u'Copyright (c) Microsoft Corp 
1984-1998.', u'Copyright (c) Microsoft Corp 1981-1993.', u'Copyright (c) Microsoft Corp 1984-1998.', u'Copyright (c) Microsoft Corp 1981-1993.', u'Copyright (c) Microsoft Corp 1984-1998.', u'Copyright (c) Microsoft Corp 1981-1993.', u'Copyright (c) Microsoft Corp 1984-1998.', u'Copyright (c) Microsoft Corp 1981-1993.', u'Copyright (c) Microsoft Corp 1984-1998.', u'Copyright (c) Microsoft Corp 1981-1993.', ] check_detection(expected, test_file) def test_ics_quake_quake_src_qw_dxsdk_sdk_inc_d3d_h(self): test_file = self.get_test_loc('ics/quake-quake-src-QW-dxsdk-sdk-inc/d3d.h') expected = [ u'Copyright (c) 1995-1996 Microsoft Corporation.', ] check_detection(expected, test_file) def test_ics_quake_quake_src_qw_dxsdk_sdk_inc_ddraw_h(self): test_file = self.get_test_loc('ics/quake-quake-src-QW-dxsdk-sdk-inc/ddraw.h') expected = [ u'Copyright (c) 1994-1996 Microsoft Corporation.', ] check_detection(expected, test_file) def test_ics_quake_quake_src_qw_dxsdk_sdk_inc_dinput_h(self): test_file = self.get_test_loc('ics/quake-quake-src-QW-dxsdk-sdk-inc/dinput.h') expected = [ u'Copyright (c) 1996 Microsoft Corporation.', ] check_detection(expected, test_file) def test_ics_quake_quake_src_qw_dxsdk_sdk_inc_dplay_h(self): test_file = self.get_test_loc('ics/quake-quake-src-QW-dxsdk-sdk-inc/dplay.h') expected = [ u'Copyright (c) 1994-1995 Microsoft Corporation.', ] check_detection(expected, test_file) def test_ics_quake_quake_src_qw_dxsdk_sdk_inc_dsound_h(self): test_file = self.get_test_loc('ics/quake-quake-src-QW-dxsdk-sdk-inc/dsound.h') expected = [ u'Copyright (c) 1995,1996 Microsoft Corporation.', ] check_detection(expected, test_file) def test_ics_quake_quake_src_qw_scitech_include_debug_h(self): test_file = self.get_test_loc('ics/quake-quake-src-QW-scitech-include/debug.h') expected = [ u'Copyright (c) 1996 SciTech Software', ] check_detection(expected, test_file) def test_ics_quake_quake_src_qw_scitech_include_mgldos_h(self): test_file = 
self.get_test_loc('ics/quake-quake-src-QW-scitech-include/mgldos.h') expected = [ u'Copyright (c) 1996 SciTech Software.', ] check_detection(expected, test_file) def test_ics_quake_quake_src_winquake_3dfx_txt_trail_name(self): test_file = self.get_test_loc('ics/quake-quake-src-WinQuake/3dfx.txt') expected = [ u'Copyright 1997 3Dfx Interactive, Inc.', ] check_detection(expected, test_file) def test_ics_quake_quake_src_winquake_cl_input_cpp(self): test_file = self.get_test_loc('ics/quake-quake-src-WinQuake/cl_input.cpp') expected = [ u'Copyright (c) 1996-1997 Id Software, Inc.', u'(c) 1996 Id Software, Inc.', ] check_detection(expected, test_file) def test_ics_quake_quake_src_winquake_conproc_cpp(self): test_file = self.get_test_loc('ics/quake-quake-src-WinQuake/conproc.cpp') expected = [ u'Copyright (c) 1996-1997 Id Software, Inc.', ] check_detection(expected, test_file) def test_ics_quake_quake_src_winquake_menu_cpp(self): test_file = self.get_test_loc('ics/quake-quake-src-WinQuake/menu.cpp') expected = [ u'Copyright (c) 1996-1997 Id Software, Inc.', u'(c) 1996 Id Software, inc.', ] check_detection(expected, test_file) def test_ics_quake_quake_src_winquake_mpdosock_h(self): test_file = self.get_test_loc('ics/quake-quake-src-WinQuake/mpdosock.h') expected = [ u'Copyright (c) 1993-1995, Microsoft Corp.', u'Copyright (c) 1982-1986 Regents of the University of California.', ] check_detection(expected, test_file) def test_ics_quake_quake_src_winquake_sys_linux_cpp(self): test_file = self.get_test_loc('ics/quake-quake-src-WinQuake/sys_linux.cpp') expected = [ u'(c) 1996 Id Software, inc.', u'(c) 1996 Id Software, inc.', ] check_detection(expected, test_file) def test_ics_quake_quake_src_winquake_winquake_plg(self): test_file = self.get_test_loc('ics/quake-quake-src-WinQuake/WinQuake.plg') expected = [ u'Copyright (c) Microsoft Corp 1984-1998.', u'Copyright (c) Microsoft Corp 1981-1993.', u'Copyright (c) Microsoft Corp 1984-1998.', u'Copyright (c) Microsoft Corp 
1981-1993.', u'Copyright (c) Microsoft Corp 1984-1998.', u'Copyright (c) Microsoft Corp 1981-1993.', u'Copyright (c) Microsoft Corp 1984-1998.', u'Copyright (c) Microsoft Corp 1981-1993.', u'Copyright (c) Microsoft Corp 1984-1998.', u'Copyright (c) Microsoft Corp 1981-1993.', u'Copyright (c) Microsoft Corp 1984-1998.', u'Copyright (c) Microsoft Corp 1981-1993.', u'Copyright (c) Microsoft Corp 1984-1998.', u'Copyright (c) Microsoft Corp 1981-1993.', u'Copyright (c) Microsoft Corp 1984-1998.', u'Copyright (c) Microsoft Corp 1981-1993.', u'Copyright (c) Microsoft Corp 1984-1998.', u'Copyright (c) Microsoft Corp 1981-1993.', u'Copyright (c) Microsoft Corp 1984-1998.', u'Copyright (c) Microsoft Corp 1981-1993.', u'Copyright (c) Microsoft Corp 1984-1998.', u'Copyright (c) Microsoft Corp 1981-1993.', u'Copyright (c) Microsoft Corp 1984-1998.', u'Copyright (c) Microsoft Corp 1981-1993.', u'Copyright (c) Microsoft Corp 1984-1998.', u'Copyright (c) Microsoft Corp 1981-1993.', u'Copyright (c) Microsoft Corp 1984-1998.', u'Copyright (c) Microsoft Corp 1981-1993.', u'Copyright (c) Microsoft Corp 1984-1998.', u'Copyright (c) Microsoft Corp 1981-1993.', u'Copyright (c) Microsoft Corp 1984-1998.', u'Copyright (c) Microsoft Corp 1981-1993.', u'Copyright (c) Microsoft Corp 1984-1998.', u'Copyright (c) Microsoft Corp 1981-1993.', u'Copyright (c) Microsoft Corp 1984-1998.', u'Copyright (c) Microsoft Corp 1981-1993.', ] check_detection(expected, test_file) def test_ics_quake_src_com_android_quake_quakeactivity_java(self): test_file = self.get_test_loc('ics/quake-src-com-android-quake/QuakeActivity.java') expected = [ u'Copyright (c) 2007 The Android Open Source Project', ] check_detection(expected, test_file) def test_ics_quake_src_com_android_quake_quakelib_java(self): test_file = self.get_test_loc('ics/quake-src-com-android-quake/QuakeLib.java') expected = [ u'Copyright (c) 2007 The Android Open Source Project', u'(c) Mouse Wheel Support', ] check_detection(expected, test_file) def 
test_ics_quake_src_com_android_quake_quakeview_java(self): test_file = self.get_test_loc('ics/quake-src-com-android-quake/QuakeView.java') expected = [ u'Copyright (c) 2007 The Android Open Source Project', u'Copyright (c) 2008 The Android Open Source Project', ] check_detection(expected, test_file) def test_ics_safe_iop_notice(self): test_file = self.get_test_loc('ics/safe-iop/NOTICE') expected = [ u'Copyright (c) 2007,2008 Will Drewry <redpig@dataspill.org>', ] check_detection(expected, test_file) def test_ics_safe_iop_include_safe_iop_h_lead_portion(self): test_file = self.get_test_loc('ics/safe-iop-include/safe_iop.h') expected = [ u'Copyright 2007,2008 redpig@dataspill.org', u'portions copyright The Android Open Source Project', ] check_detection(expected, test_file) def test_ics_safe_iop_src_safe_iop_c_lead_portion(self): test_file = self.get_test_loc('ics/safe-iop-src/safe_iop.c') expected = [ u'Copyright 2007,2008 redpig@dataspill.org', u'portions copyright The Android Open Source Project', ] check_detection(expected, test_file) def test_ics_skia_android_sample_sampleapp_androidmanifest_xml(self): test_file = self.get_test_loc('ics/skia-android_sample-SampleApp/AndroidManifest.xml') expected = [ u'Copyright (c) 2011 Skia', ] check_detection(expected, test_file) def test_ics_skia_android_sample_sampleapp_jni_sample_jni_cpp(self): test_file = self.get_test_loc('ics/skia-android_sample-SampleApp-jni/sample-jni.cpp') expected = [ u'Copyright (c) 2011 Skia', ] check_detection(expected, test_file) def test_ics_skia_emoji_emojifont_cpp(self): test_file = self.get_test_loc('ics/skia-emoji/EmojiFont.cpp') expected = [ u'Copyright 2009, The Android Open Source Project', ] check_detection(expected, test_file) def test_ics_skia_gm_strokerects_cpp(self): test_file = self.get_test_loc('ics/skia-gm/strokerects.cpp') expected = [ u'Copyright 2011 Google Inc.', ] check_detection(expected, test_file) def test_ics_skia_gpu_src_grgpu_cpp(self): test_file = 
self.get_test_loc('ics/skia-gpu-src/GrGpu.cpp') expected = [ u'Copyright 2010 Google Inc.', ] check_detection(expected, test_file) def test_ics_skia_include_core_skbitmap_h(self): test_file = self.get_test_loc('ics/skia-include-core/SkBitmap.h') expected = [ u'Copyright (c) 2006 The Android Open Source Project', u'SkColorGetR (c), SkColorGetG', ] check_detection(expected, test_file) def test_ics_skia_include_core_skcolorpriv_h(self): test_file = self.get_test_loc('ics/skia-include-core/SkColorPriv.h') expected = [ u'Copyright (c) 2006 The Android Open Source Project', u'SkGetPackedG32 (c), SkGetPackedB32', u'SkGetPackedG32 (c), SkGetPackedB32', ] check_detection(expected, test_file) def test_ics_skia_include_core_skregion_h(self): test_file = self.get_test_loc('ics/skia-include-core/SkRegion.h') expected = [ u'Copyright (c) 2005 The Android Open Source Project', ] check_detection(expected, test_file) def test_ics_skia_include_core_skscalar_h(self): test_file = self.get_test_loc('ics/skia-include-core/SkScalar.h') expected = [ u'Copyright (c) 2006 The Android Open Source Project', ] check_detection(expected, test_file) def test_ics_skia_include_core_sktregistry_h(self): test_file = self.get_test_loc('ics/skia-include-core/SkTRegistry.h') expected = [ u'Copyright 2009, The Android Open Source Project', ] check_detection(expected, test_file) def test_ics_skia_include_ports_skharfbuzzfont_h(self): test_file = self.get_test_loc('ics/skia-include-ports/SkHarfBuzzFont.h') expected = [ u'Copyright (c) 2009, Google Inc.', ] check_detection(expected, test_file) def test_ics_skia_include_views_skoswindow_wxwidgets_h(self): test_file = self.get_test_loc('ics/skia-include-views/SkOSWindow_wxwidgets.h') expected = [ u'Copyright (c) 2006 The Android Open Source Project', u'Copyright 2005 MyCompanyName', ] check_detection(expected, test_file) def test_ics_skia_src_animator_skoperanditerpolator_cpp(self): test_file = 
self.get_test_loc('ics/skia-src-animator/SkOperandIterpolator.cpp') expected = [ u'Copyright 2006, The Android Open Source Project', ] check_detection(expected, test_file) def test_ics_skia_src_core_skbitmap_cpp(self): test_file = self.get_test_loc('ics/skia-src-core/SkBitmap.cpp') expected = [ u'Copyright (c) 2006-2008 The Android Open Source Project', ] check_detection(expected, test_file) def test_ics_skia_src_core_skbitmapprocstate_matrixprocs_cpp(self): test_file = self.get_test_loc('ics/skia-src-core/SkBitmapProcState_matrixProcs.cpp') expected = [ u'(c) COPYRIGHT 2009 Motorola', ] check_detection(expected, test_file) def test_ics_skia_src_core_skblitter_4444_cpp(self): test_file = self.get_test_loc('ics/skia-src-core/SkBlitter_4444.cpp') expected = [ u'Copyright 2006, The Android Open Source Project', u'SkColorGetG (c), SkColorGetB', ] check_detection(expected, test_file) def test_ics_skia_src_core_skcolortable_cpp(self): test_file = self.get_test_loc('ics/skia-src-core/SkColorTable.cpp') expected = [ u'Copyright (c) 2006-2009 The Android Open Source Project', ] check_detection(expected, test_file) def test_ics_skia_src_core_skfilterproc_h(self): test_file = self.get_test_loc('ics/skia-src-core/SkFilterProc.h') expected = [ u'Copyright (c) 2006-2008 The Android Open Source Project', ] check_detection(expected, test_file) def test_ics_skia_src_images_skimagedecoder_libjpeg_cpp(self): test_file = self.get_test_loc('ics/skia-src-images/SkImageDecoder_libjpeg.cpp') expected = [ u'Copyright 2007, The Android Open Source Project', ] check_detection(expected, test_file) def test_ics_skia_src_opts_opts_check_arm_cpp(self): test_file = self.get_test_loc('ics/skia-src-opts/opts_check_arm.cpp') expected = [ u'Copyright (c) 2010, Code Aurora Forum.', u'Copyright 2006-2010, The Android Open Source Project', ] check_detection(expected, test_file) def test_ics_skia_src_pdf_skpdffont_cpp(self): test_file = self.get_test_loc('ics/skia-src-pdf/SkPDFFont.cpp') expected = [ 
u'Copyright (c) 2011 Google Inc.', ] check_detection(expected, test_file) def test_ics_skia_src_ports_skdebug_brew_cpp(self): test_file = self.get_test_loc('ics/skia-src-ports/SkDebug_brew.cpp') expected = [ u'Copyright 2009, The Android Open Source Project', u'Copyright 2009, Company 100, Inc.', ] check_detection(expected, test_file) def test_ics_skia_src_ports_skfonthost_fontconfig_cpp(self): test_file = self.get_test_loc('ics/skia-src-ports/SkFontHost_fontconfig.cpp') expected = [ u'Copyright 2008, Google Inc.', ] check_detection(expected, test_file) def test_ics_skia_src_ports_skfonthost_none_cpp(self): test_file = self.get_test_loc('ics/skia-src-ports/SkFontHost_none.cpp') expected = [ u'Copyright 2006-2008, The Android Open Source Project', ] check_detection(expected, test_file) def test_ics_skia_src_ports_skosfile_brew_cpp(self): test_file = self.get_test_loc('ics/skia-src-ports/SkOSFile_brew.cpp') expected = [ u'Copyright 2006, The Android Open Source Project', u'Copyright 2009, Company 100, Inc.', ] check_detection(expected, test_file) def test_ics_skia_src_ports_skxmlparser_empty_cpp(self): test_file = self.get_test_loc('ics/skia-src-ports/SkXMLParser_empty.cpp') expected = [ u'Copyright 2006, The Android Open Source Project', u'Copyright Skia Inc. 2004 - 2005', ] check_detection(expected, test_file) def test_ics_skia_tests_fillpathtest_cpp(self): test_file = self.get_test_loc('ics/skia-tests/FillPathTest.cpp') expected = [ u'Copyright (c) 2010 The Chromium Authors.', ] check_detection(expected, test_file) def test_ics_sonivox_notice(self): test_file = self.get_test_loc('ics/sonivox/NOTICE') expected = [ u'Copyright (c) 2004-2006 Sonic Network Inc.', ] check_detection(expected, test_file) def test_ics_sonivox_arm_fm_22k_host_src_eas_h(self): test_file = self.get_test_loc('ics/sonivox-arm-fm-22k-host_src/eas.h') expected = [ u'Copyright Sonic Network Inc. 
2005, 2006', ] check_detection(expected, test_file) def test_ics_sonivox_arm_fm_22k_host_src_eas_build_h(self): test_file = self.get_test_loc('ics/sonivox-arm-fm-22k-host_src/eas_build.h') expected = [ u'Copyright Sonic Network Inc. 2006', ] check_detection(expected, test_file) def test_ics_sonivox_arm_fm_22k_host_src_eas_config_c(self): test_file = self.get_test_loc('ics/sonivox-arm-fm-22k-host_src/eas_config.c') expected = [ u'Copyright Sonic Network Inc. 2004-2006', ] check_detection(expected, test_file) def test_ics_sonivox_arm_fm_22k_host_src_eas_config_h(self): test_file = self.get_test_loc('ics/sonivox-arm-fm-22k-host_src/eas_config.h') expected = [ u'Copyright 2005 Sonic Network Inc.', ] check_detection(expected, test_file) def test_ics_sonivox_arm_fm_22k_host_src_eas_main_c(self): test_file = self.get_test_loc('ics/sonivox-arm-fm-22k-host_src/eas_main.c') expected = [ u'Copyright Sonic Network Inc. 2004', ] check_detection(expected, test_file) def test_ics_sonivox_arm_fm_22k_host_src_eas_types_h(self): test_file = self.get_test_loc('ics/sonivox-arm-fm-22k-host_src/eas_types.h') expected = [ u'Copyright Sonic Network Inc. 2004', ] check_detection(expected, test_file) def test_ics_sonivox_arm_fm_22k_host_src_eas_wave_c(self): test_file = self.get_test_loc('ics/sonivox-arm-fm-22k-host_src/eas_wave.c') expected = [ u'Copyright Sonic Network Inc. 
2005', ] check_detection(expected, test_file) def test_ics_sonivox_arm_fm_22k_lib_src_eas_ctype_h(self): test_file = self.get_test_loc('ics/sonivox-arm-fm-22k-lib_src/eas_ctype.h') expected = [ u'Copyright (c) 2005 Sonic Network Inc.', ] check_detection(expected, test_file) def test_ics_sonivox_arm_fm_22k_lib_src_eas_data_h(self): test_file = self.get_test_loc('ics/sonivox-arm-fm-22k-lib_src/eas_data.h') expected = [ u'Copyright 2004 Sonic Network Inc.', ] check_detection(expected, test_file) def test_ics_sonivox_arm_fm_22k_lib_src_eas_fmengine_c(self): test_file = self.get_test_loc('ics/sonivox-arm-fm-22k-lib_src/eas_fmengine.c') expected = [ u'Copyright Sonic Network Inc. 2004, 2005', ] check_detection(expected, test_file) def test_ics_sonivox_arm_fm_22k_lib_src_eas_fmsndlib_c(self): test_file = self.get_test_loc('ics/sonivox-arm-fm-22k-lib_src/eas_fmsndlib.c') expected = [ u'(c) Copyright 2005 Sonic Network, Inc.', ] check_detection(expected, test_file) def test_ics_sonivox_arm_fm_22k_lib_src_eas_smfdata_h(self): test_file = self.get_test_loc('ics/sonivox-arm-fm-22k-lib_src/eas_smfdata.h') expected = [ u'Copyright Sonic Network Inc. 2005', ] check_detection(expected, test_file) def test_ics_sonivox_arm_hybrid_22k_lib_src_eas_wtengine_c(self): test_file = self.get_test_loc('ics/sonivox-arm-hybrid-22k-lib_src/eas_wtengine.c') expected = [ u'Copyright Sonic Network Inc. 
2004-2005', ] check_detection(expected, test_file) def test_ics_sonivox_arm_hybrid_22k_lib_src_hybrid_22khz_mcu_c(self): test_file = self.get_test_loc('ics/sonivox-arm-hybrid-22k-lib_src/hybrid_22khz_mcu.c') expected = [ u'Copyright (c) 2006 Sonic Network Inc.', ] check_detection(expected, test_file) def test_ics_sonivox_arm_wt_22k_lib_src_dls_h(self): test_file = self.get_test_loc('ics/sonivox-arm-wt-22k-lib_src/dls.h') expected = [ u'Copyright (c) 1996 Sonic Foundry', ] check_detection(expected, test_file) def test_ics_sonivox_arm_wt_22k_lib_src_jet_data_h(self): test_file = self.get_test_loc('ics/sonivox-arm-wt-22k-lib_src/jet_data.h') expected = [ u'Copyright (c) 2006 Sonic Network Inc.', ] check_detection(expected, test_file) def test_ics_sonivox_arm_wt_22k_lib_src_wt_22khz_c(self): test_file = self.get_test_loc('ics/sonivox-arm-wt-22k-lib_src/wt_22khz.c') expected = [ u'Copyright (c) 2009 Sonic Network Inc.', ] check_detection(expected, test_file) def test_ics_sonivox_docs_jet_authoring_guidelines_html(self): test_file = self.get_test_loc('ics/sonivox-docs/JET_Authoring_Guidelines.html') expected = [ u'Copyright 2009 techdoc.dot Jennifer Hruska', u'Copyright (c) 2009 The Android Open Source Project', ] check_detection(expected, test_file) def test_ics_sonivox_docs_jet_creator_user_manual_html(self): test_file = self.get_test_loc('ics/sonivox-docs/JET_Creator_User_Manual.html') expected = [ u'Copyright 2009 Confidential Information', u'Copyright (c) 2009 The Android Open Source Project', ] check_detection(expected, test_file) @expectedFailure def test_ics_sonivox_docs_jet_creator_user_manual_html_markup_lead_name(self): test_file = self.get_test_loc('ics/sonivox-docs/JET_Creator_User_Manual.html') expected = [ u'Jennifer Hruska Copyright 2009 Confidential Information', u'Copyright (c) 2009 The Android Open Source Project', ] check_detection(expected, test_file) def test_ics_sonivox_docs_jet_programming_manual_html(self): test_file = 
self.get_test_loc('ics/sonivox-docs/JET_Programming_Manual.html') expected = [ u'Copyright (c) 2009 The Android Open Source Project', ] check_detection(expected, test_file) def test_ics_sonivox_jet_tools_jetcreator_jetaudition_py(self): test_file = self.get_test_loc('ics/sonivox-jet_tools-JetCreator/JetAudition.py') expected = [ u'Copyright (c) 2008 Android Open Source Project', ] check_detection(expected, test_file) def test_ics_speex_notice(self): test_file = self.get_test_loc('ics/speex/NOTICE') expected = [ u'Copyright (c) 2002-2008 Jean-Marc Valin', u'Copyright (c) 2002 Jean-Marc Valin & David Rowe', u'Copyright (c) 2003 Epic Games', u'Copyright (c) 2003 Epic Games', u'Copyright (c) 2004-2006 Epic Games', u'Copyright (c) 2005 Analog Devices', u'Copyright (c) 2005 Jean-Marc Valin, CSIRO, Christopher Montgomery', u'Copyright (c) 2006 David Rowe', u'Copyright (c) 2006-2008 CSIRO, Jean-Marc Valin, Xiph.Org Foundation', u'Copyright (c) 2008 Thorvald Natvig', u'Copyright (c) 2003-2004, Mark Borgerding', u'Copyright (c) 2005-2007, Jean-Marc Valin', u'Copyright (c) 2011 Jyri Sarha, Texas Instruments', u'Copyright 1992, 1993, 1994 by Jutta Degener and Carsten Bormann, Technische Universitaet Berlin', ] check_detection(expected, test_file) def test_ics_speex_include_speex_speex_h(self): test_file = self.get_test_loc('ics/speex-include-speex/speex.h') expected = [ u'Copyright (c) 2002-2006 Jean-Marc Valin', ] check_detection(expected, test_file) def test_ics_speex_include_speex_speex_bits_h(self): test_file = self.get_test_loc('ics/speex-include-speex/speex_bits.h') expected = [ u'Copyright (c) 2002 Jean-Marc Valin', ] check_detection(expected, test_file) def test_ics_speex_include_speex_speex_buffer_h(self): test_file = self.get_test_loc('ics/speex-include-speex/speex_buffer.h') expected = [ u'Copyright (c) 2007 Jean-Marc Valin', ] check_detection(expected, test_file) def test_ics_speex_include_speex_speex_echo_h(self): test_file = 
self.get_test_loc('ics/speex-include-speex/speex_echo.h') expected = [ u'Copyright (c) Jean-Marc Valin', ] check_detection(expected, test_file) def test_ics_speex_include_speex_speex_preprocess_h(self): test_file = self.get_test_loc('ics/speex-include-speex/speex_preprocess.h') expected = [ u'Copyright (c) 2003 Epic Games', ] check_detection(expected, test_file) def test_ics_speex_include_speex_speex_types_h(self): test_file = self.get_test_loc('ics/speex-include-speex/speex_types.h') expected = [ u'(c) COPYRIGHT 1994-2002 by the Xiph.Org Foundation', ] check_detection(expected, test_file) def test_ics_speex_libspeex_kiss_fft_guts_h(self): test_file = self.get_test_loc('ics/speex-libspeex/_kiss_fft_guts.h') expected = [ u'Copyright (c) 2003-2004, Mark Borgerding', ] check_detection(expected, test_file) def test_ics_speex_libspeex_arch_h(self): test_file = self.get_test_loc('ics/speex-libspeex/arch.h') expected = [ u'Copyright (c) 2003 Jean-Marc Valin', ] check_detection(expected, test_file) def test_ics_speex_libspeex_bits_c(self): test_file = self.get_test_loc('ics/speex-libspeex/bits.c') expected = [ u'Copyright (c) 2002 Jean-Marc Valin', ] check_detection(expected, test_file) def test_ics_speex_libspeex_cb_search_c(self): test_file = self.get_test_loc('ics/speex-libspeex/cb_search.c') expected = [ u'Copyright (c) 2002-2006 Jean-Marc Valin', ] check_detection(expected, test_file) def test_ics_speex_libspeex_cb_search_h(self): test_file = self.get_test_loc('ics/speex-libspeex/cb_search.h') expected = [ u'Copyright (c) 2002 Jean-Marc Valin & David Rowe', ] check_detection(expected, test_file) def test_ics_speex_libspeex_cb_search_arm4_h(self): test_file = self.get_test_loc('ics/speex-libspeex/cb_search_arm4.h') expected = [ u'Copyright (c) 2004 Jean-Marc Valin', ] check_detection(expected, test_file) def test_ics_speex_libspeex_cb_search_bfin_h(self): test_file = self.get_test_loc('ics/speex-libspeex/cb_search_bfin.h') expected = [ u'Copyright (c) 2005 Analog 
Devices', ] check_detection(expected, test_file) def test_ics_speex_libspeex_fftwrap_c(self): test_file = self.get_test_loc('ics/speex-libspeex/fftwrap.c') expected = [ u'Copyright (c) 2005-2006 Jean-Marc Valin', ] check_detection(expected, test_file) def test_ics_speex_libspeex_fftwrap_h(self): test_file = self.get_test_loc('ics/speex-libspeex/fftwrap.h') expected = [ u'Copyright (c) 2005 Jean-Marc Valin', ] check_detection(expected, test_file) def test_ics_speex_libspeex_filterbank_c(self): test_file = self.get_test_loc('ics/speex-libspeex/filterbank.c') expected = [ u'Copyright (c) 2006 Jean-Marc Valin', ] check_detection(expected, test_file) def test_ics_speex_libspeex_fixed_bfin_h(self): test_file = self.get_test_loc('ics/speex-libspeex/fixed_bfin.h') expected = [ u'Copyright (c) 2005 Analog Devices Author Jean-Marc Valin', ] check_detection(expected, test_file) @expectedFailure def test_ics_speex_libspeex_fixed_bfin_h_extra_author(self): test_file = self.get_test_loc('ics/speex-libspeex/fixed_bfin.h') expected = [ u'Copyright (c) 2005 Analog Devices', ] check_detection(expected, test_file) def test_ics_speex_libspeex_kiss_fft_c(self): test_file = self.get_test_loc('ics/speex-libspeex/kiss_fft.c') expected = [ u'Copyright (c) 2003-2004, Mark Borgerding', u'Copyright (c) 2005-2007, Jean-Marc Valin', ] check_detection(expected, test_file) def test_ics_speex_libspeex_kiss_fftr_c(self): test_file = self.get_test_loc('ics/speex-libspeex/kiss_fftr.c') expected = [ u'Copyright (c) 2003-2004, Mark Borgerding', ] check_detection(expected, test_file) def test_ics_speex_libspeex_lpc_c(self): test_file = self.get_test_loc('ics/speex-libspeex/lpc.c') expected = [ u'Copyright 1992, 1993, 1994 by Jutta Degener and Carsten Bormann, Technische Universitaet Berlin', ] check_detection(expected, test_file) def test_ics_speex_libspeex_lsp_c(self): test_file = self.get_test_loc('ics/speex-libspeex/lsp.c') expected = [ u'Jean-Marc Valin (c) 2002-2006', ] check_detection(expected, 
test_file) def test_ics_speex_libspeex_lsp_bfin_h(self): test_file = self.get_test_loc('ics/speex-libspeex/lsp_bfin.h') expected = [ u'Copyright (c) 2006 David Rowe', ] check_detection(expected, test_file) def test_ics_speex_libspeex_mdf_c(self): test_file = self.get_test_loc('ics/speex-libspeex/mdf.c') expected = [ u'Copyright (c) 2003-2008 Jean-Marc Valin', ] check_detection(expected, test_file) def test_ics_speex_libspeex_modes_wb_c(self): test_file = self.get_test_loc('ics/speex-libspeex/modes_wb.c') expected = [ u'Copyright (c) 2002-2007 Jean-Marc Valin', ] check_detection(expected, test_file) def test_ics_speex_libspeex_preprocess_c(self): test_file = self.get_test_loc('ics/speex-libspeex/preprocess.c') expected = [ u'Copyright (c) 2003 Epic Games', u'Copyright (c) 2004-2006 Epic Games', ] check_detection(expected, test_file) def test_ics_speex_libspeex_pseudofloat_h(self): test_file = self.get_test_loc('ics/speex-libspeex/pseudofloat.h') expected = [ u'Copyright (c) 2005 Jean-Marc Valin', ] check_detection(expected, test_file) def test_ics_speex_libspeex_resample_c(self): test_file = self.get_test_loc('ics/speex-libspeex/resample.c') expected = [ u'Copyright (c) 2007-2008 Jean-Marc Valin', u'Copyright (c) 2008 Thorvald Natvig', ] check_detection(expected, test_file) def test_ics_speex_libspeex_resample_neon_h(self): test_file = self.get_test_loc('ics/speex-libspeex/resample_neon.h') expected = [ u'Copyright (c) 2007-2008 Jean-Marc Valin', u'Copyright (c) 2008 Thorvald Natvig', u'Copyright (c) 2011 Jyri Sarha, Texas Instruments', ] check_detection(expected, test_file) def test_ics_speex_libspeex_resample_sse_h(self): test_file = self.get_test_loc('ics/speex-libspeex/resample_sse.h') expected = [ u'Copyright (c) 2007-2008 Jean-Marc Valin', u'Copyright (c) 2008 Thorvald Natvig', ] check_detection(expected, test_file) def test_ics_speex_libspeex_scal_c(self): test_file = self.get_test_loc('ics/speex-libspeex/scal.c') expected = [ u'Copyright (c) 2006-2008 CSIRO, 
Jean-Marc Valin, Xiph.Org Foundation', ] check_detection(expected, test_file) def test_ics_speex_libspeex_smallft_c(self): test_file = self.get_test_loc('ics/speex-libspeex/smallft.c') expected = [ u'(c) COPYRIGHT 1994-2001 by the XIPHOPHORUS Company', ] check_detection(expected, test_file) def test_ics_speex_libspeex_vorbis_psy_h(self): test_file = self.get_test_loc('ics/speex-libspeex/vorbis_psy.h') expected = [ u'Copyright (c) 2005 Jean-Marc Valin, CSIRO, Christopher Montgomery', ] check_detection(expected, test_file) def test_ics_speex_libspeex_window_c(self): test_file = self.get_test_loc('ics/speex-libspeex/window.c') expected = [ u'Copyright (c) 2006 Jean-Marc Valin', ] check_detection(expected, test_file) def test_ics_srec_notice(self): test_file = self.get_test_loc('ics/srec/NOTICE') expected = [ u'Copyright 2007, 2008 Nuance Communications', ] check_detection(expected, test_file) def test_ics_srec_audio_audioin_unix_include_audioin_h(self): test_file = self.get_test_loc('ics/srec-audio-AudioIn-UNIX-include/audioin.h') expected = [ u'Copyright 2007, 2008 Nuance Communciations, Inc.', ] check_detection(expected, test_file) def test_ics_srec_audio_audioin_unix_src_audioinwrapper_cpp(self): test_file = self.get_test_loc('ics/srec-audio-AudioIn-UNIX-src/audioinwrapper.cpp') expected = [ u'Copyright 2007, 2008 Nuance Communciations, Inc.', ] check_detection(expected, test_file) def test_ics_srec_audio_audioin_unix_src_filter_c(self): test_file = self.get_test_loc('ics/srec-audio-AudioIn-UNIX-src/filter.c') expected = [ u'Copyright 2007, 2008 Nuance Communciations, Inc.', ] check_detection(expected, test_file) def test_ics_srec_doc_srec_doxygen(self): test_file = self.get_test_loc('ics/srec-doc/srec.doxygen') expected = [ u'(c) Copyright 2003-2007 Nuance', ] check_detection(expected, test_file) def test_ics_srec_srec_srec_doxygen(self): test_file = self.get_test_loc('ics/srec-srec/srec.doxygen') expected = [ u'(c) Copyright 2003 Speechworks International', ] 
check_detection(expected, test_file) def test_ics_srec_srec_jni_android_speech_srec_microphoneinputstream_cpp(self): test_file = self.get_test_loc('ics/srec-srec_jni/android_speech_srec_MicrophoneInputStream.cpp') expected = [ u'Copyright 2007 Nuance Communciations, Inc.', ] check_detection(expected, test_file) def test_ics_srec_tools_grxmlcompile_grxmlcompile_cpp(self): test_file = self.get_test_loc('ics/srec-tools-grxmlcompile/grxmlcompile.cpp') expected = [ u'Copyright 2007, 2008 Nuance Communciations, Inc.', u'Copyright (c) 2007 Project Admins leethomason', ] check_detection(expected, test_file) def test_ics_srtp_config_guess(self): test_file = self.get_test_loc('ics/srtp/config.guess') expected = [ u'Copyright (c) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.', u'Copyright (c) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_srtp_config_log(self): test_file = self.get_test_loc('ics/srtp/config.log') expected = [ u'Copyright (c) 2007 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_srtp_license(self): test_file = self.get_test_loc('ics/srtp/LICENSE') expected = [ u'Copyright (c) 2001-2006 Cisco Systems, Inc.', ] check_detection(expected, test_file) def test_ics_srtp_crypto_cipher_aes_c(self): test_file = self.get_test_loc('ics/srtp-crypto-cipher/aes.c') expected = [ u'Copyright (c) 2001-2006, Cisco Systems, Inc.', ] check_detection(expected, test_file) def test_ics_srtp_crypto_hash_hmac_c(self): test_file = self.get_test_loc('ics/srtp-crypto-hash/hmac.c') expected = [ u'Copyright (c) 2001-2006 Cisco Systems, Inc.', ] check_detection(expected, test_file) def test_ics_srtp_crypto_include_auth_h(self): test_file = self.get_test_loc('ics/srtp-crypto-include/auth.h') expected = [ u'Copyright (c) 2001-2006, Cisco Systems, Inc.', ] check_detection(expected, 
test_file) def test_ics_srtp_crypto_include_kernel_compat_h(self): test_file = self.get_test_loc('ics/srtp-crypto-include/kernel_compat.h') expected = [ u'Copyright (c) 2005 Ingate Systems AB', ] check_detection(expected, test_file) def test_ics_srtp_doc_header_template(self): test_file = self.get_test_loc('ics/srtp-doc/header.template') expected = [ u'copyright 2001-2005 by David', ] check_detection(expected, test_file) def test_ics_srtp_doc_intro_txt(self): test_file = self.get_test_loc('ics/srtp-doc/intro.txt') expected = [ u'Copyright (c) 2001-2005 Cisco Systems, Inc.', ] check_detection(expected, test_file) def test_ics_srtp_doc_rfc3711_txt(self): test_file = self.get_test_loc('ics/srtp-doc/rfc3711.txt') expected = [ u'Copyright (c) The Internet Society (2004).', u'Full Copyright Statement', u'Full Copyright Statement', u'Copyright (c) The Internet Society (2004).', ] check_detection(expected, test_file) def test_ics_srtp_include_ekt_h(self): test_file = self.get_test_loc('ics/srtp-include/ekt.h') expected = [ u'Copyright (c) 2001-2005 Cisco Systems, Inc.', ] check_detection(expected, test_file) def test_ics_stlport_configure_bat(self): test_file = self.get_test_loc('ics/stlport/configure.bat') expected = [ u'Copyright (c) 2004,2005 Michael Fink', ] check_detection(expected, test_file) def test_ics_stlport_license(self): test_file = self.get_test_loc('ics/stlport/LICENSE') expected = [ u'Copyright 1999,2000 Boris Fomitchev', u'Copyright 1994 Hewlett-Packard Company', u'Copyright 1996,97 Silicon Graphics Computer Systems, Inc.', u'Copyright 1997 Moscow Center for SPARC Technology.', ] check_detection(expected, test_file) def test_ics_stlport_makefile(self): test_file = self.get_test_loc('ics/stlport/Makefile') expected = [ u'Copyright (c) 2004-2008 Petr Ovtchenkov', ] check_detection(expected, test_file) def test_ics_stlport_readme(self): test_file = self.get_test_loc('ics/stlport/README') expected = [ u'Copyright (c) 1994 Hewlett-Packard Company', u'Copyright 
(c) 1996-1999 Silicon Graphics Computer Systems, Inc.', u'Copyright (c) 1997 Moscow Center for SPARC Technology', u'Copyright (c) 1999-2003 Boris Fomitchev', ] check_detection(expected, test_file) def test_ics_stlport_src_acquire_release_h(self): test_file = self.get_test_loc('ics/stlport-src/acquire_release.h') expected = [ u'Copyright (c) 1999 Silicon Graphics Computer Systems, Inc.', u'Copyright (c) 1999 Boris Fomitchev', ] check_detection(expected, test_file) def test_ics_stlport_src_allocators_cpp(self): test_file = self.get_test_loc('ics/stlport-src/allocators.cpp') expected = [ u'Copyright (c) 1996,1997 Silicon Graphics Computer Systems, Inc.', u'Copyright (c) 1997 Moscow Center for SPARC Technology', u'Copyright (c) 1999 Boris Fomitchev', ] check_detection(expected, test_file) def test_ics_stlport_src_bitset_cpp(self): test_file = self.get_test_loc('ics/stlport-src/bitset.cpp') expected = [ u'Copyright (c) 1998 Silicon Graphics Computer Systems, Inc.', u'Copyright (c) 1999 Boris Fomitchev', ] check_detection(expected, test_file) def test_ics_stlport_src_ctype_cpp(self): test_file = self.get_test_loc('ics/stlport-src/ctype.cpp') expected = [ u'Copyright (c) 1999 Silicon Graphics Computer Systems, Inc.', u'Copyright (c) 1999 Boris Fomitchev', ] check_detection(expected, test_file) def test_ics_stlport_src_dll_main_cpp(self): test_file = self.get_test_loc('ics/stlport-src/dll_main.cpp') expected = [ u'Copyright (c) 1994 Hewlett-Packard Company', u'Copyright (c) 1996,1997 Silicon Graphics Computer Systems, Inc.', u'Copyright (c) 1997 Moscow Center for SPARC Technology', u'Copyright (c) 1999 Boris Fomitchev', ] check_detection(expected, test_file) def test_ics_stlport_src_lock_free_slist_h(self): test_file = self.get_test_loc('ics/stlport-src/lock_free_slist.h') expected = [ u'Copyright (c) 1997-1999 Silicon Graphics Computer Systems, Inc.', u'Copyright (c) 1999 Boris Fomitchev', ] check_detection(expected, test_file) def test_ics_stlport_src_stlport_rc(self): 
test_file = self.get_test_loc('ics/stlport-src/stlport.rc') expected = [ u'Copyright (c) Boris Fomitchev', ] check_detection(expected, test_file) def test_ics_stlport_src_c_locale_dummy_c_locale_dummy_c(self): test_file = self.get_test_loc('ics/stlport-src-c_locale_dummy/c_locale_dummy.c') expected = [ u'Copyright (c) 1999 Silicon Graphics Computer Systems, Inc.', u'Copyright (c) 1999 Boris Fomitchev', ] check_detection(expected, test_file) def test_ics_stlport_src_c_locale_win32_c_wlocale_win32_c(self): test_file = self.get_test_loc('ics/stlport-src-c_locale_win32/c_wlocale_win32.c') expected = [ u'Copyright (c) 2007 2008 Francois Dumont', ] check_detection(expected, test_file) def test_ics_stlport_stlport_assert_h(self): test_file = self.get_test_loc('ics/stlport-stlport/assert.h') expected = [ u'Copyright (c) 1999 Boris Fomitchev', ] check_detection(expected, test_file) def test_ics_stlport_stlport_exception(self): test_file = self.get_test_loc('ics/stlport-stlport/exception') expected = [ u'Copyright (c) 1996,1997 Silicon Graphics Computer Systems, Inc.', u'Copyright (c) 1999 Boris Fomitchev', ] check_detection(expected, test_file) def test_ics_stlport_stlport_limits(self): test_file = self.get_test_loc('ics/stlport-stlport/limits') expected = [ u'Copyright (c) 1997 Silicon Graphics Computer Systems, Inc.', u'Copyright (c) 1999 Boris Fomitchev', ] check_detection(expected, test_file) def test_ics_stlport_stlport_locale(self): test_file = self.get_test_loc('ics/stlport-stlport/locale') expected = [ u'Copyright (c) 1999 Silicon Graphics Computer Systems, Inc.', u'Copyright (c) 1999 Boris Fomitchev', ] check_detection(expected, test_file) def test_ics_stlport_stlport_numeric(self): test_file = self.get_test_loc('ics/stlport-stlport/numeric') expected = [ u'Copyright (c) 1994 Hewlett-Packard Company', u'Copyright (c) 1996,1997 Silicon Graphics Computer Systems, Inc.', u'Copyright (c) 1999 Boris Fomitchev', ] check_detection(expected, test_file) def 
test_ics_stlport_stlport_rope(self): test_file = self.get_test_loc('ics/stlport-stlport/rope') expected = [ u'Copyright (c) 1997 Silicon Graphics Computer Systems, Inc.', ] check_detection(expected, test_file) def test_ics_stlport_stlport_type_traits(self): test_file = self.get_test_loc('ics/stlport-stlport/type_traits') expected = [ u'Copyright (c) 2007, 2008 Petr Ovtchenkov', ] check_detection(expected, test_file) def test_ics_stlport_stlport_unordered_map(self): test_file = self.get_test_loc('ics/stlport-stlport/unordered_map') expected = [ u'Copyright (c) 2004,2005 Francois Dumont', ] check_detection(expected, test_file) def test_ics_stlport_stlport_stl_carray_h(self): test_file = self.get_test_loc('ics/stlport-stlport-stl/_carray.h') expected = [ u'Copyright (c) 2005 Francois Dumont', ] check_detection(expected, test_file) def test_ics_stlport_stlport_stl_function_h(self): test_file = self.get_test_loc('ics/stlport-stlport-stl/_function.h') expected = [ u'Copyright (c) 1994 Hewlett-Packard Company', u'Copyright (c) 1996-1998 Silicon Graphics Computer Systems, Inc.', u'Copyright (c) 1997 Moscow Center for SPARC Technology', u'Copyright (c) 1999 Boris Fomitchev', ] check_detection(expected, test_file) def test_ics_stlport_stlport_stl_function_adaptors_h(self): test_file = self.get_test_loc('ics/stlport-stlport-stl/_function_adaptors.h') expected = [ u'Copyright (c) 1994 Hewlett-Packard Company', u'Copyright (c) 1996-1998 Silicon Graphics Computer Systems, Inc.', u'Copyright (c) 1997 Moscow Center for SPARC Technology', u'Copyright (c) 1999 Boris Fomitchev', u'Copyright (c) 2000 Pavel Kuznetsov', u"Copyright (c) 2001 Meridian'93", ] check_detection(expected, test_file) def test_ics_stlport_stlport_stl_hash_fun_h(self): test_file = self.get_test_loc('ics/stlport-stlport-stl/_hash_fun.h') expected = [ u'Copyright (c) 1996-1998 Silicon Graphics Computer Systems, Inc.', u'Copyright (c) 1994 Hewlett-Packard Company', ] check_detection(expected, test_file) def 
test_ics_stlport_stlport_stl_heap_h(self): test_file = self.get_test_loc('ics/stlport-stlport-stl/_heap.h') expected = [ u'Copyright (c) 1994 Hewlett-Packard Company', u'Copyright (c) 1997 Silicon Graphics Computer Systems, Inc.', ] check_detection(expected, test_file) def test_ics_stlport_stlport_stl_limits_c(self): test_file = self.get_test_loc('ics/stlport-stlport-stl/_limits.c') expected = [ u'Copyright (c) 1998,1999 Silicon Graphics Computer Systems, Inc.', u'Copyright (c) 1999 Boris Fomitchev', ] check_detection(expected, test_file) def test_ics_stlport_stlport_stl_string_base_h(self): test_file = self.get_test_loc('ics/stlport-stlport-stl/_string_base.h') expected = [ u'Copyright (c) 1997-1999 Silicon Graphics Computer Systems, Inc.', u'Copyright (c) 1999 Boris Fomitchev', u'Copyright (c) 2003 Francois Dumont', ] check_detection(expected, test_file) def test_ics_stlport_stlport_stl_boost_type_traits_h(self): test_file = self.get_test_loc('ics/stlport-stlport-stl/boost_type_traits.h') expected = [ u'Copyright (c) 2004 Francois Dumont', ] check_detection(expected, test_file) def test_ics_stlport_stlport_stl_concept_checks_h(self): test_file = self.get_test_loc('ics/stlport-stlport-stl/concept_checks.h') expected = [ u'Copyright (c) 1999 Silicon Graphics Computer Systems, Inc.', ] check_detection(expected, test_file) def test_ics_stlport_stlport_stl_msl_string_h_trail_inc(self): test_file = self.get_test_loc('ics/stlport-stlport-stl/msl_string.h') expected = [ u'Copyright (c) 1998 Mark of the Unicorn, Inc.', ] check_detection(expected, test_file) def test_ics_stlport_stlport_stl_type_manips_h(self): test_file = self.get_test_loc('ics/stlport-stlport-stl/type_manips.h') expected = [ u'Copyright (c) 2003 Francois Dumont', ] check_detection(expected, test_file) def test_ics_stlport_stlport_stl_type_traits_h(self): test_file = self.get_test_loc('ics/stlport-stlport-stl/type_traits.h') expected = [ u'Copyright (c) 1996,1997 Silicon Graphics Computer Systems, Inc.', 
u'Copyright (c) 1997 Moscow Center for SPARC Technology', u'Copyright (c) 1999 Boris Fomitchev', u'Copyright 2000 Adobe Systems Incorporated', ] check_detection(expected, test_file) def test_ics_stlport_stlport_stl_config_native_headers_h(self): test_file = self.get_test_loc('ics/stlport-stlport-stl-config/_native_headers.h') expected = [ u'Copyright (c) 2006 Francois Dumont', ] check_detection(expected, test_file) def test_ics_stlport_test_eh_main_cpp_trail_inc(self): test_file = self.get_test_loc('ics/stlport-test-eh/main.cpp') expected = [ u'Copyright (c) 1997 Mark of the Unicorn, Inc.', u'Copyright (c) 1997 Moscow Center for SPARC Technology', ] check_detection(expected, test_file) def test_ics_stlport_test_eh_mwerks_console_os_x_c(self): test_file = self.get_test_loc('ics/stlport-test-eh/mwerks_console_OS_X.c') expected = [ u'Copyright (c) 1995-2002 Metrowerks Corporation.', ] check_detection(expected, test_file) def test_ics_stlport_test_eh_random_number_h_trail_inc(self): test_file = self.get_test_loc('ics/stlport-test-eh/random_number.h') expected = [ u'Copyright (c) 1997-1998 Mark of the Unicorn, Inc.', ] check_detection(expected, test_file) def test_ics_stlport_test_eh_test_insert_h_trail_inc(self): test_file = self.get_test_loc('ics/stlport-test-eh/test_insert.h') expected = [ u'Copyright (c) 1997 Mark of the Unicorn, Inc.', ] check_detection(expected, test_file) def test_ics_stlport_test_unit_limits_test_cpp(self): test_file = self.get_test_loc('ics/stlport-test-unit/limits_test.cpp') expected = [ u'Copyright Jens Maurer 2000', ] check_detection(expected, test_file) def test_ics_stlport_test_unit_cppunit_cppunit_mini_h(self): test_file = self.get_test_loc('ics/stlport-test-unit-cppunit/cppunit_mini.h') expected = [ u'Copyright (c) 2003, 2004 Zdenek Nemec', ] check_detection(expected, test_file) def test_ics_strace_aclocal_m4(self): test_file = self.get_test_loc('ics/strace/aclocal.m4') expected = [ u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001, 
2002, 2003, 2004, 2005 Free Software Foundation, Inc.', u'Copyright (c) 2002, 2003, 2005 Free Software Foundation, Inc.', u'Copyright (c) 2001, 2003, 2005 Free Software Foundation, Inc.', u'Copyright (c) 1997, 2000, 2001, 2003, 2004, 2005 Free Software Foundation, Inc.', u'Copyright (c) 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.', u'Copyright (c) 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.', u'Copyright (c) 1996, 1997, 2000, 2001, 2003, 2005 Free Software Foundation, Inc.', u'Copyright (c) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.', u'Copyright (c) 2001, 2003, 2005 Free Software Foundation, Inc.', u'Copyright (c) 2003, 2005 Free Software Foundation, Inc.', u'Copyright (c) 1996, 1998, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.', u'Copyright (c) 2001, 2002, 2003, 2005 Free Software Foundation, Inc.', u'Copyright (c) 1997, 1999, 2000, 2001, 2003, 2005 Free Software Foundation, Inc.', u'Copyright (c) 2003, 2004, 2005 Free Software Foundation, Inc.', u'Copyright (c) 2001, 2002, 2003, 2005 Free Software Foundation, Inc.', u'Copyright (c) 1996, 1997, 2000, 2001, 2003, 2005 Free Software Foundation, Inc.', u'Copyright (c) 2001, 2003, 2005 Free Software Foundation, Inc.', u'Copyright (c) 2004, 2005 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_strace_bjm_c(self): test_file = self.get_test_loc('ics/strace/bjm.c') expected = [ u'Copyright (c) 1991, 1992 Paul Kranenburg <pk@cs.few.eur.nl>', u'Copyright (c) 1993 Branko Lankester <branko@hacktic.nl>', u'Copyright (c) 1993, 1994, 1995, 1996 Rick Sladkey <jrs@world.std.com>', u'Copyright (c) 1996-1999 Wichert Akkerman <wichert@cistron.nl>', ] check_detection(expected, test_file) def test_ics_strace_config_log(self): test_file = self.get_test_loc('ics/strace/config.log') expected = [ u'Copyright (c) 2006 Free Software Foundation, Inc.', ] check_detection(expected, 
test_file) def test_ics_strace_copyright(self): test_file = self.get_test_loc('ics/strace/COPYRIGHT') expected = [ u'Copyright (c) 1991, 1992 Paul Kranenburg <pk@cs.few.eur.nl>', u'Copyright (c) 1993 Branko Lankester <branko@hacktic.nl>', u'Copyright (c) 1993 Ulrich Pegelow <pegelow@moorea.uni-muenster.de>', u'Copyright (c) 1995, 1996 Michael Elizabeth Chastain <mec@duracef.shout.net>', u'Copyright (c) 1993, 1994, 1995, 1996 Rick Sladkey <jrs@world.std.com>', u'Copyright (c) 1998-2001 Wichert Akkerman <wakkerma@deephackmode.org>', # this is redundant and rare junk u'COPYRIGHT,v 1.3 2002/03/31 18:43:00 wichert', ] check_detection(expected, test_file) def test_ics_strace_defs_h(self): test_file = self.get_test_loc('ics/strace/defs.h') expected = [ u'Copyright (c) 1991, 1992 Paul Kranenburg <pk@cs.few.eur.nl>', u'Copyright (c) 1993 Branko Lankester <branko@hacktic.nl>', u'Copyright (c) 1993, 1994, 1995, 1996 Rick Sladkey <jrs@world.std.com>', ] check_detection(expected, test_file) def test_ics_strace_depcomp(self): test_file = self.get_test_loc('ics/strace/depcomp') expected = [ u'Copyright (c) 1999, 2000, 2003 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_strace_errnoent_sh(self): test_file = self.get_test_loc('ics/strace/errnoent.sh') expected = [ u'Copyright (c) 1993, 1994, 1995 Rick Sladkey <jrs@world.std.com>', ] check_detection(expected, test_file) def test_ics_strace_ioctl_c(self): test_file = self.get_test_loc('ics/strace/ioctl.c') expected = [ u'Copyright (c) 1991, 1992 Paul Kranenburg <pk@cs.few.eur.nl>', u'Copyright (c) 1993 Branko Lankester <branko@hacktic.nl>', u'Copyright (c) 1993, 1994, 1995, 1996 Rick Sladkey <jrs@world.std.com>', u'Copyright (c) 1996-2001 Wichert Akkerman <wichert@cistron.nl>', ] check_detection(expected, test_file) def test_ics_strace_ioctlsort_c(self): test_file = self.get_test_loc('ics/strace/ioctlsort.c') expected = [ u'Copyright (c) 1991, 1992 Paul Kranenburg <pk@cs.few.eur.nl>', u'Copyright 
(c) 1993, 1994, 1995 Rick Sladkey <jrs@world.std.com>', ] check_detection(expected, test_file) def test_ics_strace_ipc_c(self): test_file = self.get_test_loc('ics/strace/ipc.c') expected = [ u'Copyright (c) 1993 Ulrich Pegelow <pegelow@moorea.uni-muenster.de>', u'Copyright (c) 1993 Branko Lankester <branko@hacktic.nl>', u'Copyright (c) 1993, 1994, 1995, 1996 Rick Sladkey <jrs@world.std.com>', u'Copyright (c) 1996-1999 Wichert Akkerman <wichert@cistron.nl>', ] check_detection(expected, test_file) def test_ics_strace_makefile_in(self): test_file = self.get_test_loc('ics/strace/Makefile.in') expected = [ u'Copyright (c) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_strace_mem_c(self): test_file = self.get_test_loc('ics/strace/mem.c') expected = [ u'Copyright (c) 1991, 1992 Paul Kranenburg <pk@cs.few.eur.nl>', u'Copyright (c) 1993 Branko Lankester <branko@hacktic.nl>', u'Copyright (c) 1993, 1994, 1995, 1996 Rick Sladkey <jrs@world.std.com>', u'Copyright (c) 1996-1999 Wichert Akkerman <wichert@cistron.nl>', u'Copyright (c) 2000 PocketPenguins Inc.', ] check_detection(expected, test_file) def test_ics_strace_net_c(self): test_file = self.get_test_loc('ics/strace/net.c') expected = [ u'Copyright (c) 1991, 1992 Paul Kranenburg <pk@cs.few.eur.nl>', u'Copyright (c) 1993 Branko Lankester <branko@hacktic.nl>', u'Copyright (c) 1993, 1994, 1995, 1996 Rick Sladkey <jrs@world.std.com>', u'Copyright (c) 1996-2000 Wichert Akkerman <wichert@cistron.nl>', ] check_detection(expected, test_file) def test_ics_strace_proc_c(self): test_file = self.get_test_loc('ics/strace/proc.c') expected = [ u'Copyright (c) 1993, 1994, 1995 Rick Sladkey <jrs@world.std.com>', ] check_detection(expected, test_file) def test_ics_strace_process_c(self): test_file = self.get_test_loc('ics/strace/process.c') expected = [ u'Copyright (c) 1991, 1992 Paul Kranenburg <pk@cs.few.eur.nl>', u'Copyright (c) 
1993 Branko Lankester <branko@hacktic.nl>', u'Copyright (c) 1993, 1994, 1995, 1996 Rick Sladkey <jrs@world.std.com>', u'Copyright (c) 1996-1999 Wichert Akkerman <wichert@cistron.nl>', u'Copyright (c) 1999 IBM Deutschland Entwicklung GmbH, IBM Corporation', u'Copyright (c) 2000 PocketPenguins Inc.', ] check_detection(expected, test_file) def test_ics_strace_signal_c(self): test_file = self.get_test_loc('ics/strace/signal.c') expected = [ u'Copyright (c) 1991, 1992 Paul Kranenburg <pk@cs.few.eur.nl>', u'Copyright (c) 1993 Branko Lankester <branko@hacktic.nl>', u'Copyright (c) 1993, 1994, 1995, 1996 Rick Sladkey <jrs@world.std.com>', u'Copyright (c) 1996-1999 Wichert Akkerman <wichert@cistron.nl>', u'Copyright (c) 1999 IBM Deutschland Entwicklung GmbH, IBM Corporation', ] check_detection(expected, test_file) def test_ics_strace_signalent_sh(self): test_file = self.get_test_loc('ics/strace/signalent.sh') expected = [ u'Copyright (c) 1996 Rick Sladkey <jrs@world.std.com>', ] check_detection(expected, test_file) def test_ics_strace_sock_c(self): test_file = self.get_test_loc('ics/strace/sock.c') expected = [ u'Copyright (c) 1993, 1994, 1995, 1996 Rick Sladkey <jrs@world.std.com>', ] check_detection(expected, test_file) def test_ics_strace_strace_1(self): test_file = self.get_test_loc('ics/strace/strace.1') expected = [ u'Copyright (c) 1991, 1992 Paul Kranenburg <pk@cs.few.eur.nl>', u'Copyright (c) 1993 Branko Lankester <branko@hacktic.nl>', u'Copyright (c) 1993, 1994, 1995, 1996 Rick Sladkey <jrs@world.std.com>', ] check_detection(expected, test_file) def test_ics_strace_strace_graph(self): test_file = self.get_test_loc('ics/strace/strace-graph') expected = [ u'Copyright (c) 1998 by Richard Braakman <dark@xs4all.nl>.', ] check_detection(expected, test_file) def test_ics_strace_stream_c(self): test_file = self.get_test_loc('ics/strace/stream.c') expected = [ u'Copyright (c) 1993, 1994, 1995, 1996 Rick Sladkey <jrs@world.std.com>', u'Copyright (c) 1996-1999 Wichert 
Akkerman <wichert@cistron.nl>', ] check_detection(expected, test_file) def test_ics_strace_syscallent_sh(self): test_file = self.get_test_loc('ics/strace/syscallent.sh') expected = [ u'Copyright (c) 1993, 1994, 1995, 1996 Rick Sladkey <jrs@world.std.com>', ] check_detection(expected, test_file) def test_ics_strace_linux_dummy_h(self): test_file = self.get_test_loc('ics/strace-linux/dummy.h') expected = [ u'Copyright (c) 1993 Branko Lankester <branko@hacktic.nl>', u'Copyright (c) 1993, 1994, 1995 Rick Sladkey <jrs@world.std.com>', ] check_detection(expected, test_file) def test_ics_strace_linux_ioctlent_sh(self): test_file = self.get_test_loc('ics/strace-linux/ioctlent.sh') expected = [ u'Copyright (c) 2001 Wichert Akkerman <wichert@cistron.nl>', ] check_detection(expected, test_file) def test_ics_strace_strace_linux_hppa_syscallent_h(self): test_file = self.get_test_loc('ics/strace-strace-linux-hppa/syscallent.h') expected = [ u'Copyright (c) 2001 Hewlett-Packard, Matthew Wilcox', ] check_detection(expected, test_file) def test_ics_strace_strace_linux_ia64_syscallent_h(self): test_file = self.get_test_loc('ics/strace-strace-linux-ia64/syscallent.h') expected = [ u'Copyright (c) 1999, 2001 Hewlett-Packard Co David Mosberger-Tang <davidm@hpl.hp.com>', ] check_detection(expected, test_file) def test_ics_strace_strace_linux_mips_ioctlent_sh(self): test_file = self.get_test_loc('ics/strace-strace-linux-mips/ioctlent.sh') expected = [ u'Copyright (c) 1993, 1994, 1995 Rick Sladkey <jrs@world.std.com>', u'Copyright (c) 1995, 1996 Michael Elizabeth Chastain <mec@duracef.shout.net>', ] check_detection(expected, test_file) def test_ics_strace_strace_linux_s390_syscallent_h(self): test_file = self.get_test_loc('ics/strace-strace-linux-s390/syscallent.h') expected = [ u'Copyright (c) 2000 IBM Deutschland Entwicklung GmbH, IBM Coporation Authors', ] check_detection(expected, test_file) @expectedFailure def test_ics_strace_strace_linux_s390_syscallent_h_extra_author(self): 
test_file = self.get_test_loc('ics/strace-strace-linux-s390/syscallent.h') expected = [ u'Copyright (c) 2000 IBM Deutschland Entwicklung GmbH, IBM Coporation', ] check_detection(expected, test_file) def test_ics_strace_strace_linux_sh_syscallent_h(self): test_file = self.get_test_loc('ics/strace-strace-linux-sh/syscallent.h') expected = [ u'Copyright (c) 1993 Branko Lankester <branko@hacktic.nl>', u'Copyright (c) 1993, 1994, 1995 Rick Sladkey <jrs@world.std.com>', u'Copyright (c) 2000 PocketPenguins Inc.', ] check_detection(expected, test_file) def test_ics_strace_strace_linux_sparc_syscall_h(self): test_file = self.get_test_loc('ics/strace-strace-linux-sparc/syscall.h') expected = [ u'Copyright (c) 1991, 1992 Paul Kranenburg <pk@cs.few.eur.nl>', u'Copyright (c) 1993, 1994, 1995, 1996 Rick Sladkey <jrs@world.std.com>', ] check_detection(expected, test_file) def test_ics_svox_pico_androidmanifest_xml(self): test_file = self.get_test_loc('ics/svox-pico/AndroidManifest.xml') expected = [ u'Copyright 2009, The Android Open Source Project', ] check_detection(expected, test_file) def test_ics_svox_pico_resources_tools_lingwarebuilding_readme_txt(self): test_file = self.get_test_loc('ics/svox-pico_resources-tools-LingwareBuilding/Readme.txt') expected = [ u'Copyright (c) 2008-2009 SVOX AG', ] check_detection(expected, test_file) def test_ics_svox_pico_resources_tools_lingwarebuilding_picolingware_source_files_textana_en_gb_en_gb_lexpos_utf(self): test_file = self.get_test_loc('ics/svox-pico_resources-tools-LingwareBuilding-PicoLingware_source_files-textana-en-GB/en-GB_lexpos.utf') expected = [ u'Copyright (c) 2008-2009 SVOX AG', ] check_detection(expected, test_file) def test_ics_svox_pico_resources_tools_lingwarebuilding_picolingware_tools_windows_tools_buildbin_sh(self): test_file = self.get_test_loc('ics/svox-pico_resources-tools-LingwareBuilding-PicoLingware_tools_windows-tools/buildbin.sh') expected = [ u'Copyright (c) 2009 SVOX AG.', ] check_detection(expected, 
test_file) def test_ics_svox_pico_compat_jni_com_android_tts_compat_synthproxy_cpp(self): test_file = self.get_test_loc('ics/svox-pico-compat-jni/com_android_tts_compat_SynthProxy.cpp') expected = [ u'Copyright (c) 2009-2010 Google Inc.', ] check_detection(expected, test_file) def test_ics_svox_pico_lib_notice(self): test_file = self.get_test_loc('ics/svox-pico-lib/NOTICE') expected = [ u'Copyright (c) 2008-2009 SVOX AG', ] check_detection(expected, test_file) def test_ics_svox_pico_lib_picoacph_c(self): test_file = self.get_test_loc('ics/svox-pico-lib/picoacph.c') expected = [ u'Copyright (c) 2008-2009 SVOX AG', u'Copyright (c) 2008-2009 SVOX AG', ] check_detection(expected, test_file) def test_ics_svox_pico_lib_picofftsg_c(self): test_file = self.get_test_loc('ics/svox-pico-lib/picofftsg.c') expected = [ u'Copyright (c) 2008-2009 SVOX AG', u'Copyright (c) 2008-2009 SVOX AG', u'(Copyright Takuya OOURA, 1996-2001)', ] check_detection(expected, test_file) def test_ics_svox_pico_lib_picoos_c(self): test_file = self.get_test_loc('ics/svox-pico-lib/picoos.c') expected = [ u'Copyright (c) 2008-2009 SVOX AG', u'Copyright (c) 2008-2009 SVOX AG', u'(c) SVOX AG', ] check_detection(expected, test_file) def test_ics_svox_pico_res_xml_tts_engine_xml(self): test_file = self.get_test_loc('ics/svox-pico-res-xml/tts_engine.xml') expected = [ u'Copyright (c) 2011 The Android Open Source Project', ] check_detection(expected, test_file) def test_ics_svox_pico_res_xml_voices_list_xml(self): test_file = self.get_test_loc('ics/svox-pico-res-xml/voices_list.xml') expected = [ u'Copyright (c) 2009 The Android Open Source Project', ] check_detection(expected, test_file) def test_ics_svox_pico_tts_com_svox_picottsengine_cpp(self): test_file = self.get_test_loc('ics/svox-pico-tts/com_svox_picottsengine.cpp') expected = [ u'Copyright (c) 2008-2009 SVOX AG', ] check_detection(expected, test_file) def test_ics_tagsoup_src_org_ccil_cowan_tagsoup_autodetector_java(self): test_file = 
self.get_test_loc('ics/tagsoup-src-org-ccil-cowan-tagsoup/AutoDetector.java') expected = [ u'Copyright 2002-2008 by John Cowan.', ] check_detection(expected, test_file) def test_ics_tcpdump_aclocal_m4_trail_name_m4_dnl_comment(self): test_file = self.get_test_loc('ics/tcpdump/aclocal.m4') expected = [ u'Copyright (c) 1995, 1996, 1997, 1998 The Regents of the University of California.', u'Copyright (c) 1999 WIDE Project.', ] check_detection(expected, test_file) def test_ics_tcpdump_addrtoname_c(self): test_file = self.get_test_loc('ics/tcpdump/addrtoname.c') expected = [ u'Copyright (c) 1990, 1991, 1992, 1993, 1994, 1995, 1996, 1997 The Regents of the University of California.', ] check_detection(expected, test_file) def test_ics_tcpdump_addrtoname_h(self): test_file = self.get_test_loc('ics/tcpdump/addrtoname.h') expected = [ u'Copyright (c) 1990, 1992, 1993, 1994, 1995, 1996, 1997 The Regents of the University of California.', ] check_detection(expected, test_file) def test_ics_tcpdump_af_c(self): test_file = self.get_test_loc('ics/tcpdump/af.c') expected = [ u'Copyright (c) 1998-2006 The TCPDUMP project', ] check_detection(expected, test_file) def test_ics_tcpdump_aodv_h(self): test_file = self.get_test_loc('ics/tcpdump/aodv.h') expected = [ u'Copyright (c) 2003 Bruce M. 
Simpson <bms@spc.org>', ] check_detection(expected, test_file) def test_ics_tcpdump_appletalk_h(self): test_file = self.get_test_loc('ics/tcpdump/appletalk.h') expected = [ u'Copyright (c) 1988, 1989, 1990, 1993, 1994, 1995, 1996 The Regents of the University of California.', ] check_detection(expected, test_file) def test_ics_tcpdump_atm_h(self): test_file = self.get_test_loc('ics/tcpdump/atm.h') expected = [ u'Copyright (c) 2002 Guy Harris.', ] check_detection(expected, test_file) def test_ics_tcpdump_bootp_h(self): test_file = self.get_test_loc('ics/tcpdump/bootp.h') expected = [ u'Copyright 1988 by Carnegie Mellon.', ] check_detection(expected, test_file) def test_ics_tcpdump_chdlc_h(self): test_file = self.get_test_loc('ics/tcpdump/chdlc.h') expected = [ u'Copyright (c) 1990, 1991, 1993, 1994, 1995, 1996, 1997 The Regents of the University of California.', ] check_detection(expected, test_file) def test_ics_tcpdump_cpack_c(self): test_file = self.get_test_loc('ics/tcpdump/cpack.c') expected = [ u'Copyright (c) 2003, 2004 David Young.', ] check_detection(expected, test_file) def test_ics_tcpdump_dccp_h(self): test_file = self.get_test_loc('ics/tcpdump/dccp.h') expected = [ u'Copyright (c) Arnaldo', u'Copyright (c) Ian McDonald 2005 <iam4@cs.waikato.ac.nz>', u'Copyright (c) Yoshifumi Nishida 2005', ] check_detection(expected, test_file) @expectedFailure def test_ics_tcpdump_dccp_h_trail_name(self): test_file = self.get_test_loc('ics/tcpdump/dccp.h') expected = [ u'Copyright (c) Arnaldo Carvalho de Melo 2004', u'Copyright (c) Ian McDonald 2005 <iam4@cs.waikato.ac.nz>', u'Copyright (c) Yoshifumi Nishida 2005', ] check_detection(expected, test_file) def test_ics_tcpdump_decnet_h(self): test_file = self.get_test_loc('ics/tcpdump/decnet.h') expected = [ u'Copyright (c) 1992, 1994, 1996 The Regents of the University of California.', ] check_detection(expected, test_file) def test_ics_tcpdump_decode_prefix_h(self): test_file = 
self.get_test_loc('ics/tcpdump/decode_prefix.h') expected = [ u'Copyright (c) 1999 WIDE Project.', ] check_detection(expected, test_file) def test_ics_tcpdump_enc_h(self): test_file = self.get_test_loc('ics/tcpdump/enc.h') expected = [ u'Copyright (c) 1995, 1996, 1997, 1998 by John Ioannidis, Angelos D. Keromytis and Niels Provos.', u'Copyright (c) 2001, Angelos D. Keromytis.', ] check_detection(expected, test_file) def test_ics_tcpdump_gmt2local_c(self): test_file = self.get_test_loc('ics/tcpdump/gmt2local.c') expected = [ u'Copyright (c) 1997 The Regents of the University of California.', ] check_detection(expected, test_file) def test_ics_tcpdump_icmp6_h(self): test_file = self.get_test_loc('ics/tcpdump/icmp6.h') expected = [ u'Copyright (c) 1995, 1996, 1997, and 1998 WIDE Project.', u'Copyright (c) 1982, 1986, 1993 The Regents of the University of California.', ] check_detection(expected, test_file) def test_ics_tcpdump_ieee802_11_h(self): test_file = self.get_test_loc('ics/tcpdump/ieee802_11.h') expected = [ u'Copyright (c) 2001 Fortress Technologies Charlie Lenahan', ] check_detection(expected, test_file) @expectedFailure def test_ics_tcpdump_ieee802_11_h_trail_email(self): test_file = self.get_test_loc('ics/tcpdump/ieee802_11.h') expected = [ u'Copyright (c) 2001 Fortress Technologies Charlie Lenahan ( clenahan@fortresstech.com )', ] check_detection(expected, test_file) def test_ics_tcpdump_interface_h(self): test_file = self.get_test_loc('ics/tcpdump/interface.h') expected = [ u'Copyright (c) 1988-2002 The Regents of the University of California.', ] check_detection(expected, test_file) def test_ics_tcpdump_ipproto_h(self): test_file = self.get_test_loc('ics/tcpdump/ipproto.h') expected = [ u'Copyright (c) 1982, 1986, 1990, 1993 The Regents of the University of California.', ] check_detection(expected, test_file) def test_ics_tcpdump_l2tp_h(self): test_file = self.get_test_loc('ics/tcpdump/l2tp.h') expected = [ u'Copyright (c) 1991, 1993, 1994, 1995, 1996, 
1997 The Regents of the University of California.', ] check_detection(expected, test_file) def test_ics_tcpdump_machdep_c(self): test_file = self.get_test_loc('ics/tcpdump/machdep.c') expected = [ u'Copyright (c) 1996, 1997 The Regents of the University of California.', ] check_detection(expected, test_file) def test_ics_tcpdump_makefile_in(self): test_file = self.get_test_loc('ics/tcpdump/Makefile.in') expected = [ u'Copyright (c) 1988, 1989, 1990, 1991, 1992, 1993, 1994, 1995, 1996, 1997 The Regents of the University of California.', ] check_detection(expected, test_file) def test_ics_tcpdump_makemib(self): test_file = self.get_test_loc('ics/tcpdump/makemib') expected = [ u'Copyright (c) 1990, 1996 John Robert LoVerso.', u'copyright (c) 1999 William C. Fenner.', ] check_detection(expected, test_file) def test_ics_tcpdump_mpls_h(self): test_file = self.get_test_loc('ics/tcpdump/mpls.h') expected = [ u'Copyright (c) 2001 WIDE Project.', ] check_detection(expected, test_file) def test_ics_tcpdump_nameser_h(self): test_file = self.get_test_loc('ics/tcpdump/nameser.h') expected = [ u'Copyright (c) 1983, 1989, 1993 The Regents of the University of California.', u'Portions Copyright (c) 1993 by Digital Equipment Corporation.', ] check_detection(expected, test_file) def test_ics_tcpdump_netdissect_h(self): test_file = self.get_test_loc('ics/tcpdump/netdissect.h') expected = [ u'Copyright (c) 1988-1997 The Regents of the University of California.', u'Copyright (c) 1998-2004 Michael Richardson <mcr@tcpdump.org> The TCPDUMP project', ] check_detection(expected, test_file) def test_ics_tcpdump_nfs_h(self): test_file = self.get_test_loc('ics/tcpdump/nfs.h') expected = [ u'Copyright (c) 1989, 1993 The Regents of the University of California.', ] check_detection(expected, test_file) def test_ics_tcpdump_nfsfh_h(self): test_file = self.get_test_loc('ics/tcpdump/nfsfh.h') expected = [ u'Copyright (c) 1993, 1994 Jeffrey', u'Copyright (c) 2001 Compaq Computer Corporation.', ] 
check_detection(expected, test_file) @expectedFailure def test_ics_tcpdump_nfsfh_h_trail_name(self): test_file = self.get_test_loc('ics/tcpdump/nfsfh.h') expected = [ u'Copyright (c) 1993, 1994 Jeffrey C. Mogul, Digital Equipment Corporation, Western Research Laboratory.', u'Copyright (c) 2001 Compaq Computer Corporation.', ] check_detection(expected, test_file) def test_ics_tcpdump_parsenfsfh_c(self): test_file = self.get_test_loc('ics/tcpdump/parsenfsfh.c') expected = [ u'Copyright (c) 1993, 1994 Jeffrey', u'Copyright (c) 2001 Compaq Computer Corporation.', ] check_detection(expected, test_file) @expectedFailure def test_ics_tcpdump_parsenfsfh_c_trail_name(self): test_file = self.get_test_loc('ics/tcpdump/parsenfsfh.c') expected = [ u'Copyright (c) 1993, 1994 Jeffrey C. Mogul, Digital Equipment Corporation, Western Research Laboratory.', u'Copyright (c) 2001 Compaq Computer Corporation.', ] check_detection(expected, test_file) def test_ics_tcpdump_pmap_prot_h(self): test_file = self.get_test_loc('ics/tcpdump/pmap_prot.h') expected = [ u'Copyright (c) 1984, Sun Microsystems, Inc.', ] check_detection(expected, test_file) def test_ics_tcpdump_print_ah_c(self): test_file = self.get_test_loc('ics/tcpdump/print-ah.c') expected = [ u'Copyright (c) 1988, 1989, 1990, 1991, 1992, 1993, 1994 The Regents of the University of California.', ] check_detection(expected, test_file) def test_ics_tcpdump_print_ap1394_c(self): test_file = self.get_test_loc('ics/tcpdump/print-ap1394.c') expected = [ u'Copyright (c) 1988, 1989, 1990, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 2000 The Regents of the University of California.', ] check_detection(expected, test_file) def test_ics_tcpdump_print_ascii_c(self): test_file = self.get_test_loc('ics/tcpdump/print-ascii.c') expected = [ u'Copyright (c) 1997, 1998 The NetBSD Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_tcpdump_print_atm_c(self): test_file = self.get_test_loc('ics/tcpdump/print-atm.c') expected = [ 
u'Copyright (c) 1994, 1995, 1996, 1997 The Regents of the University of California.', ] check_detection(expected, test_file) def test_ics_tcpdump_print_beep_c(self): test_file = self.get_test_loc('ics/tcpdump/print-beep.c') expected = [ u'Copyright (c) 2000, Richard Sharpe', ] check_detection(expected, test_file) def test_ics_tcpdump_print_bootp_c(self): test_file = self.get_test_loc('ics/tcpdump/print-bootp.c') expected = [ u'Copyright (c) 1990, 1991, 1993, 1994, 1995, 1996, 1997 The Regents of the University of California.', ] check_detection(expected, test_file) def test_ics_tcpdump_print_cdp_c(self): test_file = self.get_test_loc('ics/tcpdump/print-cdp.c') expected = [ u'Copyright (c) 1992, 1993, 1994, 1995, 1996, 1997 The Regents of the University of California.', ] check_detection(expected, test_file) def test_ics_tcpdump_print_cnfp_c(self): test_file = self.get_test_loc('ics/tcpdump/print-cnfp.c') expected = [ u'Copyright (c) 1998 Michael Shalayeff', ] check_detection(expected, test_file) def test_ics_tcpdump_print_dccp_c(self): test_file = self.get_test_loc('ics/tcpdump/print-dccp.c') expected = [ u'Copyright (c) Arnaldo', u'Copyright (c) Ian McDonald 2005', u'Copyright (c) Yoshifumi Nishida 2005', ] check_detection(expected, test_file) @expectedFailure def test_ics_tcpdump_print_dccp_c_trail_name(self): test_file = self.get_test_loc('ics/tcpdump/print-dccp.c') expected = [ u'Copyright (c) Arnaldo Carvalho de Melo 2004', u'Copyright (c) Ian McDonald 2005', u'Copyright (c) Yoshifumi Nishida 2005', ] check_detection(expected, test_file) def test_ics_tcpdump_print_dhcp6_c(self): test_file = self.get_test_loc('ics/tcpdump/print-dhcp6.c') expected = [ u'Copyright (c) 1998 and 1999 WIDE Project.', ] check_detection(expected, test_file) def test_ics_tcpdump_print_dvmrp_c(self): test_file = self.get_test_loc('ics/tcpdump/print-dvmrp.c') expected = [ u'Copyright (c) 1995, 1996 The Regents of the University of California.', ] check_detection(expected, test_file) def 
test_ics_tcpdump_print_eap_c(self): test_file = self.get_test_loc('ics/tcpdump/print-eap.c') expected = [ u'Copyright (c) 2004 - Michael Richardson <mcr@xelerance.com>', ] check_detection(expected, test_file) def test_ics_tcpdump_print_egp_c(self): test_file = self.get_test_loc('ics/tcpdump/print-egp.c') expected = [ u'Copyright (c) 1991, 1992, 1993, 1994, 1995, 1996 The Regents of the University of California.', ] check_detection(expected, test_file) def test_ics_tcpdump_print_eigrp_c(self): test_file = self.get_test_loc('ics/tcpdump/print-eigrp.c') expected = [ u'Copyright (c) 1998-2004 Hannes Gredler <hannes@tcpdump.org> The TCPDUMP project', ] check_detection(expected, test_file) def test_ics_tcpdump_print_enc_c(self): test_file = self.get_test_loc('ics/tcpdump/print-enc.c') expected = [ u'Copyright (c) 1990, 1991, 1993, 1994, 1995, 1996 The Regents of the University of California.', ] check_detection(expected, test_file) def test_ics_tcpdump_print_fddi_c(self): test_file = self.get_test_loc('ics/tcpdump/print-fddi.c') expected = [ u'Copyright (c) 1991, 1992, 1993, 1994, 1995, 1996, 1997 The Regents of the University of California.', ] check_detection(expected, test_file) def test_ics_tcpdump_print_frag6_c(self): test_file = self.get_test_loc('ics/tcpdump/print-frag6.c') expected = [ u'Copyright (c) 1988, 1989, 1990, 1991, 1993, 1994 The Regents of the University of California.', ] check_detection(expected, test_file) def test_ics_tcpdump_print_gre_c(self): test_file = self.get_test_loc('ics/tcpdump/print-gre.c') expected = [ u'Copyright (c) 2002 Jason L. 
Wright (jason@thought.net)', ] check_detection(expected, test_file) def test_ics_tcpdump_print_hsrp_c(self): test_file = self.get_test_loc('ics/tcpdump/print-hsrp.c') expected = [ u'Copyright (c) 2001 Julian Cowley', ] check_detection(expected, test_file) def test_ics_tcpdump_print_ip6opts_c(self): test_file = self.get_test_loc('ics/tcpdump/print-ip6opts.c') expected = [ u'Copyright (c) 1998 WIDE Project.', ] check_detection(expected, test_file) def test_ics_tcpdump_print_krb_c(self): test_file = self.get_test_loc('ics/tcpdump/print-krb.c') expected = [ u'Copyright (c) 1995, 1996, 1997 The Regents of the University of California.', ] check_detection(expected, test_file) def test_ics_tcpdump_print_lwres_c(self): test_file = self.get_test_loc('ics/tcpdump/print-lwres.c') expected = [ u'Copyright (c) 2001 WIDE Project.', ] check_detection(expected, test_file) def test_ics_tcpdump_print_mobile_c(self): test_file = self.get_test_loc('ics/tcpdump/print-mobile.c') expected = [ u'(c) 1998 The NetBSD Foundation, Inc.', ] check_detection(expected, test_file) def test_ics_tcpdump_print_mobility_c(self): test_file = self.get_test_loc('ics/tcpdump/print-mobility.c') expected = [ u'Copyright (c) 2002 WIDE Project.', ] check_detection(expected, test_file) def test_ics_tcpdump_print_msdp_c(self): test_file = self.get_test_loc('ics/tcpdump/print-msdp.c') expected = [ u'Copyright (c) 2001 William C. 
Fenner.', ] check_detection(expected, test_file) def test_ics_tcpdump_print_olsr_c(self): test_file = self.get_test_loc('ics/tcpdump/print-olsr.c') expected = [ u'Copyright (c) 1998-2007 The TCPDUMP project', ] check_detection(expected, test_file) def test_ics_tcpdump_print_radius_c(self): test_file = self.get_test_loc('ics/tcpdump/print-radius.c') expected = [ u'Copyright (c) 2000 Alfredo Andres Omella.', ] check_detection(expected, test_file) def test_ics_tcpdump_print_rip_c(self): test_file = self.get_test_loc('ics/tcpdump/print-rip.c') expected = [ u'Copyright (c) 1989, 1990, 1991, 1993, 1994, 1996 The Regents of the University of California.', ] check_detection(expected, test_file) def test_ics_tcpdump_print_ripng_c(self): test_file = self.get_test_loc('ics/tcpdump/print-ripng.c') expected = [ u'Copyright (c) 1989, 1990, 1991, 1993, 1994 The Regents of the University of California.', ] check_detection(expected, test_file) def test_ics_tcpdump_print_rx_c(self): test_file = self.get_test_loc('ics/tcpdump/print-rx.c') expected = [ u'Copyright (c) 2000 United States Government', ] check_detection(expected, test_file) @expectedFailure def test_ics_tcpdump_print_rx_c_trail_name(self): test_file = self.get_test_loc('ics/tcpdump/print-rx.c') expected = [ u'Copyright: (c) 2000 United States Government as represented by the Secretary of the Navy.', ] check_detection(expected, test_file) def test_ics_tcpdump_print_sctp_c_trail_name(self): test_file = self.get_test_loc('ics/tcpdump/print-sctp.c') expected = [ u'Copyright (c) 2001 NETLAB, Temple University', u'Copyright (c) 2001 Protocol Engineering Lab, University of Delaware', ] check_detection(expected, test_file) def test_ics_tcpdump_print_sl_c(self): test_file = self.get_test_loc('ics/tcpdump/print-sl.c') expected = [ u'Copyright (c) 1989, 1990, 1991, 1993, 1994, 1995, 1996, 1997 The Regents of the University of California.', ] check_detection(expected, test_file) def test_ics_tcpdump_print_slow_c(self): test_file = 
self.get_test_loc('ics/tcpdump/print-slow.c') expected = [ u'Copyright (c) 1998-2005 The TCPDUMP project', ] check_detection(expected, test_file) def test_ics_tcpdump_print_smb_c(self): test_file = self.get_test_loc('ics/tcpdump/print-smb.c') expected = [ u'Copyright (c) Andrew Tridgell 1995-1999', ] check_detection(expected, test_file) def test_ics_tcpdump_print_snmp_c(self): test_file = self.get_test_loc('ics/tcpdump/print-snmp.c') expected = [ u'Copyright (c) 1990, 1991, 1993, 1994, 1995, 1996, 1997 John Robert LoVerso.', u'J. Schoenwaelder, Copyright (c) 1999.', u'Copyright (c) 1990, 1991, 1993, 1994, 1995, 1996, 1997 this software was produced', ] check_detection(expected, test_file) @expectedFailure def test_ics_tcpdump_print_snmp_c_trail_name_lead_name_trail_name_complex(self): test_file = self.get_test_loc('ics/tcpdump/print-snmp.c') expected = [ u'Copyright (c) 1990, 1991, 1993, 1994, 1995, 1996, 1997 John Robert LoVerso.', u'J. Schoenwaelder, Copyright (c) 1999.', u'Los Alamos National Laboratory Copyright (c) 1990, 1991, 1993, 1994, 1995, 1996, 1997', ] check_detection(expected, test_file) def test_ics_tcpdump_print_stp_c(self): test_file = self.get_test_loc('ics/tcpdump/print-stp.c') expected = [ u'Copyright (c) 2000 Lennert Buytenhek', ] check_detection(expected, test_file) def test_ics_tcpdump_print_tcp_c(self): test_file = self.get_test_loc('ics/tcpdump/print-tcp.c') expected = [ u'Copyright (c) 1988, 1989, 1990, 1991, 1992, 1993, 1994, 1995, 1996, 1997 The Regents of the University of California.', u'Copyright (c) 1999-2004 The tcpdump.org', ] check_detection(expected, test_file) def test_ics_tcpdump_print_telnet_c(self): test_file = self.get_test_loc('ics/tcpdump/print-telnet.c') expected = [ u'Copyright (c) 1997, 1998 The NetBSD Foundation, Inc.', u'Copyright (c) 1994, Simon J. 
Gerraty.', ] check_detection(expected, test_file) def test_ics_tcpdump_print_timed_c(self): test_file = self.get_test_loc('ics/tcpdump/print-timed.c') expected = [ u'Copyright (c) 2000 Ben Smithurst <ben@scientia.demon.co.uk>', ] check_detection(expected, test_file) def test_ics_tcpdump_print_token_c(self): test_file = self.get_test_loc('ics/tcpdump/print-token.c') expected = [ u'Copyright (c) 1988, 1989, 1990, 1991, 1992, 1993, 1994, 1995, 1996 The Regents of the University of California.', ] check_detection(expected, test_file) def test_ics_tcpdump_print_vrrp_c(self): test_file = self.get_test_loc('ics/tcpdump/print-vrrp.c') expected = [ u'Copyright (c) 2000 William C. Fenner.', ] check_detection(expected, test_file) def test_ics_tcpdump_print_wb_c(self): test_file = self.get_test_loc('ics/tcpdump/print-wb.c') expected = [ u'Copyright (c) 1993, 1994, 1995, 1996 The Regents of the University of California.', ] check_detection(expected, test_file) def test_ics_tcpdump_print_zephyr_c(self): test_file = self.get_test_loc('ics/tcpdump/print-zephyr.c') expected = [ u'Copyright (c) 2001 Nickolai Zeldovich <kolya@MIT.EDU>', ] check_detection(expected, test_file) def test_ics_tcpdump_route6d_h(self): test_file = self.get_test_loc('ics/tcpdump/route6d.h') expected = [ u'Copyright (c) 1995, 1996, 1997 and 1998 WIDE Project.', ] check_detection(expected, test_file) def test_ics_tcpdump_sctpconstants_h_trail_name(self): test_file = self.get_test_loc('ics/tcpdump/sctpConstants.h') expected = [ u'Implementation Copyright (c) 1999 Cisco And Motorola', ] check_detection(expected, test_file) def test_ics_tcpdump_slcompress_h(self): test_file = self.get_test_loc('ics/tcpdump/slcompress.h') expected = [ u'Copyright (c) 1989, 1990, 1992, 1993 Regents of the University of California.', ] check_detection(expected, test_file) def test_ics_tcpdump_slip_h(self): test_file = self.get_test_loc('ics/tcpdump/slip.h') expected = [ u'Copyright (c) 1990 Regents of the University of California.', 
] check_detection(expected, test_file) def test_ics_tcpdump_strcasecmp_c(self): test_file = self.get_test_loc('ics/tcpdump/strcasecmp.c') expected = [ u'Copyright (c) 1987 Regents of the University of California.', ] check_detection(expected, test_file) def test_ics_tcpdump_tcpdump_1_trail_name(self): test_file = self.get_test_loc('ics/tcpdump/tcpdump.1') expected = [ u'Copyright (c) 1987, 1988, 1989, 1990, 1991, 1992, 1994, 1995, 1996, 1997 The Regents of the University of California.', ] check_detection(expected, test_file) def test_ics_tcpdump_tcpdump_c(self): test_file = self.get_test_loc('ics/tcpdump/tcpdump.c') expected = [ u'Copyright (c) 1988, 1989, 1990, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 2000 The Regents of the University of California.', u'Copyright (c) 2001 Seth Webster <swebster@sst.ll.mit.edu>', u'Copyright (c) 1988, 1989, 1990, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 2000 The Regents of the University of California.', ] check_detection(expected, test_file) def test_ics_tcpdump_telnet_h(self): test_file = self.get_test_loc('ics/tcpdump/telnet.h') expected = [ u'Copyright (c) 1983, 1993 The Regents of the University of California.', ] check_detection(expected, test_file) def test_ics_tcpdump_token_h(self): test_file = self.get_test_loc('ics/tcpdump/token.h') expected = [ u'Copyright (c) 1998, Larry Lile', ] check_detection(expected, test_file) def test_ics_tcpdump_vfprintf_c(self): test_file = self.get_test_loc('ics/tcpdump/vfprintf.c') expected = [ u'Copyright (c) 1995 The Regents of the University of California.', ] check_detection(expected, test_file) def test_ics_tcpdump_missing_inet_aton_c(self): test_file = self.get_test_loc('ics/tcpdump-missing/inet_aton.c') expected = [ u'Copyright (c) 1995, 1996, 1997 Kungliga Tekniska Hogskolan Royal Institute of Technology', ] check_detection(expected, test_file) @expectedFailure def test_ics_tcpdump_missing_inet_aton_c_trail_place(self): test_file = 
self.get_test_loc('ics/tcpdump-missing/inet_aton.c') expected = [ u'Copyright (c) 1995, 1996, 1997 Kungliga Tekniska Hogskolan (Royal Institute of Technology, Stockholm, Sweden)', ] check_detection(expected, test_file) def test_ics_tcpdump_missing_inet_ntop_c(self): test_file = self.get_test_loc('ics/tcpdump-missing/inet_ntop.c') expected = [ u'Copyright (c) 1999 Kungliga Tekniska Hogskolan Royal Institute of Technology', ] check_detection(expected, test_file) @expectedFailure def test_ics_tcpdump_missing_inet_ntop_c_trail_place(self): test_file = self.get_test_loc('ics/tcpdump-missing/inet_ntop.c') expected = [ u'Copyright (c) 1999 Kungliga Tekniska Hogskolan (Royal Institute of Technology, Stockholm, Sweden)', ] check_detection(expected, test_file) def test_ics_tinyxml_android_mk(self): test_file = self.get_test_loc('ics/tinyxml/Android.mk') expected = [ u'Copyright 2005 The Android Open Source Project', ] check_detection(expected, test_file) def test_ics_tinyxml_tinyxml_cpp(self): test_file = self.get_test_loc('ics/tinyxml/tinyxml.cpp') expected = [ u'copyright (c) 2000-2002 Lee Thomason', ] check_detection(expected, test_file) def test_ics_tinyxml_tinyxml_h(self): test_file = self.get_test_loc('ics/tinyxml/tinyxml.h') expected = [ u'copyright (c) 2000-2002 Lee Thomason', ] check_detection(expected, test_file) def test_ics_tremolo_notice(self): test_file = self.get_test_loc('ics/tremolo/NOTICE') expected = [ u'Copyright (c) 2002-2009, Xiph.org', u'Copyright (c) 2010, Robin Watts for Pinknoise Productions Ltd', ] check_detection(expected, test_file) def test_ics_tremolo_tremolo_asm_arm_h(self): test_file = self.get_test_loc('ics/tremolo-Tremolo/asm_arm.h') expected = [ u'Copyright (c) 2002-2009, Xiph.org', u'Copyright (c) 2010, Robin Watts for Pinknoise Productions Ltd', ] check_detection(expected, test_file) def test_ics_webp_examples_dwebp_c(self): test_file = self.get_test_loc('ics/webp-examples/dwebp.c') expected = [ u'Copyright 2010 Google Inc.', ] 
check_detection(expected, test_file) def test_ics_webp_include_webp_encode_h(self): test_file = self.get_test_loc('ics/webp-include-webp/encode.h') expected = [ u'Copyright 2011 Google Inc.', ] check_detection(expected, test_file) def test_ics_webp_src_dec_android_mk(self): test_file = self.get_test_loc('ics/webp-src-dec/Android.mk') expected = [ u'Copyright 2010 The Android Open Source Project', ] check_detection(expected, test_file) def test_ics_webp_src_enc_dsp_c(self): test_file = self.get_test_loc('ics/webp-src-enc/dsp.c') expected = [ u'Copyright 2011 Google Inc.', ] check_detection(expected, test_file) def test_ics_webrtc_android_webrtc_mk(self): test_file = self.get_test_loc('ics/webrtc/android-webrtc.mk') expected = [ u'Copyright (c) 2011 The WebRTC project', ] check_detection(expected, test_file) def test_ics_webrtc_notice(self): test_file = self.get_test_loc('ics/webrtc/NOTICE') expected = [ u'Copyright (c) 2011 The WebRTC project', u'Copyright (c) 2010 The Android Open Source Project', u'Copyright Takuya OOURA, 1996-2001', u'Copyright Takuya OOURA, 1996-2001', u'Copyright Steven J. Ross 2001 - 2009.', ] check_detection(expected, test_file) def test_ics_webrtc_src_common_types_h(self): test_file = self.get_test_loc('ics/webrtc-src/common_types.h') expected = [ u'Copyright (c) 2011 The WebRTC project', ] check_detection(expected, test_file) def test_ics_webrtc_src_modules_audio_processing_aec_main_source_aec_rdft_c(self): test_file = self.get_test_loc('ics/webrtc-src-modules-audio_processing-aec-main-source/aec_rdft.c') expected = [ u'Copyright Takuya OOURA, 1996-2001', ] check_detection(expected, test_file) def test_ics_webrtc_src_system_wrappers_source_spreadsortlib_spreadsort_hpp(self): test_file = self.get_test_loc('ics/webrtc-src-system_wrappers-source-spreadsortlib/spreadsort.hpp') expected = [ u'Copyright Steven J. 
Ross 2001 - 2009.', ] check_detection(expected, test_file) def test_ics_wpa_supplicant_aes_c(self): test_file = self.get_test_loc('ics/wpa_supplicant/aes.c') expected = [ u'Copyright (c) 2003-2005, Jouni Malinen <j@w1.fi>', ] check_detection(expected, test_file) def test_ics_wpa_supplicant_aes_h(self): test_file = self.get_test_loc('ics/wpa_supplicant/aes.h') expected = [ u'Copyright (c) 2003-2006, Jouni Malinen <j@w1.fi>', ] check_detection(expected, test_file) def test_ics_wpa_supplicant_aes_wrap_c(self): test_file = self.get_test_loc('ics/wpa_supplicant/aes_wrap.c') expected = [ u'Copyright (c) 2003-2007, Jouni Malinen <j@w1.fi>', ] check_detection(expected, test_file) def test_ics_wpa_supplicant_asn1_c(self): test_file = self.get_test_loc('ics/wpa_supplicant/asn1.c') expected = [ u'Copyright (c) 2006, Jouni Malinen <j@w1.fi>', ] check_detection(expected, test_file) def test_ics_wpa_supplicant_base64_c(self): test_file = self.get_test_loc('ics/wpa_supplicant/base64.c') expected = [ u'Copyright (c) 2005, Jouni Malinen <j@w1.fi>', ] check_detection(expected, test_file) def test_ics_wpa_supplicant_build_config_h(self): test_file = self.get_test_loc('ics/wpa_supplicant/build_config.h') expected = [ u'Copyright (c) 2005-2006, Jouni Malinen <j@w1.fi>', ] check_detection(expected, test_file) def test_ics_wpa_supplicant_common_c(self): test_file = self.get_test_loc('ics/wpa_supplicant/common.c') expected = [ u'Copyright (c) 2002-2006, Jouni Malinen <j@w1.fi>', ] check_detection(expected, test_file) def test_ics_wpa_supplicant_config_h(self): test_file = self.get_test_loc('ics/wpa_supplicant/config.h') expected = [ u'Copyright (c) 2003-2005, Jouni Malinen <j@w1.fi>', ] check_detection(expected, test_file) def test_ics_wpa_supplicant_crypto_c(self): test_file = self.get_test_loc('ics/wpa_supplicant/crypto.c') expected = [ u'Copyright (c) 2004-2005, Jouni Malinen <j@w1.fi>', ] check_detection(expected, test_file) def test_ics_wpa_supplicant_ctrl_iface_c(self): test_file = 
self.get_test_loc('ics/wpa_supplicant/ctrl_iface.c') expected = [ u'Copyright (c) 2004-2006, Jouni Malinen <j@w1.fi>', ] check_detection(expected, test_file) def test_ics_wpa_supplicant_ctrl_iface_dbus_c(self): test_file = self.get_test_loc('ics/wpa_supplicant/ctrl_iface_dbus.c') expected = [ u'Copyright (c) 2006, Dan Williams <dcbw@redhat.com> and Red Hat, Inc.', ] check_detection(expected, test_file) def test_ics_wpa_supplicant_driver_atmel_c(self): test_file = self.get_test_loc('ics/wpa_supplicant/driver_atmel.c') expected = [ u'Copyright (c) 2000-2005, ATMEL Corporation', u'Copyright (c) 2004-2007, Jouni Malinen <j@w1.fi>', u'Copyright 2000-2001 ATMEL Corporation.', ] check_detection(expected, test_file) def test_ics_wpa_supplicant_driver_broadcom_c(self): test_file = self.get_test_loc('ics/wpa_supplicant/driver_broadcom.c') expected = [ u'Copyright (c) 2004, Nikki Chumkov <nikki@gattaca.ru>', u'Copyright (c) 2004, Jouni Malinen <j@w1.fi>', ] check_detection(expected, test_file) def test_ics_wpa_supplicant_driver_bsd_c(self): test_file = self.get_test_loc('ics/wpa_supplicant/driver_bsd.c') expected = [ u'Copyright (c) 2004, Sam Leffler <sam@errno.com>', ] check_detection(expected, test_file) def test_ics_wpa_supplicant_driver_ipw_c(self): test_file = self.get_test_loc('ics/wpa_supplicant/driver_ipw.c') expected = [ u'Copyright (c) 2005 Zhu Yi <yi.zhu@intel.com>', u'Copyright (c) 2004 Lubomir Gelo <lgelo@cnc.sk>', u'Copyright (c) 2003-2004, Jouni Malinen <j@w1.fi>', ] check_detection(expected, test_file) def test_ics_wpa_supplicant_driver_madwifi_c(self): test_file = self.get_test_loc('ics/wpa_supplicant/driver_madwifi.c') expected = [ u'Copyright (c) 2004, Sam Leffler <sam@errno.com>', u'Copyright (c) 2004-2005, Jouni Malinen <j@w1.fi>', ] check_detection(expected, test_file) def test_ics_wpa_supplicant_driver_ndiswrapper_c(self): test_file = self.get_test_loc('ics/wpa_supplicant/driver_ndiswrapper.c') expected = [ u'Copyright (c) 2004-2006, Giridhar Pemmasani 
<giri@lmc.cs.sunysb.edu>', u'Copyright (c) 2004-2006, Jouni Malinen <j@w1.fi>', ] check_detection(expected, test_file) def test_ics_wpa_supplicant_driver_prism54_c(self): test_file = self.get_test_loc('ics/wpa_supplicant/driver_prism54.c') expected = [ u'Copyright (c) 2003-2005, Jouni Malinen <j@w1.fi>', u'Copyright (c) 2004, Luis R. Rodriguez <mcgrof@ruslug.rutgers.edu>', ] check_detection(expected, test_file) def test_ics_wpa_supplicant_driver_wired_c(self): test_file = self.get_test_loc('ics/wpa_supplicant/driver_wired.c') expected = [ u'Copyright (c) 2005-2007, Jouni Malinen <j@w1.fi>', ] check_detection(expected, test_file) def test_ics_wpa_supplicant_eap_gpsk_c(self): test_file = self.get_test_loc('ics/wpa_supplicant/eap_gpsk.c') expected = [ u'Copyright (c) 2006-2007, Jouni Malinen <j@w1.fi>', ] check_detection(expected, test_file) def test_ics_wpa_supplicant_eap_psk_c(self): test_file = self.get_test_loc('ics/wpa_supplicant/eap_psk.c') expected = [ u'Copyright (c) 2004-2007, Jouni Malinen <j@w1.fi>', ] check_detection(expected, test_file) def test_ics_wpa_supplicant_eap_sim_common_c(self): test_file = self.get_test_loc('ics/wpa_supplicant/eap_sim_common.c') expected = [ u'Copyright (c) 2004-2008, Jouni Malinen <j@w1.fi>', ] check_detection(expected, test_file) def test_ics_wpa_supplicant_eapol_test_c(self): test_file = self.get_test_loc('ics/wpa_supplicant/eapol_test.c') expected = [ u'Copyright (c) 2003-2006, Jouni Malinen <j@w1.fi>', ] check_detection(expected, test_file) def test_ics_wpa_supplicant_eloop_c(self): test_file = self.get_test_loc('ics/wpa_supplicant/eloop.c') expected = [ u'Copyright (c) 2002-2005, Jouni Malinen <j@w1.fi>', ] check_detection(expected, test_file) def test_ics_wpa_supplicant_l2_packet_freebsd_c(self): test_file = self.get_test_loc('ics/wpa_supplicant/l2_packet_freebsd.c') expected = [ u'Copyright (c) 2003-2005, Jouni Malinen <j@w1.fi>', u'Copyright (c) 2005, Sam Leffler <sam@errno.com>', ] check_detection(expected, test_file) 
def test_ics_wpa_supplicant_mlme_c(self): test_file = self.get_test_loc('ics/wpa_supplicant/mlme.c') expected = [ u'Copyright (c) 2003-2006, Jouni Malinen <j@w1.fi>', u'Copyright (c) 2004, Instant802 Networks, Inc.', u'Copyright (c) 2005-2006, Devicescape Software, Inc.', ] check_detection(expected, test_file) def test_ics_wpa_supplicant_notice(self): test_file = self.get_test_loc('ics/wpa_supplicant/NOTICE') expected = [ u'Copyright (c) 2003-2008, Jouni Malinen <j@w1.fi>', ] check_detection(expected, test_file) def test_ics_wpa_supplicant_radius_c(self): test_file = self.get_test_loc('ics/wpa_supplicant/radius.c') expected = [ u'Copyright (c) 2002-2008, Jouni Malinen <j@w1.fi>', ] check_detection(expected, test_file) def test_ics_wpa_supplicant_tls_none_c(self): test_file = self.get_test_loc('ics/wpa_supplicant/tls_none.c') expected = [ u'Copyright (c) 2004, Jouni Malinen <j@w1.fi>', ] check_detection(expected, test_file) def test_ics_wpa_supplicant_wireless_copy_h(self): test_file = self.get_test_loc('ics/wpa_supplicant/wireless_copy.h') expected = [ u'Copyright (c) 1997-2007 Jean Tourrilhes', ] check_detection(expected, test_file) def test_ics_wpa_supplicant_wpa_cli_c(self): test_file = self.get_test_loc('ics/wpa_supplicant/wpa_cli.c') expected = [ u'Copyright (c) 2004-2008, Jouni Malinen <j@w1.fi>', u'Copyright (c) 2004-2008, Jouni Malinen <j@w1.fi>', ] check_detection(expected, test_file) def test_ics_wpa_supplicant_wpa_supplicant_c(self): test_file = self.get_test_loc('ics/wpa_supplicant/wpa_supplicant.c') expected = [ u'Copyright (c) 2003-2008, Jouni Malinen <j@w1.fi>', u'Copyright (c) 2003-2008, Jouni Malinen <j@w1.fi>', ] check_detection(expected, test_file) def test_ics_wpa_supplicant_wpa_gui_wpagui_ui_h(self): test_file = self.get_test_loc('ics/wpa_supplicant-wpa_gui/wpagui.ui.h') expected = [ u'Copyright (c) 2003-2008, Jouni Malinen <j@w1.fi>', ] check_detection(expected, test_file) def test_ics_wpa_supplicant_wpa_gui_qt4_wpagui_cpp(self): test_file = 
self.get_test_loc('ics/wpa_supplicant-wpa_gui-qt4/wpagui.cpp') expected = [ u'Copyright (c) 2005-2008, Jouni Malinen <j@w1.fi>', u'Copyright (c) 2003-2008, Jouni Malinen <j@w1.fi>', ] check_detection(expected, test_file) def test_ics_xmlwriter_src_org_jheer_xmlwriter_java(self): test_file = self.get_test_loc('ics/xmlwriter-src-org-jheer/XMLWriter.java') expected = [ u'Copyright (c) 2004-2006 Regents of the University of California.', ] check_detection(expected, test_file) def test_ics_yaffs2_yaffs2_devextras_h(self): test_file = self.get_test_loc('ics/yaffs2-yaffs2/devextras.h') expected = [ u'Copyright (c) 2002 Aleph One Ltd. for Toby Churchill Ltd and Brightstar Engineering', ] check_detection(expected, test_file) def test_ics_yaffs2_yaffs2_patch_ker_sh(self): test_file = self.get_test_loc('ics/yaffs2-yaffs2/patch-ker.sh') expected = [ u'Copyright (c) 2002 Aleph One Ltd.', ] check_detection(expected, test_file) def test_ics_yaffs2_yaffs2_yaffs_qsort_h(self): test_file = self.get_test_loc('ics/yaffs2-yaffs2/yaffs_qsort.h') expected = [ u'Copyright (c) 2000-2002 Silicon Graphics, Inc.', ] check_detection(expected, test_file) def test_ics_yaffs2_yaffs2_direct_makefile(self): test_file = self.get_test_loc('ics/yaffs2-yaffs2-direct/Makefile') expected = [ u'Copyright (c) 2003 Aleph One Ltd.', ] check_detection(expected, test_file) def test_ics_zlib_adler32_c(self): test_file = self.get_test_loc('ics/zlib/adler32.c') expected = [ u'Copyright (c) 1995-2007 Mark Adler', ] check_detection(expected, test_file) def test_ics_zlib_crc32_c(self): test_file = self.get_test_loc('ics/zlib/crc32.c') expected = [ u'Copyright (c) 1995-2006, 2010 Mark Adler', ] check_detection(expected, test_file) def test_ics_zlib_deflate_c(self): test_file = self.get_test_loc('ics/zlib/deflate.c') expected = [ u'Copyright (c) 1995-2010 Jean-loup Gailly and Mark Adler', u'Copyright 1995-2010 Jean-loup Gailly and Mark Adler', ] check_detection(expected, test_file) def test_ics_zlib_deflate_h(self): 
test_file = self.get_test_loc('ics/zlib/deflate.h') expected = [ u'Copyright (c) 1995-2010 Jean-loup Gailly', ] check_detection(expected, test_file) def test_ics_zlib_example_c(self): test_file = self.get_test_loc('ics/zlib/example.c') expected = [ u'Copyright (c) 1995-2006 Jean-loup Gailly.', ] check_detection(expected, test_file) def test_ics_zlib_gzclose_c(self): test_file = self.get_test_loc('ics/zlib/gzclose.c') expected = [ u'Copyright (c) 2004, 2010 Mark Adler', ] check_detection(expected, test_file) def test_ics_zlib_gzguts_h(self): test_file = self.get_test_loc('ics/zlib/gzguts.h') expected = [ u'Copyright (c) 2004, 2005, 2010 Mark Adler', ] check_detection(expected, test_file) def test_ics_zlib_infback_c(self): test_file = self.get_test_loc('ics/zlib/infback.c') expected = [ u'Copyright (c) 1995-2009 Mark Adler', ] check_detection(expected, test_file) def test_ics_zlib_inffast_c(self): test_file = self.get_test_loc('ics/zlib/inffast.c') expected = [ u'Copyright (c) 1995-2008, 2010 Mark Adler', ] check_detection(expected, test_file) def test_ics_zlib_inffast_h(self): test_file = self.get_test_loc('ics/zlib/inffast.h') expected = [ u'Copyright (c) 1995-2003, 2010 Mark Adler', ] check_detection(expected, test_file) def test_ics_zlib_inflate_c(self): test_file = self.get_test_loc('ics/zlib/inflate.c') expected = [ u'Copyright (c) 1995-2010 Mark Adler', ] check_detection(expected, test_file) def test_ics_zlib_inftrees_c(self): test_file = self.get_test_loc('ics/zlib/inftrees.c') expected = [ u'Copyright (c) 1995-2010 Mark Adler', u'Copyright 1995-2010 Mark Adler', ] check_detection(expected, test_file) def test_ics_zlib_inftrees_h(self): test_file = self.get_test_loc('ics/zlib/inftrees.h') expected = [ u'Copyright (c) 1995-2005, 2010 Mark Adler', ] check_detection(expected, test_file) def test_ics_zlib_makefile_in(self): test_file = self.get_test_loc('ics/zlib/Makefile.in') expected = [ u'Copyright (c) 1995-2010 Jean-loup Gailly.', ] check_detection(expected, 
test_file) def test_ics_zlib_minigzip_c(self): test_file = self.get_test_loc('ics/zlib/minigzip.c') expected = [ u'Copyright (c) 1995-2006, 2010 Jean-loup Gailly.', ] check_detection(expected, test_file) def test_ics_zlib_notice(self): test_file = self.get_test_loc('ics/zlib/NOTICE') expected = [ u'(c) 1995-2004 Jean-loup Gailly and Mark Adler', ] check_detection(expected, test_file) def test_ics_zlib_readme(self): test_file = self.get_test_loc('ics/zlib/README') expected = [ u'(c) 1995-2010 Jean-loup Gailly and Mark Adler', ] check_detection(expected, test_file) def test_ics_zlib_trees_c(self): test_file = self.get_test_loc('ics/zlib/trees.c') expected = [ u'Copyright (c) 1995-2010 Jean-loup Gailly', ] check_detection(expected, test_file) def test_ics_zlib_uncompr_c(self): test_file = self.get_test_loc('ics/zlib/uncompr.c') expected = [ u'Copyright (c) 1995-2003, 2010 Jean-loup Gailly.', ] check_detection(expected, test_file) def test_ics_zlib_zconf_h(self): test_file = self.get_test_loc('ics/zlib/zconf.h') expected = [ u'Copyright (c) 1995-2010 Jean-loup Gailly.', ] check_detection(expected, test_file) def test_ics_zlib_zlib_h(self): test_file = self.get_test_loc('ics/zlib/zlib.h') expected = [ u'Copyright (c) 1995-2010 Jean-loup Gailly and Mark Adler', ] check_detection(expected, test_file) def test_ics_zlib_zutil_c(self): test_file = self.get_test_loc('ics/zlib/zutil.c') expected = [ u'Copyright (c) 1995-2005, 2010 Jean-loup Gailly.', ] check_detection(expected, test_file) def test_ics_zlib_amiga_makefile_pup(self): test_file = self.get_test_loc('ics/zlib-amiga/Makefile.pup') expected = [ u'Copyright (c) 1998 by Andreas R. 
Kleinert', ] check_detection(expected, test_file) def test_ics_zlib_contrib_ada_buffer_demo_adb(self): test_file = self.get_test_loc('ics/zlib-contrib-ada/buffer_demo.adb') expected = [ u'Copyright (c) 2002-2004 Dmitriy Anisimkov', ] check_detection(expected, test_file) def test_ics_zlib_contrib_ada_mtest_adb(self): test_file = self.get_test_loc('ics/zlib-contrib-ada/mtest.adb') expected = [ u'Copyright (c) 2002-2003 Dmitriy Anisimkov', ] check_detection(expected, test_file) def test_ics_zlib_contrib_ada_zlib_ads(self): test_file = self.get_test_loc('ics/zlib-contrib-ada/zlib.ads') expected = [ u'Copyright (c) 2002-2004 Dmitriy Anisimkov', ] check_detection(expected, test_file) def test_ics_zlib_contrib_blast_blast_c(self): test_file = self.get_test_loc('ics/zlib-contrib-blast/blast.c') expected = [ u'Copyright (c) 2003 Mark Adler', ] check_detection(expected, test_file) def test_ics_zlib_contrib_blast_blast_h(self): test_file = self.get_test_loc('ics/zlib-contrib-blast/blast.h') expected = [ u'Copyright (c) 2003 Mark Adler', ] check_detection(expected, test_file) def test_ics_zlib_contrib_delphi_readme_txt(self): test_file = self.get_test_loc('ics/zlib-contrib-delphi/readme.txt') expected = [ u'Copyright (c) 1997,99 Borland Corp.', ] check_detection(expected, test_file) def test_ics_zlib_contrib_dotzlib_readme_txt(self): test_file = self.get_test_loc('ics/zlib-contrib-dotzlib/readme.txt') expected = [ u'Copyright (c) Henrik Ravn 2004', ] check_detection(expected, test_file) def test_ics_zlib_contrib_dotzlib_dotzlib_checksumimpl_cs(self): test_file = self.get_test_loc('ics/zlib-contrib-dotzlib-DotZLib/ChecksumImpl.cs') expected = [ u'(c) Copyright Henrik Ravn 2004', ] check_detection(expected, test_file) def test_ics_zlib_contrib_dotzlib_dotzlib_assemblyinfo_cs(self): test_file = self.get_test_loc('ics/zlib-contrib-dotzlib-DotZLib/AssemblyInfo.cs') expected = [ u'(c) 2004 by Henrik Ravn', ] check_detection(expected, test_file) def 
test_ics_zlib_contrib_infback9_infback9_c(self): test_file = self.get_test_loc('ics/zlib-contrib-infback9/infback9.c') expected = [ u'Copyright (c) 1995-2008 Mark Adler', ] check_detection(expected, test_file) def test_ics_zlib_contrib_infback9_infback9_h(self): test_file = self.get_test_loc('ics/zlib-contrib-infback9/infback9.h') expected = [ u'Copyright (c) 2003 Mark Adler', ] check_detection(expected, test_file) def test_ics_zlib_contrib_inflate86_inffas86_c(self): test_file = self.get_test_loc('ics/zlib-contrib-inflate86/inffas86.c') expected = [ u'Copyright (c) 1995-2003 Mark Adler', u'Copyright (c) 2003 Chris Anderson <christop@charm.net>', ] check_detection(expected, test_file) def test_ics_zlib_contrib_masmx86_gvmat32c_c(self): test_file = self.get_test_loc('ics/zlib-contrib-masmx86/gvmat32c.c') expected = [ u'Copyright (c) 1995-1996 Jean-loup Gailly and Gilles Vollant.', ] check_detection(expected, test_file) def test_ics_zlib_contrib_minizip_crypt_h(self): test_file = self.get_test_loc('ics/zlib-contrib-minizip/crypt.h') expected = [ u'Copyright (c) 1998-2005 Gilles Vollant', ] check_detection(expected, test_file) def test_ics_zlib_contrib_minizip_ioapi_c(self): test_file = self.get_test_loc('ics/zlib-contrib-minizip/ioapi.c') expected = [ u'Copyright (c) 1998-2010 Gilles Vollant', u'Copyright (c) 2009-2010 Mathias Svensson', ] check_detection(expected, test_file) def test_ics_zlib_contrib_minizip_miniunz_c(self): test_file = self.get_test_loc('ics/zlib-contrib-minizip/miniunz.c') expected = [ u'Copyright (c) 1998-2010 Gilles Vollant', u'Copyright (c) 2007-2008 Even Rouault', u'Copyright (c) 2009-2010 Mathias Svensson', ] check_detection(expected, test_file) def test_ics_zlib_contrib_minizip_minizip64_info_txt(self): test_file = self.get_test_loc('ics/zlib-contrib-minizip/MiniZip64_info.txt') expected = [ u'Copyright (c) 1998-2010 - by Gilles Vollant', ] check_detection(expected, test_file) def test_ics_zlib_contrib_minizip_unzip_c(self): test_file = 
self.get_test_loc('ics/zlib-contrib-minizip/unzip.c') expected = [ u'Copyright (c) 1998-2010 Gilles Vollant', u'Copyright (c) 2007-2008 Even Rouault', u'Copyright (c) 2009-2010 Mathias Svensson', u'Copyright (c) 1990-2000 Info-ZIP.', u'Copyright (c) 2007-2008 Even Rouault', u'Copyright (c) 1998 - 2010 Gilles Vollant, Even Rouault, Mathias Svensson', u'Copyright 1998-2004 Gilles Vollant', ] check_detection(expected, test_file) def test_ics_zlib_contrib_minizip_zip_c(self): test_file = self.get_test_loc('ics/zlib-contrib-minizip/zip.c') expected = [ u'Copyright (c) 1998-2010 Gilles Vollant', u'Copyright (c) 2009-2010 Mathias Svensson', u'Copyright 1998-2004 Gilles Vollant', ] check_detection(expected, test_file) def test_ics_zlib_contrib_pascal_readme_txt(self): test_file = self.get_test_loc('ics/zlib-contrib-pascal/readme.txt') expected = [ u'Copyright (c) 1995-2003 Jean-loup Gailly and Mark Adler.', u'Copyright (c) 1998 by Bob Dellaca.', u'Copyright (c) 2003 by Cosmin Truta.', u'Copyright (c) 1995-2003 by Jean-loup Gailly.', u'Copyright (c) 1998,1999,2000 by Jacques Nomssi Nzali.', u'Copyright (c) 2003 by Cosmin Truta.', ] check_detection(expected, test_file) def test_ics_zlib_contrib_puff_puff_c(self): test_file = self.get_test_loc('ics/zlib-contrib-puff/puff.c') expected = [ u'Copyright (c) 2002-2010 Mark Adler', ] check_detection(expected, test_file) def test_ics_zlib_contrib_puff_puff_h(self): test_file = self.get_test_loc('ics/zlib-contrib-puff/puff.h') expected = [ u'Copyright (c) 2002-2010 Mark Adler', ] check_detection(expected, test_file) def test_ics_zlib_contrib_vstudio_vc10_zlib_rc(self): test_file = self.get_test_loc('ics/zlib-contrib-vstudio-vc10/zlib.rc') expected = [ u'(c) 1995-2010 Jean-loup Gailly & Mark Adler', ] check_detection(expected, test_file) def test_ics_zlib_contrib_vstudio_vc7_zlib_rc(self): test_file = self.get_test_loc('ics/zlib-contrib-vstudio-vc7/zlib.rc') expected = [ u'(c) 1995-2003 Jean-loup Gailly & Mark Adler', ] 
check_detection(expected, test_file) def test_ics_zlib_doc_rfc1950_txt(self): test_file = self.get_test_loc('ics/zlib-doc/rfc1950.txt') expected = [ u'Copyright (c) 1996 L. Peter Deutsch and Jean-Loup Gailly', ] check_detection(expected, test_file) def test_ics_zlib_doc_rfc1951_txt(self): test_file = self.get_test_loc('ics/zlib-doc/rfc1951.txt') expected = [ u'Copyright (c) 1996 L. Peter Deutsch', ] check_detection(expected, test_file) def test_ics_zlib_examples_enough_c(self): test_file = self.get_test_loc('ics/zlib-examples/enough.c') expected = [ u'Copyright (c) 2007, 2008 Mark Adler', ] check_detection(expected, test_file) def test_ics_zlib_examples_gun_c(self): test_file = self.get_test_loc('ics/zlib-examples/gun.c') expected = [ u'Copyright (c) 2003, 2005, 2008, 2010 Mark Adler', u'Copyright (c) 2003-2010 Mark Adler', ] check_detection(expected, test_file) def test_ics_zlib_examples_gzappend_c(self): test_file = self.get_test_loc('ics/zlib-examples/gzappend.c') expected = [ u'Copyright (c) 2003 Mark Adler', u'Copyright (c) 2003 Mark Adler', ] check_detection(expected, test_file) def test_ics_zlib_examples_gzjoin_c(self): test_file = self.get_test_loc('ics/zlib-examples/gzjoin.c') expected = [ u'Copyright (c) 2004 Mark Adler', ] check_detection(expected, test_file) def test_ics_zlib_examples_gzlog_c(self): test_file = self.get_test_loc('ics/zlib-examples/gzlog.c') expected = [ u'Copyright (c) 2004, 2008 Mark Adler', ] check_detection(expected, test_file) def test_ics_zlib_examples_gzlog_h(self): test_file = self.get_test_loc('ics/zlib-examples/gzlog.h') expected = [ u'Copyright (c) 2004, 2008 Mark Adler', ] check_detection(expected, test_file) def test_ics_zlib_examples_zlib_how_html(self): test_file = self.get_test_loc('ics/zlib-examples/zlib_how.html') expected = [ u'Copyright (c) 2004, 2005 Mark Adler.', u'Copyright (c) 2004, 2005 by Mark Adler', ] check_detection(expected, test_file) def test_ics_zlib_examples_zran_c(self): test_file = 
self.get_test_loc('ics/zlib-examples/zran.c') expected = [ u'Copyright (c) 2005 Mark Adler', ] check_detection(expected, test_file) def test_ics_zlib_msdos_makefile_dj2(self): test_file = self.get_test_loc('ics/zlib-msdos/Makefile.dj2') expected = [ u'Copyright (c) 1995-1998 Jean-loup Gailly.', ] check_detection(expected, test_file) def test_ics_zlib_old_zlib_html(self): test_file = self.get_test_loc('ics/zlib-old/zlib.html') expected = [ u'Copyright (c) 1995-2002 Jean-loup Gailly and Mark Adler', ] check_detection(expected, test_file) def test_ics_zlib_old_visualc6_readme_txt(self): test_file = self.get_test_loc('ics/zlib-old-visualc6/README.txt') expected = [ u'Copyright (c) 2000-2004 Simon-Pierre Cadieux.', u'Copyright (c) 2004 Cosmin Truta.', ] check_detection(expected, test_file) def test_ics_zlib_win32_makefile_gcc(self): test_file = self.get_test_loc('ics/zlib-win32/Makefile.gcc') expected = [ u'Copyright (c) 1995-2003 Jean-loup Gailly.', ] check_detection(expected, test_file) def test_ics_zlib_win32_makefile_msc(self): test_file = self.get_test_loc('ics/zlib-win32/Makefile.msc') expected = [ u'copyright (c) 1995-2006 Jean-loup Gailly and Mark Adler', ] check_detection(expected, test_file) def test_ics_zlib_win32_zlib1_rc(self): test_file = self.get_test_loc('ics/zlib-win32/zlib1.rc') expected = [ u'(c) 1995-2006 Jean-loup Gailly & Mark Adler', ] check_detection(expected, test_file)
apache-2.0
CS-SI/QGIS
tests/src/python/test_qgsopacitywidget.py
22
1554
# -*- coding: utf-8 -*- """QGIS Unit tests for QgsOpacityWidget .. note:: This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. """ __author__ = 'Nyall Dawson' __date__ = '30/05/2017' __copyright__ = 'Copyright 2017, The QGIS Project' # This will get replaced with a git SHA1 when you do a git archive __revision__ = '$Format:%H$' import qgis # NOQA from qgis.gui import QgsOpacityWidget from qgis.PyQt.QtTest import QSignalSpy from qgis.testing import start_app, unittest start_app() class TestQgsOpacityWidget(unittest.TestCase): def testGettersSetters(self): """ test widget getters/setters """ w = qgis.gui.QgsOpacityWidget() w.setOpacity(0.2) self.assertEqual(w.opacity(), 0.2) # bad values w.setOpacity(-0.2) self.assertEqual(w.opacity(), 0.0) w.setOpacity(100) self.assertEqual(w.opacity(), 1.0) def test_ChangedSignals(self): """ test that signals are correctly emitted when setting opacity""" w = qgis.gui.QgsOpacityWidget() spy = QSignalSpy(w.opacityChanged) w.setOpacity(0.2) self.assertEqual(len(spy), 1) self.assertEqual(spy[0][0], 0.2) # bad value w.setOpacity(100) self.assertEqual(len(spy), 2) self.assertEqual(spy[1][0], 1.0) if __name__ == '__main__': unittest.main()
gpl-2.0
JimCircadian/ansible
lib/ansible/modules/windows/win_toast.py
24
3175
#!/usr/bin/python # -*- coding: utf-8 -*- # Copyright: (c) 2017, Jon Hawkesworth (@jhawkesworth) <figs@unity.demon.co.uk> # Copyright: (c) 2017, Ansible Project # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) # this is a windows documentation stub. actual code lives in the .ps1 # file of the same name ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'} DOCUMENTATION = r''' --- module: win_toast version_added: "2.4" short_description: Sends Toast windows notification to logged in users on Windows 10 or later hosts description: - Sends alerts which appear in the Action Center area of the windows desktop. options: expire: description: - How long in seconds before the notification expires. default: 45 group: description: - Which notification group to add the notification to. default: Powershell msg: description: - The message to appear inside the notification. - May include \n to format the message to appear within the Action Center. default: Hello, World! popup: description: - If C(no), the notification will not pop up and will only appear in the Action Center. type: bool default: yes tag: description: - The tag to add to the notification. default: Ansible title: description: - The notification title, which appears in the pop up.. default: Notification HH:mm author: - Jon Hawkesworth (@jhawkesworth) notes: - This module must run on a windows 10 or Server 2016 host, so ensure your play targets windows hosts, or delegates to a windows host. - The module does not fail if there are no logged in users to notify. - Messages are only sent to the local host where the module is run. - You must run this module with async, otherwise it will hang until the expire period has passed. ''' EXAMPLES = r''' - name: Warn logged in users of impending upgrade (note use of async to stop the module from waiting until notification expires). 
win_toast: expire: 60 title: System Upgrade Notification msg: Automated upgrade about to start. Please save your work and log off before {{ deployment_start_time }} async: 60 poll: 0 ''' RETURN = r''' expire_at_utc: description: Calculated utc date time when the notification expires. returned: allways type: string sample: 07 July 2017 04:50:54 no_toast_sent_reason: description: Text containing the reason why a notification was not sent. returned: when no logged in users are detected type: string sample: No logged in users to notify sent_localtime: description: local date time when the notification was sent. returned: allways type: string sample: 07 July 2017 05:45:54 time_taken: description: How long the module took to run on the remote windows host in seconds. returned: allways type: float sample: 0.3706631999999997 toast_sent: description: Whether the module was able to send a toast notification or not. returned: allways type: boolean sample: false '''
gpl-3.0
DolphinDream/sverchok
utils/sv_gist_tools.py
2
3737
# ##### BEGIN GPL LICENSE BLOCK #####
#
#  This program is free software; you can redistribute it and/or
#  modify it under the terms of the GNU General Public License
#  as published by the Free Software Foundation; either version 2
#  of the License, or (at your option) any later version.
#
#  This program is distributed in the hope that it will be useful,
#  but WITHOUT ANY WARRANTY; without even the implied warranty of
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#  GNU General Public License for more details.
#
#  You should have received a copy of the GNU General Public License
#  along with this program; if not, write to the Free Software Foundation,
#  Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####

# Helpers for uploading Sverchok node-tree exports as public GitHub gists
# and recording each upload in a monthly CSV log inside Blender's datafiles.

import os
import json
import base64
from time import gmtime, strftime
from urllib.request import Request
import webbrowser

import bpy

from sverchok.utils.logging import info, debug, error
from sverchok.utils.context_managers import sv_preferences
from sverchok.utils.sv_requests import urlopen

# GitHub REST endpoint for creating gists.
API_URL = 'https://api.github.com/gists'
TOKEN_HELP_URL = "https://github.com/nortikin/sverchok/wiki/Set-up-GitHub-account-for-exporting-node-trees-from-Sverchok"


def show_token_help():
    """Open the wiki page explaining how to create a GitHub API token."""
    webbrowser.open(TOKEN_HELP_URL)


def main_upload_function(gist_filename, gist_description, gist_body, show_browser=False):
    """Upload *gist_body* as a public gist named *gist_filename*.

    Returns the gist's html_url on success, or None when no GitHub token
    is configured in the add-on preferences.  When *show_browser* is True
    the created gist is also opened in the default web browser.
    """
    # Payload shape per the GitHub gists API: one file entry keyed by name.
    gist_post_data = {'description': gist_description,
                      'public': True,
                      'files': {gist_filename: {'content': gist_body}}}

    json_post_data = json.dumps(gist_post_data).encode('utf-8')

    def get_gist_url(found_json):
        # Parse the API response and extract the public URL of the new gist.
        wfile = json.JSONDecoder()
        wjson = wfile.decode(found_json)
        gist_url = wjson['html_url']
        if show_browser:
            import webbrowser
            print(gist_url)
            webbrowser.open(gist_url)
        return gist_url

    def upload_gist():
        # Token comes from the add-on preferences; bail out (returning None)
        # with a pointer to setup docs when it is missing.
        with sv_preferences() as prefs:
            token = prefs.github_token
            if not token:
                info("GitHub API access token is not specified")
                show_token_help()
                return
            info("Uploading: %s", gist_filename)
            headers = {"Authorization": "token " + token}
            req = Request(API_URL, data=json_post_data, headers=headers)
            # NOTE(review): json_post_data is passed both on the Request and
            # to urlopen; the explicit data= argument takes effect here.
            json_to_parse = urlopen(req, data=json_post_data)
            info('Received response from server')
            found_json = json_to_parse.read().decode()
            return get_gist_url(found_json)

    return upload_gist()


def write_or_append_datafiles(gist_url, layout_name):
    """
    usage:
        write_or_append_datafiles("some_long_url", "some_name")

    the first time this function is called - it will generate a file at
    YYYY_MM_gist_uploads.csv with column headings:

    - gist_url, layout_name, time_stamp, sha
    - then fill out the first line

    any following time this function is called it will append the next line.
    if the YYYY_MM changes, you get a new empty file ..and the same thing
    will happen.
    """
    # One log file per calendar month, stored in Blender's user datafiles.
    filename = strftime("%Y_%m", gmtime()) + "_gist_uploads.csv"
    dirpath = os.path.join(bpy.utils.user_resource('DATAFILES', path='sverchok', create=True))
    fullpath = os.path.join(dirpath, filename)

    # create fullpath if it doesn't exist
    if not os.path.exists(fullpath):
        with open(fullpath, 'w') as ofile:
            ofile.write('gist_url, layout_name, time_stamp, sha\n')

    # Append one record; sha is not tracked yet, hence the 'no_sha' marker.
    with open(fullpath, 'a') as ofile:
        raw_time_stamp = strftime("%Y_%m_%d_%H_%M", gmtime())
        ofile.write(gist_url + ', ' + layout_name + ', ' + raw_time_stamp + ', no_sha\n')
gpl-3.0
havard024/prego
venv/lib/python2.7/site-packages/django/core/servers/fastcgi.py
241
6638
""" FastCGI (or SCGI, or AJP1.3 ...) server that implements the WSGI protocol. Uses the flup python package: http://www.saddi.com/software/flup/ This is a adaptation of the flup package to add FastCGI server support to run Django apps from Web servers that support the FastCGI protocol. This module can be run standalone or from the django-admin / manage.py scripts using the "runfcgi" directive. Run with the extra option "help" for a list of additional options you can pass to this server. """ import os import sys from django.utils import importlib __version__ = "0.1" __all__ = ["runfastcgi"] FASTCGI_OPTIONS = { 'protocol': 'fcgi', 'host': None, 'port': None, 'socket': None, 'method': 'fork', 'daemonize': None, 'workdir': '/', 'pidfile': None, 'maxspare': 5, 'minspare': 2, 'maxchildren': 50, 'maxrequests': 0, 'debug': None, 'outlog': None, 'errlog': None, 'umask': None, } FASTCGI_HELP = r""" Run this project as a fastcgi (or some other protocol supported by flup) application. To do this, the flup package from http://www.saddi.com/software/flup/ is required. runfcgi [options] [fcgi settings] Optional Fcgi settings: (setting=value) protocol=PROTOCOL fcgi, scgi, ajp, ... (default %(protocol)s) host=HOSTNAME hostname to listen on. port=PORTNUM port to listen on. socket=FILE UNIX socket to listen on. method=IMPL prefork or threaded (default %(method)s). maxrequests=NUMBER number of requests a child handles before it is killed and a new child is forked (0 = no limit). maxspare=NUMBER max number of spare processes / threads (default %(maxspare)s). minspare=NUMBER min number of spare processes / threads (default %(minspare)s). maxchildren=NUMBER hard limit number of processes / threads (default %(maxchildren)s). daemonize=BOOL whether to detach from terminal. pidfile=FILE write the spawned process-id to this file. workdir=DIRECTORY change to this directory when daemonizing (default %(workdir)s). debug=BOOL set to true to enable flup tracebacks. 
outlog=FILE write stdout to this file. errlog=FILE write stderr to this file. umask=UMASK umask to use when daemonizing, in octal notation (default 022). Examples: Run a "standard" fastcgi process on a file-descriptor (for Web servers which spawn your processes for you) $ manage.py runfcgi method=threaded Run a scgi server on a TCP host/port $ manage.py runfcgi protocol=scgi method=prefork host=127.0.0.1 port=8025 Run a fastcgi server on a UNIX domain socket (posix platforms only) $ manage.py runfcgi method=prefork socket=/tmp/fcgi.sock Run a fastCGI as a daemon and write the spawned PID in a file $ manage.py runfcgi socket=/tmp/fcgi.sock method=prefork \ daemonize=true pidfile=/var/run/django-fcgi.pid """ % FASTCGI_OPTIONS def fastcgi_help(message=None): print(FASTCGI_HELP) if message: print(message) return False def runfastcgi(argset=[], **kwargs): options = FASTCGI_OPTIONS.copy() options.update(kwargs) for x in argset: if "=" in x: k, v = x.split('=', 1) else: k, v = x, True options[k.lower()] = v if "help" in options: return fastcgi_help() try: import flup except ImportError as e: sys.stderr.write("ERROR: %s\n" % e) sys.stderr.write(" Unable to load the flup package. In order to run django\n") sys.stderr.write(" as a FastCGI application, you will need to get flup from\n") sys.stderr.write(" http://www.saddi.com/software/flup/ If you've already\n") sys.stderr.write(" installed flup, then make sure you have it in your PYTHONPATH.\n") return False flup_module = 'server.' 
+ options['protocol'] if options['method'] in ('prefork', 'fork'): wsgi_opts = { 'maxSpare': int(options["maxspare"]), 'minSpare': int(options["minspare"]), 'maxChildren': int(options["maxchildren"]), 'maxRequests': int(options["maxrequests"]), } flup_module += '_fork' elif options['method'] in ('thread', 'threaded'): wsgi_opts = { 'maxSpare': int(options["maxspare"]), 'minSpare': int(options["minspare"]), 'maxThreads': int(options["maxchildren"]), } else: return fastcgi_help("ERROR: Implementation must be one of prefork or " "thread.") wsgi_opts['debug'] = options['debug'] is not None try: module = importlib.import_module('.%s' % flup_module, 'flup') WSGIServer = module.WSGIServer except Exception: print("Can't import flup." + flup_module) return False # Prep up and go from django.core.servers.basehttp import get_internal_wsgi_application if options["host"] and options["port"] and not options["socket"]: wsgi_opts['bindAddress'] = (options["host"], int(options["port"])) elif options["socket"] and not options["host"] and not options["port"]: wsgi_opts['bindAddress'] = options["socket"] elif not options["socket"] and not options["host"] and not options["port"]: wsgi_opts['bindAddress'] = None else: return fastcgi_help("Invalid combination of host, port, socket.") if options["daemonize"] is None: # Default to daemonizing if we're running on a socket/named pipe. 
daemonize = (wsgi_opts['bindAddress'] is not None) else: if options["daemonize"].lower() in ('true', 'yes', 't'): daemonize = True elif options["daemonize"].lower() in ('false', 'no', 'f'): daemonize = False else: return fastcgi_help("ERROR: Invalid option for daemonize " "parameter.") daemon_kwargs = {} if options['outlog']: daemon_kwargs['out_log'] = options['outlog'] if options['errlog']: daemon_kwargs['err_log'] = options['errlog'] if options['umask']: daemon_kwargs['umask'] = int(options['umask'], 8) if daemonize: from django.utils.daemonize import become_daemon become_daemon(our_home_dir=options["workdir"], **daemon_kwargs) if options["pidfile"]: with open(options["pidfile"], "w") as fp: fp.write("%d\n" % os.getpid()) WSGIServer(get_internal_wsgi_application(), **wsgi_opts).run() if __name__ == '__main__': runfastcgi(sys.argv[1:])
mit
havard024/prego
crm/lib/python2.7/site-packages/whoosh/filedb/structfile.py
96
12453
# Copyright 2009 Matt Chaput. All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY MATT CHAPUT ``AS IS'' AND ANY EXPRESS OR # IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF # MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO # EVENT SHALL MATT CHAPUT OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, # OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF # LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING # NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, # EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # # The views and conclusions contained in the software and documentation are # those of the authors and should not be interpreted as representing official # policies, either expressed or implied, of Matt Chaput. 
from array import array from copy import copy from struct import calcsize from whoosh.compat import BytesIO, bytes_type from whoosh.compat import dump as dump_pickle from whoosh.compat import load as load_pickle from whoosh.compat import array_frombytes, array_tobytes from whoosh.system import _INT_SIZE, _SHORT_SIZE, _FLOAT_SIZE, _LONG_SIZE from whoosh.system import IS_LITTLE from whoosh.system import pack_byte, unpack_byte, pack_sbyte, unpack_sbyte from whoosh.system import pack_ushort, unpack_ushort from whoosh.system import pack_ushort_le, unpack_ushort_le from whoosh.system import pack_int, unpack_int, pack_uint, unpack_uint from whoosh.system import pack_uint_le, unpack_uint_le from whoosh.system import pack_long, unpack_long, pack_ulong, unpack_ulong from whoosh.system import pack_float, unpack_float from whoosh.util.varints import varint, read_varint from whoosh.util.varints import signed_varint, decode_signed_varint _SIZEMAP = dict((typecode, calcsize(typecode)) for typecode in "bBiIhHqQf") _ORDERMAP = {"little": "<", "big": ">"} _types = (("sbyte", "b"), ("ushort", "H"), ("int", "i"), ("long", "q"), ("float", "f")) # Main function class StructFile(object): """Returns a "structured file" object that wraps the given file object and provides numerous additional methods for writing structured data, such as "write_varint" and "write_long". 
""" def __init__(self, fileobj, name=None, onclose=None): self.file = fileobj self._name = name self.onclose = onclose self.is_closed = False self.is_real = hasattr(fileobj, "fileno") if self.is_real: self.fileno = fileobj.fileno def __repr__(self): return "%s(%r)" % (self.__class__.__name__, self._name) def __str__(self): return self._name def __enter__(self): return self def __exit__(self, exc_type, exc_val, exc_tb): self.close() def __iter__(self): return iter(self.file) def raw_file(self): return self.file def read(self, *args, **kwargs): return self.file.read(*args, **kwargs) def readline(self, *args, **kwargs): return self.file.readline(*args, **kwargs) def write(self, *args, **kwargs): return self.file.write(*args, **kwargs) def tell(self, *args, **kwargs): return self.file.tell(*args, **kwargs) def seek(self, *args, **kwargs): return self.file.seek(*args, **kwargs) def truncate(self, *args, **kwargs): return self.file.truncate(*args, **kwargs) def flush(self): """Flushes the buffer of the wrapped file. This is a no-op if the wrapped file does not have a flush method. """ if hasattr(self.file, "flush"): self.file.flush() def close(self): """Closes the wrapped file. """ if self.is_closed: raise Exception("This file is already closed") if self.onclose: self.onclose(self) if hasattr(self.file, "close"): self.file.close() self.is_closed = True def subset(self, offset, length, name=None): from whoosh.filedb.compound import SubFile name = name or self._name return StructFile(SubFile(self.file, offset, length), name=name) def write_string(self, s): """Writes a string to the wrapped file. This method writes the length of the string first, so you can read the string back without having to know how long it was. """ self.write_varint(len(s)) self.write(s) def write_string2(self, s): self.write(pack_ushort(len(s)) + s) def write_string4(self, s): self.write(pack_int(len(s)) + s) def read_string(self): """Reads a string from the wrapped file. 
""" return self.read(self.read_varint()) def read_string2(self): l = self.read_ushort() return self.read(l) def read_string4(self): l = self.read_int() return self.read(l) def get_string2(self, pos): l = self.get_ushort(pos) base = pos + _SHORT_SIZE return self.get(base, l), base + l def get_string4(self, pos): l = self.get_int(pos) base = pos + _INT_SIZE return self.get(base, l), base + l def skip_string(self): l = self.read_varint() self.seek(l, 1) def write_varint(self, i): """Writes a variable-length unsigned integer to the wrapped file. """ self.write(varint(i)) def write_svarint(self, i): """Writes a variable-length signed integer to the wrapped file. """ self.write(signed_varint(i)) def read_varint(self): """Reads a variable-length encoded unsigned integer from the wrapped file. """ return read_varint(self.read) def read_svarint(self): """Reads a variable-length encoded signed integer from the wrapped file. """ return decode_signed_varint(read_varint(self.read)) def write_tagint(self, i): """Writes a sometimes-compressed unsigned integer to the wrapped file. This is similar to the varint methods but uses a less compressed but faster format. """ # Store numbers 0-253 in one byte. Byte 254 means "an unsigned 16-bit # int follows." Byte 255 means "An unsigned 32-bit int follows." if i <= 253: self.write(chr(i)) elif i <= 65535: self.write("\xFE" + pack_ushort(i)) else: self.write("\xFF" + pack_uint(i)) def read_tagint(self): """Reads a sometimes-compressed unsigned integer from the wrapped file. This is similar to the varint methods but uses a less compressed but faster format. """ tb = ord(self.read(1)) if tb == 254: return self.read_ushort() elif tb == 255: return self.read_uint() else: return tb def write_byte(self, n): """Writes a single byte to the wrapped file, shortcut for ``file.write(chr(n))``. 
""" self.write(pack_byte(n)) def read_byte(self): return ord(self.read(1)) def write_pickle(self, obj, protocol=-1): """Writes a pickled representation of obj to the wrapped file. """ dump_pickle(obj, self.file, protocol) def read_pickle(self): """Reads a pickled object from the wrapped file. """ return load_pickle(self.file) def write_sbyte(self, n): self.write(pack_sbyte(n)) def write_int(self, n): self.write(pack_int(n)) def write_uint(self, n): self.write(pack_uint(n)) def write_uint_le(self, n): self.write(pack_uint_le(n)) def write_ushort(self, n): self.write(pack_ushort(n)) def write_ushort_le(self, n): self.write(pack_ushort_le(n)) def write_long(self, n): self.write(pack_long(n)) def write_ulong(self, n): self.write(pack_ulong(n)) def write_float(self, n): self.write(pack_float(n)) def write_array(self, arry): if IS_LITTLE: arry = copy(arry) arry.byteswap() if self.is_real: arry.tofile(self.file) else: self.write(array_tobytes(arry)) def read_sbyte(self): return unpack_sbyte(self.read(1))[0] def read_int(self): return unpack_int(self.read(_INT_SIZE))[0] def read_uint(self): return unpack_uint(self.read(_INT_SIZE))[0] def read_uint_le(self): return unpack_uint_le(self.read(_INT_SIZE))[0] def read_ushort(self): return unpack_ushort(self.read(_SHORT_SIZE))[0] def read_ushort_le(self): return unpack_ushort_le(self.read(_SHORT_SIZE))[0] def read_long(self): return unpack_long(self.read(_LONG_SIZE))[0] def read_ulong(self): return unpack_ulong(self.read(_LONG_SIZE))[0] def read_float(self): return unpack_float(self.read(_FLOAT_SIZE))[0] def read_array(self, typecode, length): a = array(typecode) if self.is_real: a.fromfile(self.file, length) else: array_frombytes(a, self.read(length * _SIZEMAP[typecode])) if IS_LITTLE: a.byteswap() return a def get(self, position, length): self.seek(position) return self.read(length) def get_byte(self, position): return unpack_byte(self.get(position, 1))[0] def get_sbyte(self, position): return unpack_sbyte(self.get(position, 
1))[0] def get_int(self, position): return unpack_int(self.get(position, _INT_SIZE))[0] def get_uint(self, position): return unpack_uint(self.get(position, _INT_SIZE))[0] def get_ushort(self, position): return unpack_ushort(self.get(position, _SHORT_SIZE))[0] def get_long(self, position): return unpack_long(self.get(position, _LONG_SIZE))[0] def get_ulong(self, position): return unpack_ulong(self.get(position, _LONG_SIZE))[0] def get_float(self, position): return unpack_float(self.get(position, _FLOAT_SIZE))[0] def get_array(self, position, typecode, length): self.seek(position) return self.read_array(typecode, length) class BufferFile(StructFile): def __init__(self, buf, name=None, onclose=None): self._buf = buf self._name = name self.file = BytesIO(buf) self.onclose = onclose self.is_real = False self.is_closed = False def subset(self, position, length, name=None): name = name or self._name return BufferFile(self.get(position, length), name=name) def get(self, position, length): return bytes_type(self._buf[position:position + length]) def get_array(self, position, typecode, length): a = array(typecode) array_frombytes(a, self.get(position, length * _SIZEMAP[typecode])) if IS_LITTLE: a.byteswap() return a class ChecksumFile(StructFile): def __init__(self, *args, **kwargs): StructFile.__init__(self, *args, **kwargs) self._check = 0 self._crc32 = __import__("zlib").crc32 def __iter__(self): for line in self.file: self._check = self._crc32(line, self._check) yield line def seek(self, *args): raise Exception("Cannot seek on a ChecksumFile") def read(self, *args, **kwargs): b = self.file.read(*args, **kwargs) self._check = self._crc32(b, self._check) return b def write(self, b): self._check = self._crc32(b, self._check) self.file.write(b) def checksum(self): return self._check & 0xffffffff
mit
mhruscak/pyparted
src/parted/__init__.py
2
16458
# # __init__.py # Python bindings for libparted (built on top of the _ped Python module). # # Copyright (C) 2007-2015 Red Hat, Inc. # # This copyrighted material is made available to anyone wishing to use, # modify, copy, or redistribute it subject to the terms and conditions of # the GNU General Public License v.2, or (at your option) any later version. # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY expressed or implied, including the implied warranties of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. You should have received a copy of the # GNU General Public License along with this program; if not, write to the # Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301, USA. Any Red Hat trademarks that are incorporated in the # source code or documentation are not subject to the GNU General Public # License and may only be used or replicated with the express permission of # Red Hat, Inc. 
# # Author(s): David Cantrell <dcantrell@redhat.com> # Chris Lumens <clumens@redhat.com> # Alex Skinner <alex@lx.lc> # from __future__ import division import platform import re import sys import warnings import _ped __all__ = ['Alignment', 'Constraint', 'Device', 'Disk', 'FileSystem', 'Geometry', 'Partition'] from _ped import AlignmentException from _ped import CreateException from _ped import ConstraintException from _ped import DeviceException from _ped import DiskException from _ped import DiskLabelException from _ped import FileSystemException from _ped import GeometryException from _ped import IOException from _ped import NotNeededException from _ped import PartedException from _ped import PartitionException from _ped import TimerException from _ped import UnknownDeviceException from _ped import UnknownTypeException from _ped import register_exn_handler from _ped import clear_exn_handler from parted.alignment import Alignment from parted.constraint import Constraint from parted.device import Device from parted.disk import Disk from parted.disk import diskType from parted.disk import diskFlag from parted.filesystem import FileSystem from parted.filesystem import fileSystemType from parted.geometry import Geometry from parted.partition import Partition from parted.partition import partitionFlag # the enumerated types in _ped need to be available from here too from _ped import UNIT_SECTOR from _ped import UNIT_BYTE from _ped import UNIT_KILOBYTE from _ped import UNIT_MEGABYTE from _ped import UNIT_GIGABYTE from _ped import UNIT_TERABYTE from _ped import UNIT_COMPACT from _ped import UNIT_CYLINDER from _ped import UNIT_CHS from _ped import UNIT_PERCENT from _ped import UNIT_KIBIBYTE from _ped import UNIT_MEBIBYTE from _ped import UNIT_GIBIBYTE from _ped import UNIT_TEBIBYTE from _ped import DEVICE_UNKNOWN from _ped import DEVICE_SCSI from _ped import DEVICE_IDE from _ped import DEVICE_DAC960 from _ped import DEVICE_CPQARRAY from _ped import DEVICE_FILE from _ped 
import DEVICE_ATARAID from _ped import DEVICE_I2O from _ped import DEVICE_UBD from _ped import DEVICE_DASD from _ped import DEVICE_VIODASD from _ped import DEVICE_SX8 from _ped import DEVICE_DM from _ped import DEVICE_XVD from _ped import DEVICE_SDMMC from _ped import DEVICE_VIRTBLK from _ped import PARTITION_NORMAL from _ped import PARTITION_LOGICAL from _ped import PARTITION_EXTENDED from _ped import PARTITION_FREESPACE from _ped import PARTITION_METADATA from _ped import PARTITION_PROTECTED from _ped import PARTITION_BOOT from _ped import PARTITION_ROOT from _ped import PARTITION_SWAP from _ped import PARTITION_HIDDEN from _ped import PARTITION_RAID from _ped import PARTITION_LVM from _ped import PARTITION_LBA from _ped import PARTITION_HPSERVICE from _ped import PARTITION_PALO from _ped import PARTITION_PREP from _ped import PARTITION_MSFT_RESERVED from _ped import PARTITION_APPLE_TV_RECOVERY from _ped import PARTITION_BIOS_GRUB from _ped import PARTITION_DIAG from _ped import PARTITION_LEGACY_BOOT if hasattr(_ped, 'PARTITION_MSFT_DATA'): # pylint: disable=E0611 from _ped import PARTITION_MSFT_DATA if hasattr(_ped, 'PARTITION_IRST'): # pylint: disable=E0611 from _ped import PARTITION_IRST if hasattr(_ped, 'PARTITION_ESP'): # pylint: disable=E0611 from _ped import PARTITION_ESP if hasattr(_ped, 'PARTITION_NONFS'): # pylint: disable=E0611 from _ped import PARTITION_NONFS from _ped import DISK_CYLINDER_ALIGNMENT from _ped import DISK_GPT_PMBR_BOOT from _ped import DISK_TYPE_EXTENDED from _ped import DISK_TYPE_PARTITION_NAME from _ped import EXCEPTION_TYPE_INFORMATION from _ped import EXCEPTION_TYPE_WARNING from _ped import EXCEPTION_TYPE_ERROR from _ped import EXCEPTION_TYPE_FATAL from _ped import EXCEPTION_TYPE_BUG from _ped import EXCEPTION_TYPE_NO_FEATURE from _ped import EXCEPTION_RESOLVE_UNHANDLED from _ped import EXCEPTION_RESOLVE_FIX from _ped import EXCEPTION_RESOLVE_YES from _ped import EXCEPTION_RESOLVE_NO from _ped import EXCEPTION_RESOLVE_OK from _ped 
import EXCEPTION_RESOLVE_RETRY from _ped import EXCEPTION_RESOLVE_IGNORE from _ped import EXCEPTION_RESOLVE_CANCEL from _ped import EXCEPTION_OPT_OK_CANCEL from _ped import EXCEPTION_OPT_YES_NO from _ped import EXCEPTION_OPT_YES_NO_CANCEL from _ped import EXCEPTION_OPT_IGNORE_CANCEL from _ped import EXCEPTION_OPT_RETRY_CANCEL from _ped import EXCEPTION_OPT_RETRY_IGNORE_CANCEL from parted.decorators import localeC if sys.version_info >= (3,): string_types = str else: string_types = basestring # pylint: disable=undefined-variable partitionTypesDict = { 0x00: "Empty", 0x01: "DOS 12-bit FAT", 0x02: "XENIX root", 0x03: "XENIX usr", 0x04: "DOS 16-bit <32M", 0x05: "Extended", 0x06: "DOS 16-bit >=32M", 0x07: "NTFS/HPFS", 0x08: "AIX", 0x09: "AIX bootable", 0x0a: "OS/2 Boot Manager", 0x0b: "Win95 FAT32", 0x0c: "Win95 FAT32", 0x0e: "Win95 FAT16", 0x0f: "Win95 Ext'd", 0x10: "OPUS", 0x11: "Hidden FAT12", 0x12: "Compaq Setup", 0x14: "Hidden FAT16 <32M", 0x16: "Hidden FAT16", 0x17: "Hidden HPFS/NTFS", 0x18: "AST SmartSleep", 0x1b: "Hidden Win95 FAT32", 0x1c: "Hidden Win95 FAT32 (LBA)", 0x1e: "Hidden Win95 FAT16 (LBA)", 0x24: "NEC_DOS", 0x39: "Plan 9", 0x40: "Venix 80286", 0x41: "PPC_PReP Boot", 0x42: "SFS", 0x4d: "QNX4.x", 0x4e: "QNX4.x 2nd part", 0x4f: "QNX4.x 2nd part", 0x51: "Novell?", 0x52: "Microport", 0x63: "GNU HURD", 0x64: "Novell Netware 286", 0x65: "Novell Netware 386", 0x75: "PC/IX", 0x80: "Old MINIX", 0x81: "Linux/MINIX", 0x82: "Linux swap", 0x83: "Linux native", 0x84: "OS/2 hidden C:", 0x85: "Linux Extended", 0x86: "NTFS volume set", 0x87: "NTFS volume set", 0x8e: "Linux LVM", 0x93: "Amoeba", 0x94: "Amoeba BBT", 0x9f: "BSD/OS", 0xa0: "IBM Thinkpad hibernation", 0xa5: "BSD/386", 0xa6: "OpenBSD", 0xb7: "BSDI fs", 0xb8: "BSDI swap", 0xbf: "Solaris", 0xc7: "Syrinx", 0xdb: "CP/M", 0xde: "Dell Utility", 0xe1: "DOS access", 0xe3: "DOS R/O", 0xeb: "BEOS", 0xee: "EFI GPT", 0xef: "EFI (FAT-12/16/32)", 0xf2: "DOS secondary", 0xfd: "Linux RAID", 0xff: "BBT" } # Exponents for 
# (continued) ... 1024 used when converting sizes to byte-sized
# units for display.  The keys are:
#     b     bytes       1024^0 = 1
#     kb    kilobytes   1024^1 = 1024
#     mb    megabytes   1024^2 = 1048576
#     gb    gigabytes   1024^3 = 1073741824
#     tb    terabytes   1024^4 = 1099511627776
#     pb    petabytes   1024^5 = 1125899906842624
#     eb    exabytes    1024^6 = 1152921504606846976
#     zb    zettabytes  1024^7 = 1180591620717411303424
#     yb    yottabytes  1024^8 = 1208925819614629174706176
# The resulting value for 1024 raised to the power is used as
# the divisor for conversion functions.
# NOTE: deprecated -- superseded by __exponents below; see the _deprecated
# table at the bottom of this module.
_exponent = {'b': 0, 'kb': 1, 'mb': 2, 'gb': 3, 'tb': 4,
             'pb': 5, 'eb': 6, 'zb': 7, 'yb': 8}

# References:
#
# 1. NIST Special Publication 330, 2008 Edition, Barry N. Taylor and Ambler
#    Thompson, Editors
#    The International System of Units (SI)
#    Available from: http://physics.nist.gov/cuu/pdf/sp811.pdf
#
# 2. International standard IEC 60027-2, third edition,
#    Letter symbols to be used in electrical technology --
#    Part 2: Telecommunications and electronics.
#
# See the links below for quick online summaries:
#
# SI units:  http://physics.nist.gov/cuu/Units/prefixes.html
# IEC units: http://physics.nist.gov/cuu/Units/binary.html

# Number of bytes represented by each SI (decimal, 1000-based) and
# IEC (binary, 1024-based) unit symbol.  Used by formatBytes and
# sizeToSectors below; the symbols are case sensitive.
__exponents = {
    "B":   1,        # byte
    "kB":  1000**1,  # kilobyte
    "MB":  1000**2,  # megabyte
    "GB":  1000**3,  # gigabyte
    "TB":  1000**4,  # terabyte
    "PB":  1000**5,  # petabyte
    "EB":  1000**6,  # exabyte
    "ZB":  1000**7,  # zettabyte
    "YB":  1000**8,  # yottabyte

    "KiB": 1024**1,  # kibibyte
    "MiB": 1024**2,  # mebibyte
    "GiB": 1024**3,  # gibibyte
    "TiB": 1024**4,  # tebibyte
    "PiB": 1024**5,  # pebibyte
    "EiB": 1024**6,  # exbibyte
    "ZiB": 1024**7,  # zebibyte
    "YiB": 1024**8   # yobibyte
}

def formatBytes(bytes_, unit):
    """Convert bytes_ using an SI or IEC prefix.  Note that unit is a
       case sensitive string that must exactly match one of the IEC or SI
       prefixes followed by 'B' (e.g. 'GB').

       Raises SyntaxError if unit is not a key of __exponents."""
    if unit not in __exponents.keys():
        raise SyntaxError("{:} is not a valid SI or IEC byte unit".format(unit))
    else:
        # True division: result may be fractional.
        return (bytes_ / __exponents[unit])

def sizeToSectors(bytes_, unit, sector_size):
    """Convert bytes_ of unit to a number of sectors.  Note that unit is a
       case sensitive string that must exactly match one of the IEC or SI
       prefixes followed by 'B' (e.g. 'GB').

       Raises SyntaxError if unit is not a key of __exponents."""
    if unit not in __exponents.keys():
        raise SyntaxError("{:} is not a valid SI or IEC byte unit".format(unit))
    else:
        # Floor division: partial trailing sectors are dropped.
        return bytes_ * __exponents[unit] // sector_size

# Valid disk labels per architecture type.  The list of label
# names map to keys in the parted.diskType hash table.
# NOTE: deprecated -- use getLabels() below instead (see _deprecated).
archLabels = {'i386': ['msdos', 'gpt'],
              's390': ['dasd', 'msdos'],
              'alpha': ['bsd', 'msdos'],
              'sparc': ['sun'],
              'ia64': ['msdos', 'gpt'],
              'ppc': ['msdos', 'mac', 'amiga', 'gpt'],
              'ppc64': ['msdos', 'mac', 'amiga', 'gpt'],
              'ppc64le': ['msdos', 'gpt'],
              'x86_64': ['msdos', 'gpt'],
              'aarch64': ['msdos', 'gpt'],
              'armv7l': ['msdos', 'gpt']}

# Adapted from:
# http://stackoverflow.com/questions/922550/how-to-mark-a-global-as-deprecated-in-python
#
# Remember that DeprecationWarnings are ignored by default as they are not really
# useful to users.  Developers can turn on DeprecationWarning notices by passing
# the -Wd option to python or by setting PYTHONWARNINGS=d in the environment.
def Deprecated(mod, deprecated=None):
    """ Return a wrapped object that warns about deprecated accesses.

        mod is the module object to wrap; deprecated maps attribute names
        to an extra explanatory sentence appended to the warning message.
        Attribute reads and writes are forwarded to mod after the warning.
    """
    if not deprecated:
        deprecated = {}

    class Wrapper(object):
        warnmsg = "%s is deprecated and will be removed in a future release."

        def __getattr__(self, attr):
            # Only attributes listed in `deprecated` trigger a warning;
            # everything is delegated to the real module either way.
            if attr in deprecated.keys():
                msg = self.warnmsg + " " + deprecated[attr]
                warnings.warn(msg % attr, DeprecationWarning)

            return getattr(mod, attr)

        def __setattr__(self, attr, value):
            if attr in deprecated.keys():
                msg = self.warnmsg + " " + deprecated[attr]
                warnings.warn(msg % attr, DeprecationWarning)

            setattr(mod, attr, value)

    return Wrapper()

# Valid disk labels and their applicable architectures.  The label names map
# to keys in the parted.diskType hash table.  Each regex is matched against
# platform.machine() output in getLabels().
__archLabels = (('amiga', 'ppc(64)?$'),
                ('bsd', 'alpha$'),
                ('dasd', 's390x?$'),
                ('gpt', 'i[3-6]86$|x86_64$|ia64$|ppc(64|64le)?$|aarch64$|armv7l$'),
                ('mac', 'ppc(64)?$'),
                ('msdos', 'i[3-6]86$|x86_64$|s390x?$|alpha$|ia64$|ppc(64|64le)?$|aarch64$|armv7l$'),
                ('sun', 'sparc(64)?$'))

def getLabels(arch=None):
    """Return a set containing the disk labels compatible with the
       architecture of the computer calling this function.  If an
       architecture is passed, return the labels compatible with that
       architecture."""
    labels = set()
    if arch is None:
        arch = platform.machine()

    for label, regex in __archLabels:
        if re.match(regex, arch):
            labels.add(label)

    return labels

class ReadOnlyProperty(Exception):
    """Exception raised when a write operation occurs on a read-only property."""

    # pylint: disable=W0231
    def __init__(self, prop=''):
        self.message = "%s is a read-only property" % (prop,)

class WriteOnlyProperty(Exception):
    """Exception raised when a read operation occurs on a write-only property."""

    # pylint: disable=W0231
    def __init__(self, prop=''):
        self.message = "%s is a write-only property" % (prop,)

@localeC
def getDevice(path):
    """Given the operating system level path to a device node, return
       a Device object for that disk.

       Raises DeviceException if an invalid path is given."""
    return Device(path=path)

@localeC
def getAllDevices():
    """Return a list of Device objects for all devices in the system."""
    from _ped import device_probe_all
    from _ped import device_get_next

    lst = []
    device = None

    device_probe_all()

    # device_get_next raises IndexError when the probe list is exhausted;
    # that is the normal loop exit, not an error.
    while True:
        try:
            if not device:
                device = device_get_next()
            else:
                device = device_get_next(device)

            lst.append(Device(PedDevice=device))
        except IndexError:
            return lst

@localeC
def freeAllDevices():
    """Free all Device objects.  There is no reason to call this function."""
    from _ped import device_free_all
    return device_free_all()

@localeC
def probeForSpecificFileSystem(fstype, geometry):
    """Call the _ped.file_system_probe_specific() function given the
       filesystem type and geometry.  fstype must be a string
       representing a valid _ped.FileSystemType, geometry is a
       parted.Geometry."""
    from _ped import file_system_probe_specific
    geom = file_system_probe_specific(fileSystemType[fstype], geometry.getPedGeometry())
    # NOTE(review): `geometry` is a parted.Geometry instance, which does not
    # obviously expose a `.Geometry` attribute -- this looks like it was meant
    # to construct a new parted.Geometry from `geom`.  Confirm before relying
    # on this function.
    return geometry.Geometry(PedGeometry=geom)

@localeC
def probeFileSystem(geometry):
    """Return the name of the filesystem detected on the given
       Geometry.  Returns None if no filesystem found."""
    from _ped import file_system_probe
    fstype = file_system_probe(geometry.getPedGeometry())
    return fstype.name

@localeC
def freshDisk(device, ty):
    """Return a Disk object for this Device and using this DiskType.
       The type should be a member of the parted.diskType hash,
       either a key or a value.

       The new label is not written to disk until commitToDevice()
       is called on the Disk."""
    from _ped import disk_new_fresh, DiskType

    if isinstance(ty, string_types):
        ty = diskType[ty]
    elif not isinstance(ty, DiskType):
        raise TypeError("type must be a key or value in parted.diskType", ty)

    peddisk = disk_new_fresh(device.getPedDevice(), ty)
    return Disk(PedDisk=peddisk)

@localeC
def newDisk(device):
    """Return a Disk object for this Device.

       Read the partition table off a device (if one is found)."""
    from _ped import disk_new

    peddisk = disk_new(device.getPedDevice())
    return Disk(PedDisk=peddisk)

@localeC
def version():
    """Return a dict containing the pyparted and libparted versions."""
    from _ped import libparted_version
    from _ped import pyparted_version

    ver = {}
    ver['libparted'] = libparted_version()
    ver['pyparted'] = pyparted_version()
    return ver

# Mark deprecated items: accessing these module attributes emits a
# DeprecationWarning with the extra sentence below appended.
_deprecated = {"partitionTypesDict": "DOS disk label types are not provided "
                                     "by libparted, so the codes are not "
                                     "useful.",
               "_exponent": "Use __exponents instead.",
               "archLabels": "Use getLabels() instead.",
              }

# Replace this module in sys.modules with the warning wrapper so every
# attribute access goes through Deprecated.Wrapper above.
sys.modules[__name__] = Deprecated(sys.modules[__name__], _deprecated)
gpl-2.0
yonchev/brotli
python/tests/roundtrip_test.py
98
1504
#!/usr/bin/env python from __future__ import print_function import sys import os from subprocess import check_call, Popen, PIPE from test_utils import PYTHON, BRO, TEST_ENV, diff_q INPUTS = """\ testdata/alice29.txt testdata/asyoulik.txt testdata/lcet10.txt testdata/plrabn12.txt ../enc/encode.cc ../enc/dictionary.h ../dec/decode.c %s """ % BRO os.chdir(os.path.abspath("../../tests")) for filename in INPUTS.splitlines(): for quality in (1, 6, 9, 11): filename = os.path.abspath(filename) print('Roundtrip testing file "%s" at quality %d' % (os.path.basename(filename), quality)) compressed = os.path.splitext(filename)[0] + ".bro" uncompressed = os.path.splitext(filename)[0] + ".unbro" check_call([PYTHON, BRO, "-f", "-q", str(quality), "-i", filename, "-o", compressed], env=TEST_ENV) check_call([PYTHON, BRO, "-f", "-d", "-i", compressed, "-o", uncompressed], env=TEST_ENV) if diff_q(filename, uncompressed) != 0: sys.exit(1) # Test the streaming version with open(filename, "rb") as infile, \ open(uncompressed, "wb") as outfile: p = Popen([PYTHON, BRO, "-q", str(quality)], stdin=infile, stdout=PIPE, env=TEST_ENV) check_call([PYTHON, BRO, "-d"], stdin=p.stdout, stdout=outfile, env=TEST_ENV) if diff_q(filename, uncompressed) != 0: sys.exit(1)
apache-2.0
isandlaTech/cohorte-demos
led/dump/led-demo-raspberry/cohorte/dist/cohorte-1.0.0-20141209.234423-41-python-distribution/repo/sleekxmpp/plugins/xep_0199/ping.py
11
6167
""" SleekXMPP: The Sleek XMPP Library Copyright (C) 2010 Nathanael C. Fritz This file is part of SleekXMPP. See the file LICENSE for copying permission. """ import time import logging from sleekxmpp.jid import JID from sleekxmpp.stanza import Iq from sleekxmpp.exceptions import IqError, IqTimeout from sleekxmpp.xmlstream import register_stanza_plugin from sleekxmpp.xmlstream.matcher import StanzaPath from sleekxmpp.xmlstream.handler import Callback from sleekxmpp.plugins import BasePlugin from sleekxmpp.plugins.xep_0199 import stanza, Ping log = logging.getLogger(__name__) class XEP_0199(BasePlugin): """ XEP-0199: XMPP Ping Given that XMPP is based on TCP connections, it is possible for the underlying connection to be terminated without the application's awareness. Ping stanzas provide an alternative to whitespace based keepalive methods for detecting lost connections. Also see <http://www.xmpp.org/extensions/xep-0199.html>. Attributes: keepalive -- If True, periodically send ping requests to the server. If a ping is not answered, the connection will be reset. interval -- Time in seconds between keepalive pings. Defaults to 300 seconds. timeout -- Time in seconds to wait for a ping response. Defaults to 30 seconds. Methods: send_ping -- Send a ping to a given JID, returning the round trip time. """ name = 'xep_0199' description = 'XEP-0199: XMPP Ping' dependencies = set(['xep_0030']) stanza = stanza default_config = { 'keepalive': False, 'interval': 300, 'timeout': 30 } def plugin_init(self): """ Start the XEP-0199 plugin. 
""" register_stanza_plugin(Iq, Ping) self.xmpp.register_handler( Callback('Ping', StanzaPath('iq@type=get/ping'), self._handle_ping)) if self.keepalive: self.xmpp.add_event_handler('session_start', self.enable_keepalive, threaded=True) self.xmpp.add_event_handler('session_end', self.disable_keepalive) def plugin_end(self): self.xmpp['xep_0030'].del_feature(feature=Ping.namespace) self.xmpp.remove_handler('Ping') if self.keepalive: self.xmpp.del_event_handler('session_start', self.enable_keepalive) self.xmpp.del_event_handler('session_end', self.disable_keepalive) def session_bind(self, jid): self.xmpp['xep_0030'].add_feature(Ping.namespace) def enable_keepalive(self, interval=None, timeout=None): if interval: self.interval = interval if timeout: self.timeout = timeout self.keepalive = True self.xmpp.schedule('Ping keepalive', self.interval, self._keepalive, repeat=True) def disable_keepalive(self, event=None): self.xmpp.scheduler.remove('Ping keepalive') def _keepalive(self, event=None): log.debug("Keepalive ping...") try: rtt = self.ping(self.xmpp.boundjid.host, timeout=self.timeout) except IqTimeout: log.debug("Did not recieve ping back in time." + \ "Requesting Reconnect.") self.xmpp.reconnect() else: log.debug('Keepalive RTT: %s' % rtt) def _handle_ping(self, iq): """Automatically reply to ping requests.""" log.debug("Pinged by %s", iq['from']) iq.reply().send() def send_ping(self, jid, ifrom=None, block=True, timeout=None, callback=None): """Send a ping request. Arguments: jid -- The JID that will receive the ping. ifrom -- Specifiy the sender JID. block -- Indicate if execution should block until a pong response is received. Defaults to True. timeout -- Time in seconds to wait for a response. Defaults to self.timeout. callback -- Optional handler to execute when a pong is received. Useful in conjunction with the option block=False. 
""" if not timeout: timeout = self.timeout iq = self.xmpp.Iq() iq['type'] = 'get' iq['to'] = jid iq['from'] = ifrom iq.enable('ping') return iq.send(block=block, timeout=timeout, callback=callback) def ping(self, jid=None, ifrom=None, timeout=None): """Send a ping request and calculate RTT. Arguments: jid -- The JID that will receive the ping. ifrom -- Specifiy the sender JID. timeout -- Time in seconds to wait for a response. Defaults to self.timeout. """ own_host = False if not jid: if self.xmpp.is_component: jid = self.xmpp.server else: jid = self.xmpp.boundjid.host jid = JID(jid) if jid == self.xmpp.boundjid.host or \ self.xmpp.is_component and jid == self.xmpp.server: own_host = True if not timeout: timeout = self.timeout start = time.time() log.debug('Pinging %s' % jid) try: self.send_ping(jid, ifrom=ifrom, timeout=timeout) except IqError as e: if own_host: rtt = time.time() - start log.debug('Pinged %s, RTT: %s', jid, rtt) return rtt else: raise e else: rtt = time.time() - start log.debug('Pinged %s, RTT: %s', jid, rtt) return rtt
apache-2.0
2ndQuadrant/ansible
test/integration/targets/win_unzip/files/create_crafty_zip_files.py
3
1618
#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2020 Ansible Project # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function __metaclass__ = type import os import shutil import sys import zipfile # Each key is a zip file and the vaule is the list of files that will be created # and placed in the archive zip_files = { 'hat1': [r'hat/..\rabbit.txt'], 'hat2': [r'hat/..\..\rabbit.txt'], 'handcuffs': [r'..\..\houidini.txt'], 'prison': [r'..\houidini.txt'], } # Accept an argument of where to create the files, defaulting to # the current working directory. try: output_dir = sys.argv[1] except IndexError: output_dir = os.getcwd() if not os.path.isdir(output_dir): os.mkdir(output_dir) os.chdir(output_dir) for name, files in zip_files.items(): # Create the files to go in the zip archive for entry in files: dirname = os.path.dirname(entry) if dirname: if os.path.isdir(dirname): shutil.rmtree(dirname) os.mkdir(dirname) with open(entry, 'w') as e: e.write('escape!\n') # Create the zip archive with the files filename = '%s.zip' % name if os.path.isfile(filename): os.unlink(filename) with zipfile.ZipFile(filename, 'w') as zf: for entry in files: zf.write(entry) # Cleanup if dirname: shutil.rmtree(dirname) for entry in files: try: os.unlink(entry) except OSError: pass
gpl-3.0
Danfocus/Flexget
flexget/components/imdb/utils.py
2
15129
from __future__ import unicode_literals, division, absolute_import from builtins import * # noqa pylint: disable=unused-import, redefined-builtin from past.builtins import basestring import difflib import json import logging import re import random from bs4.element import Tag from flexget.utils.soup import get_soup from flexget.utils.requests import Session, TimedLimiter from flexget.utils.tools import str_to_int from flexget import plugin log = logging.getLogger('imdb.utils') # IMDb delivers a version of the page which is unparsable to unknown (and some known) user agents, such as requests' # Spoof the old urllib user agent to keep results consistent requests = Session() requests.headers.update({'User-Agent': 'Python-urllib/2.6'}) # requests.headers.update({'User-Agent': random.choice(USERAGENTS)}) # this makes most of the titles to be returned in english translation, but not all of them requests.headers.update({'Accept-Language': 'en-US,en;q=0.8'}) requests.headers.update( {'X-Forwarded-For': '24.110.%d.%d' % (random.randint(0, 254), random.randint(0, 254))} ) # give imdb a little break between requests (see: http://flexget.com/ticket/129#comment:1) requests.add_domain_limiter(TimedLimiter('imdb.com', '3 seconds')) def is_imdb_url(url): """Tests the url to see if it's for imdb.com.""" if not isinstance(url, basestring): return # Probably should use urlparse. return re.match(r'https?://[^/]*imdb\.com/', url) def is_valid_imdb_title_id(value): """ Return True if `value` is a valid IMDB ID for titles (movies, series, etc). """ if not isinstance(value, basestring): raise TypeError("is_valid_imdb_title_id expects a string but got {0}".format(type(value))) # IMDB IDs for titles have 'tt' followed by 7 or 8 digits return re.match(r'tt\d{7,8}', value) is not None def is_valid_imdb_person_id(value): """ Return True if `value` is a valid IMDB ID for a person. 
""" if not isinstance(value, basestring): raise TypeError("is_valid_imdb_person_id expects a string but got {0}".format(type(value))) # An IMDB ID for a person is formed by 'nm' followed by 7 digits return re.match(r'nm\d{7,8}', value) is not None def extract_id(url): """Return IMDb ID of the given URL. Return None if not valid or if URL is not a string.""" if not isinstance(url, basestring): return m = re.search(r'((?:nm|tt)\d{7,8})', url) if m: return m.group(1) def make_url(imdb_id): """Return IMDb URL of the given ID""" return u'https://www.imdb.com/title/%s/' % imdb_id class ImdbSearch(object): def __init__(self): # de-prioritize aka matches a bit self.aka_weight = 0.95 # prioritize first self.first_weight = 1.1 self.min_match = 0.7 self.min_diff = 0.01 self.debug = False self.max_results = 50 def ireplace(self, text, old, new, count=0): """Case insensitive string replace""" pattern = re.compile(re.escape(old), re.I) return re.sub(pattern, new, text, count) def smart_match(self, raw_name, single_match=True): """Accepts messy name, cleans it and uses information available to make smartest and best match""" parser = plugin.get('parsing', 'imdb_search').parse_movie(raw_name) name = parser.name year = parser.year if not name: log.critical('Failed to parse name from %s', raw_name) return None log.debug('smart_match name=%s year=%s' % (name, str(year))) return self.best_match(name, year, single_match) def best_match(self, name, year=None, single_match=True): """Return single movie that best matches name criteria or None""" movies = self.search(name) if not movies: log.debug('search did not return any movies') return None # remove all movies below min_match, and different year for movie in movies[:]: if year and movie.get('year'): if movie['year'] != year: log.debug( 'best_match removing %s - %s (wrong year: %s)' % (movie['name'], movie['url'], str(movie['year'])) ) movies.remove(movie) continue if movie['match'] < self.min_match: log.debug('best_match removing %s 
(min_match)', movie['name']) movies.remove(movie) continue if not movies: log.debug('FAILURE: no movies remain') return None # if only one remains .. if len(movies) == 1: log.debug('SUCCESS: only one movie remains') return movies[0] # check min difference between best two hits diff = movies[0]['match'] - movies[1]['match'] if diff < self.min_diff: log.debug( 'unable to determine correct movie, min_diff too small (`%s` <-?-> `%s`)' % (movies[0], movies[1]) ) for m in movies: log.debug('remain: %s (match: %s) %s' % (m['name'], m['match'], m['url'])) return None else: return movies[0] if single_match else movies def search(self, name): """Return array of movie details (dict)""" log.debug('Searching: %s', name) url = u'https://www.imdb.com/find' # This may include Shorts and TV series in the results params = {'q': name, 's': 'tt'} log.debug('Search query: %s', repr(url)) page = requests.get(url, params=params) actual_url = page.url movies = [] soup = get_soup(page.text) # in case we got redirected to movie page (perfect match) re_m = re.match(r'.*\.imdb\.com/title/tt\d+/', actual_url) if re_m: actual_url = re_m.group(0) imdb_id = extract_id(actual_url) movie_parse = ImdbParser() movie_parse.parse(imdb_id, soup=soup) log.debug('Perfect hit. 
Search got redirected to %s', actual_url) movie = { 'match': 1.0, 'name': movie_parse.name, 'imdb_id': imdb_id, 'url': make_url(imdb_id), 'year': movie_parse.year, } movies.append(movie) return movies section_table = soup.find('table', 'findList') if not section_table: log.debug('results table not found') return rows = section_table.find_all('tr') if not rows: log.debug('Titles section does not have links') for count, row in enumerate(rows): # Title search gives a lot of results, only check the first ones if count > self.max_results: break result_text = row.find('td', 'result_text') movie = {} additional = re.findall(r'\((.*?)\)', result_text.text) if len(additional) > 0: if re.match('^\d{4}$', additional[-1]): movie['year'] = str_to_int(additional[-1]) elif len(additional) > 1: movie['year'] = str_to_int(additional[-2]) if additional[-1] not in ['TV Movie', 'Video']: log.debug('skipping %s', result_text.text) continue primary_photo = row.find('td', 'primary_photo') movie['thumbnail'] = primary_photo.find('a').find('img').get('src') link = result_text.find_next('a') movie['name'] = link.text movie['imdb_id'] = extract_id(link.get('href')) movie['url'] = make_url(movie['imdb_id']) log.debug('processing name: %s url: %s' % (movie['name'], movie['url'])) # calc & set best matching ratio seq = difflib.SequenceMatcher(lambda x: x == ' ', movie['name'].title(), name.title()) ratio = seq.ratio() # check if some of the akas have better ratio for aka in link.parent.find_all('i'): aka = aka.next.string match = re.search(r'".*"', aka) if not match: log.debug('aka `%s` is invalid' % aka) continue aka = match.group(0).replace('"', '') log.trace('processing aka %s' % aka) seq = difflib.SequenceMatcher(lambda x: x == ' ', aka.title(), name.title()) aka_ratio = seq.ratio() if aka_ratio > ratio: ratio = aka_ratio * self.aka_weight log.debug( '- aka `%s` matches better to `%s` ratio %s (weighted to %s)' % (aka, name, aka_ratio, ratio) ) # prioritize items by position position_ratio 
= (self.first_weight - 1) / (count + 1) + 1 log.debug( '- prioritizing based on position %s `%s`: %s' % (count, movie['url'], position_ratio) ) ratio *= position_ratio # store ratio movie['match'] = ratio movies.append(movie) movies.sort(key=lambda x: x['match'], reverse=True) return movies class ImdbParser(object): """Quick-hack to parse relevant imdb details""" def __init__(self): self.genres = [] self.languages = [] self.actors = {} self.directors = {} self.writers = {} self.score = 0.0 self.votes = 0 self.meta_score = 0 self.year = 0 self.plot_outline = None self.name = None self.original_name = None self.url = None self.imdb_id = None self.photo = None self.mpaa_rating = '' def __str__(self): return '<ImdbParser(name=%s,imdb_id=%s)>' % (self.name, self.imdb_id) def parse(self, imdb_id, soup=None): self.imdb_id = extract_id(imdb_id) url = make_url(self.imdb_id) self.url = url if not soup: page = requests.get(url) soup = get_soup(page.text) title_wrapper = soup.find('div', attrs={'class': 'title_wrapper'}) data = json.loads(soup.find('script', {'type': 'application/ld+json'}).text) if not data: raise plugin.PluginError( 'IMDB parser needs updating, imdb format changed. Please report on Github.' 
) # Parse stuff from the title-overview section name_elem = data['name'] if name_elem: self.name = name_elem.strip() else: log.error('Possible IMDB parser needs updating, Please report on Github.') raise plugin.PluginError( 'Unable to set imdb_name for %s from %s' % (self.imdb_id, self.url) ) year = soup.find('span', attrs={'id': 'titleYear'}) if year: m = re.search(r'([0-9]{4})', year.text) if m: self.year = int(m.group(1)) if not self.year: log.debug('No year found for %s', self.imdb_id) mpaa_rating_elem = data.get('contentRating') if mpaa_rating_elem: self.mpaa_rating = mpaa_rating_elem else: log.debug('No rating found for %s', self.imdb_id) photo_elem = data.get('image') if photo_elem: self.photo = photo_elem else: log.debug('No photo found for %s', self.imdb_id) original_name_elem = title_wrapper.find('div', {'class': 'originalTitle'}) if original_name_elem: self.name = title_wrapper.find('h1').contents[0].strip() self.original_name = original_name_elem.contents[0].strip().strip('"') else: log.debug('No original title found for %s', self.imdb_id) votes_elem = data.get('aggregateRating', {}).get('ratingCount') if votes_elem: self.votes = str_to_int(votes_elem) if not isinstance(votes_elem, int) else votes_elem else: log.debug('No votes found for %s', self.imdb_id) score_elem = data.get('aggregateRating', {}).get('ratingValue') if score_elem: self.score = float(score_elem) else: log.debug('No score found for %s', self.imdb_id) meta_score_elem = soup.find(attrs={'class': 'metacriticScore'}) if meta_score_elem: self.meta_score = str_to_int(meta_score_elem.text) else: log.debug('No Metacritic score found for %s', self.imdb_id) # get director(s) directors = data.get('director', []) if not isinstance(directors, list): directors = [directors] for director in directors: if director['@type'] != 'Person': continue director_id = extract_id(director['url']) director_name = director['name'] self.directors[director_id] = director_name # get writer(s) writers = 
data.get('creator', []) if not isinstance(writers, list): writers = [writers] for writer in writers: if writer['@type'] != 'Person': continue writer_id = extract_id(writer['url']) writer_name = writer['name'] self.writers[writer_id] = writer_name # Details section title_details = soup.find('div', attrs={'id': 'titleDetails'}) if title_details: # get languages for link in title_details.find_all( 'a', href=re.compile(r'^/search/title\?title_type=feature' '&primary_language=') ): lang = link.text.strip().lower() if lang not in self.languages: self.languages.append(lang.strip()) # Storyline section storyline = soup.find('div', attrs={'id': 'titleStoryLine'}) if storyline: plot_elem = storyline.find('p') if plot_elem: # Remove the "Written By" part. if plot_elem.em: plot_elem.em.replace_with('') self.plot_outline = plot_elem.text.strip() else: log.debug('No storyline found for %s', self.imdb_id) genres = data.get('genre', []) if not isinstance(genres, list): genres = [genres] self.genres = [g.strip().lower() for g in genres] # Cast section cast = soup.find('table', attrs={'class': 'cast_list'}) if cast: for actor in cast.select('tr > td:nth-of-type(2) > a'): actor_id = extract_id(actor['href']) actor_name = actor.text.strip() # tag instead of name if isinstance(actor_name, Tag): actor_name = None self.actors[actor_id] = actor_name
mit
Shouqun/node-gn
tools/depot_tools/third_party/retry_decorator/decorators.py
55
1565
import time from functools import wraps def retry(ExceptionToCheck, tries=4, delay=3, backoff=2, logger=None): """Retry calling the decorated function using an exponential backoff. http://www.saltycrane.com/blog/2009/11/trying-out-retry-decorator-python/ original from: http://wiki.python.org/moin/PythonDecoratorLibrary#Retry :param ExceptionToCheck: the exception to check. may be a tuple of exceptions to check :type ExceptionToCheck: Exception or tuple :param tries: number of times to try (not retry) before giving up :type tries: int :param delay: initial delay between retries in seconds :type delay: int :param backoff: backoff multiplier e.g. value of 2 will double the delay each retry :type backoff: int :param logger: logger to use. If None, print :type logger: logging.Logger instance """ def deco_retry(f): @wraps(f) def f_retry(*args, **kwargs): mtries, mdelay = tries, delay while mtries > 1: try: return f(*args, **kwargs) except ExceptionToCheck, e: msg = "%s, Retrying in %d seconds..." % (str(e), mdelay) if logger: logger.warning(msg) else: print msg time.sleep(mdelay) mtries -= 1 mdelay *= backoff return f(*args, **kwargs) return f_retry # true decorator return deco_retry
mit
dougbenjamin/panda-harvester
pandaharvester/harvesterstager/rucio_stager.py
1
8949
import os
import sys
import shutil
import os.path
import uuid

from future.utils import iteritems

from pandaharvester.harvestercore import core_utils
from .base_stager import BaseStager
from pandaharvester.harvestermover import mover_utils

from rucio.client import Client as RucioClient
from rucio.common.exception import RuleNotFound

# logger
baseLogger = core_utils.setup_logger('rucio_stager')


# plugin for stage-out with Rucio
class RucioStager(BaseStager):
    """Stage-out plugin that copies job outputs into a Rucio-managed path,
    registers them in temporary datasets, and transfers them with
    replication rules."""

    # constructor
    def __init__(self, **kwarg):
        BaseStager.__init__(self, **kwarg)
        if not hasattr(self, 'scopeForTmp'):
            self.scopeForTmp = 'panda'

    # check status
    def check_status(self, jobspec):
        """Poll the replication rules of jobspec's output files.

        Returns (True, '') when all files were checked (individual files may
        still be 'failed'), or (False, errMsg) when at least one rule lookup
        raised an unexpected error and should be retried later.
        """
        # make logger
        tmpLog = self.make_logger(baseLogger, 'PandaID={0}'.format(jobspec.PandaID),
                                  method_name='check_status')
        tmpLog.debug('start')
        # loop over all files
        allChecked = True
        oneErrMsg = None
        transferStatus = dict()
        for fileSpec in jobspec.outFiles:
            # skip files already done
            if fileSpec.status in ['finished', 'failed']:
                continue
            # get transfer ID
            transferID = fileSpec.fileAttributes['transferID']
            if transferID not in transferStatus:
                # look each rule up only once; later files reuse the cache
                try:
                    rucioAPI = RucioClient()
                    ruleInfo = rucioAPI.get_replication_rule(transferID)
                    tmpTransferStatus = ruleInfo['state']
                    tmpLog.debug('got state={0} for rule={1}'.format(tmpTransferStatus, transferID))
                except RuleNotFound:
                    tmpLog.error('rule {0} not found'.format(transferID))
                    tmpTransferStatus = 'FAILED'
                # FIX: was a bare `except:` which also swallowed
                # KeyboardInterrupt/SystemExit
                except Exception:
                    err_type, err_value = sys.exc_info()[:2]
                    errMsg = "{0} {1}".format(err_type.__name__, err_value)
                    tmpLog.error('failed to get status for rule={0} with {1}'.format(transferID, errMsg))
                    # set dummy not to lookup again; None leaves the file
                    # status untouched so it is re-checked on the next cycle
                    tmpTransferStatus = None
                    allChecked = False
                    # keep one message
                    if oneErrMsg is None:
                        oneErrMsg = errMsg
                    # FIX: removed a stray `tmpTransferStatus = 'OK'` here
                    # (apparent debug leftover) that overrode the dummy None
                    # and marked files 'finished' despite the lookup error.
                transferStatus[transferID] = tmpTransferStatus
            # final status
            if transferStatus[transferID] == 'OK':
                fileSpec.status = 'finished'
            elif transferStatus[transferID] in ['FAILED', 'CANCELED']:
                fileSpec.status = 'failed'
        if allChecked:
            return True, ''
        else:
            return False, oneErrMsg

    # trigger stage out
    def trigger_stage_out(self, jobspec):
        """Copy output files to the transfer source path, register them in
        per-file-type Rucio datasets, and create replication rules.

        Returns (True, '') on success or (False, errMsg) on failure.
        """
        # make logger
        tmpLog = self.make_logger(baseLogger, 'PandaID={0}'.format(jobspec.PandaID),
                                  method_name='trigger_stage_out')
        tmpLog.debug('start')
        # loop over all files
        files = dict()
        transferIDs = dict()
        transferDatasets = dict()
        fileAttrs = jobspec.get_output_file_attributes()
        for fileSpec in jobspec.outFiles:
            # skip zipped files
            if fileSpec.zipFileID is not None:
                continue
            # skip if already processed
            if 'transferDataset' in fileSpec.fileAttributes:
                if fileSpec.fileType not in transferDatasets:
                    transferDatasets[fileSpec.fileType] = fileSpec.fileAttributes['transferDataset']
                if fileSpec.fileType not in transferIDs:
                    transferIDs[fileSpec.fileType] = fileSpec.fileAttributes['transferID']
                continue
            # set OS ID
            # FIX: was `fileSpec.fileType == ['es_output', 'zip_output']`,
            # comparing a string to a list -- always False, so objstoreID was
            # never set.  Membership test is the intended check (same form as
            # the dstRSE dispatch below).
            if fileSpec.fileType in ['es_output', 'zip_output']:
                fileSpec.objstoreID = self.objStoreID_ES
            # make path where file is copied for transfer
            if fileSpec.fileType != 'zip_output':
                scope = fileAttrs[fileSpec.lfn]['scope']
                datasetName = fileAttrs[fileSpec.lfn]['dataset']
            else:
                # use panda scope for zipped files
                scope = self.scopeForTmp
                datasetName = 'dummy'
            srcPath = fileSpec.path
            dstPath = mover_utils.construct_file_path(self.srcBasePath, scope, fileSpec.lfn)
            # remove any stale copy first
            if os.path.exists(dstPath):
                os.remove(dstPath)
            # copy
            tmpLog.debug('copy src={srcPath} dst={dstPath}'.format(srcPath=srcPath, dstPath=dstPath))
            dstDir = os.path.dirname(dstPath)
            if not os.path.exists(dstDir):
                os.makedirs(dstDir)
            shutil.copyfile(srcPath, dstPath)
            # collect files per file type
            tmpFile = dict()
            tmpFile['scope'] = scope
            tmpFile['name'] = fileSpec.lfn
            tmpFile['bytes'] = fileSpec.fsize
            if fileSpec.fileType not in files:
                files[fileSpec.fileType] = []
            files[fileSpec.fileType].append(tmpFile)
        # loop over all file types to be registered to rucio
        rucioAPI = RucioClient()
        for fileType, fileList in iteritems(files):
            # set destination RSE
            if fileType in ['es_output', 'zip_output']:
                dstRSE = self.dstRSE_ES
            elif fileType == 'output':
                dstRSE = self.dstRSE_Out
            elif fileType == 'log':
                dstRSE = self.dstRSE_Log
            else:
                errMsg = 'unsupported file type {0}'.format(fileType)
                tmpLog.error(errMsg)
                return (False, errMsg)
            # skip if destination is None
            if dstRSE is None:
                continue
            # make datasets if missing
            if fileType not in transferDatasets:
                try:
                    tmpScope = self.scopeForTmp
                    tmpDS = 'panda.harvester_stage_out.{0}'.format(str(uuid.uuid4()))
                    rucioAPI.add_dataset(tmpScope, tmpDS,
                                         meta={'hidden': True},
                                         lifetime=30 * 24 * 60 * 60,
                                         files=fileList,
                                         rse=self.srcRSE
                                         )
                    transferDatasets[fileType] = tmpDS
                    # add rule
                    tmpDID = dict()
                    tmpDID['scope'] = tmpScope
                    tmpDID['name'] = tmpDS
                    tmpRet = rucioAPI.add_replication_rule([tmpDID], 1, dstRSE,
                                                           lifetime=30 * 24 * 60 * 60
                                                           )
                    tmpTransferIDs = tmpRet[0]
                    transferIDs[fileType] = tmpTransferIDs
                    tmpLog.debug('register dataset {0} with rule {1}'.format(tmpDS, str(tmpTransferIDs)))
                # FIX: bare `except:` narrowed to Exception
                except Exception:
                    errMsg = core_utils.dump_error_message(tmpLog)
                    return (False, errMsg)
            else:
                # add files to existing dataset
                try:
                    tmpScope = self.scopeForTmp
                    tmpDS = transferDatasets[fileType]
                    rucioAPI.add_files_to_dataset(tmpScope, tmpDS, fileList, self.srcRSE)
                    tmpLog.debug('added files to {0}'.format(tmpDS))
                # FIX: bare `except:` narrowed to Exception
                except Exception:
                    errMsg = core_utils.dump_error_message(tmpLog)
                    return (False, errMsg)
        # set transfer datasets and rules
        for fileSpec in jobspec.outFiles:
            # skip zipped files
            if fileSpec.zipFileID is not None:
                continue
            # skip already done
            if fileSpec.status in ['finished', 'failed']:
                continue
            # skip if already processed
            if 'transferDataset' in fileSpec.fileAttributes:
                continue
            # no destination
            if fileSpec.fileType not in transferDatasets:
                fileSpec.status = 'finished'
                continue
            # set dataset
            fileSpec.fileAttributes['transferDataset'] = transferDatasets[fileSpec.fileType]
            # set rule
            fileSpec.fileAttributes['transferID'] = transferIDs[fileSpec.fileType]
            # force update
            fileSpec.force_update('fileAttributes')
        # return
        tmpLog.debug('done')
        return (True, '')

    # zip output files
    def zip_output(self, jobspec):
        """Zip the job's output files using the shared helper from BaseStager."""
        # make logger
        tmpLog = self.make_logger(baseLogger, 'PandaID={0}'.format(jobspec.PandaID),
                                  method_name='zip_output')
        return self.simple_zip_output(jobspec, tmpLog)
apache-2.0